From 15acb010408a24927a243a2fd29a40fa1c64557f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Thu, 14 Aug 2014 12:51:44 +0200 Subject: [PATCH 001/239] Initial commit --- pkgs/gcloud/AUTHORS | 6 ++++++ pkgs/gcloud/LICENSE | 24 ++++++++++++++++++++++++ pkgs/gcloud/PATENTS | 23 +++++++++++++++++++++++ pkgs/gcloud/README.md | 3 +++ pkgs/gcloud/codereview.settings | 3 +++ 5 files changed, 59 insertions(+) create mode 100644 pkgs/gcloud/AUTHORS create mode 100644 pkgs/gcloud/LICENSE create mode 100644 pkgs/gcloud/PATENTS create mode 100644 pkgs/gcloud/README.md create mode 100644 pkgs/gcloud/codereview.settings diff --git a/pkgs/gcloud/AUTHORS b/pkgs/gcloud/AUTHORS new file mode 100644 index 00000000..7c12ae68 --- /dev/null +++ b/pkgs/gcloud/AUTHORS @@ -0,0 +1,6 @@ +# Below is a list of people and organizations that have contributed +# to the Dart project. Names should be added to the list like so: +# +# Name/Organization + +Google Inc. diff --git a/pkgs/gcloud/LICENSE b/pkgs/gcloud/LICENSE new file mode 100644 index 00000000..49475511 --- /dev/null +++ b/pkgs/gcloud/LICENSE @@ -0,0 +1,24 @@ +Copyright 2014, the Dart project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pkgs/gcloud/PATENTS b/pkgs/gcloud/PATENTS new file mode 100644 index 00000000..69541968 --- /dev/null +++ b/pkgs/gcloud/PATENTS @@ -0,0 +1,23 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Dart Project. + +Google hereby grants to you a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this +section) patent license to make, have made, use, offer to sell, sell, +import, transfer, and otherwise run, modify and propagate the contents +of this implementation of Dart, where such license applies only to +those patent claims, both currently owned by Google and acquired in +the future, licensable by Google that are necessarily infringed by +this implementation of Dart. 
This grant does not include claims that +would be infringed only as a consequence of further modification of +this implementation. If you or your agent or exclusive licensee +institute or order or agree to the institution of patent litigation +against any entity (including a cross-claim or counterclaim in a +lawsuit) alleging that this implementation of Dart or any code +incorporated within this implementation of Dart constitutes direct or +contributory patent infringement, or inducement of patent +infringement, then any patent rights granted to you under this License +for this implementation of Dart shall terminate as of the date such +litigation is filed. diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md new file mode 100644 index 00000000..140e8d0c --- /dev/null +++ b/pkgs/gcloud/README.md @@ -0,0 +1,3 @@ +## Google Cloud Platform + +High level interface for Google Cloud Platform APIs diff --git a/pkgs/gcloud/codereview.settings b/pkgs/gcloud/codereview.settings new file mode 100644 index 00000000..d25f0372 --- /dev/null +++ b/pkgs/gcloud/codereview.settings @@ -0,0 +1,3 @@ +CODE_REVIEW_SERVER: http://codereview.chromium.org/ +VIEW_VC: https://github.com/dart-lang/gcloud/commit/ +CC_LIST: reviews@dartlang.org From ae2dcd98f7551959df2f77cda0e0be77d108c05b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 3 Sep 2014 14:53:53 +0200 Subject: [PATCH 002/239] Initial gcloud API for Google Cloud Pub/Sub See https://developers.google.com/pubsub/ for the Google Develeopers page. R=kustermann@google.com, lrn@google.com BUG= Review URL: https://codereview.chromium.org//496333002 --- pkgs/gcloud/.gitignore | 2 + pkgs/gcloud/lib/common.dart | 27 ++ pkgs/gcloud/lib/pubsub.dart | 396 +++++++++++++++ pkgs/gcloud/lib/src/pubsub_impl.dart | 534 +++++++++++++++++++++ pkgs/gcloud/pubspec.yaml | 17 + pkgs/gcloud/test/pubsub_test.dart | 689 +++++++++++++++++++++++++++ 6 files changed, 1665 insertions(+) create mode 100644 pkgs/gcloud/.gitignore create mode 100644 pkgs/gcloud/lib/common.dart create mode 100644 pkgs/gcloud/lib/pubsub.dart create mode 100644 pkgs/gcloud/lib/src/pubsub_impl.dart create mode 100644 pkgs/gcloud/pubspec.yaml create mode 100644 pkgs/gcloud/test/pubsub_test.dart diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore new file mode 100644 index 00000000..25161add --- /dev/null +++ b/pkgs/gcloud/.gitignore @@ -0,0 +1,2 @@ +pubspec.lock +packages diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart new file mode 100644 index 00000000..afd532ec --- /dev/null +++ b/pkgs/gcloud/lib/common.dart @@ -0,0 +1,27 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.pubsub; + +import 'dart:async'; + +/// A single page of paged results from a query. +/// +/// Use `next` to move to the next page. If this is the last page `next` +/// completes with `null` +abstract class Page { + /// The items in this page. + List get items; + + /// Whether this is the last page of results. + bool get isLast; + + /// Move to the next page. + /// + /// The future returned completes with the next page or results. + /// + /// If [next] is called on the last page the returned future completes + /// with `null`. 
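+  ///
+  /// A minimal sketch of draining all pages (the `process` callback is
+  /// assumed to be supplied by the caller):
+  ///
+  ///     Future drainPages(Page page, void process(item)) {
+  ///       page.items.forEach(process);
+  ///       if (page.isLast) return new Future.value(null);
+  ///       return page.next().then((next) => drainPages(next, process));
+  ///     }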
+ Future> next({int pageSize}); +} \ No newline at end of file diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart new file mode 100644 index 00000000..e298683e --- /dev/null +++ b/pkgs/gcloud/lib/pubsub.dart @@ -0,0 +1,396 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.pubsub; + +import 'dart:async'; +import 'dart:convert'; +import 'package:crypto/crypto.dart'; +import 'package:http/http.dart' as http; + +import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; + +import 'common.dart'; +export 'common.dart'; + +part 'src/pubsub_impl.dart'; + +/// A Cloud Pub/Sub client. +/// +/// Connects to the Cloud Pub/Sub service and gives access to its operations. +/// +/// Google Cloud Pub/Sub is a reliable, many-to-many, asynchronous messaging +/// service from Google Cloud Platform. A detailed overview is available on +/// [Pub/Sub docs](https://developers.google.com/pubsub/overview). +/// +/// To access Pub/Sub, an authenticate HTTP client is required. This client +/// should as a minimum provide access to the scopes `PubSub.Scopes`. +/// +/// The following example shows how to access Pub/Sub using a service account +/// and pull a message from a subscription. +/// +/// import 'package:http/http.dart' as http; +/// import 'package:googleapis_auth/auth_io.dart' as auth; +/// import 'package:gcloud/pubsub.dart'; +/// +/// Future createClient() { +/// // Service account credentials retreived from Cloud Console. +/// String creds = +/// r''' +/// { +/// "private_key_id": ..., +/// "private_key": ..., +/// "client_email": ..., +/// "client_id": ..., +/// "type": "service_account" +/// }'''; +/// return auth.clientViaServiceAccount( +/// new auth.ServiceAccountCredentials.fromJson(creds), +/// PubSub.Scopes); +/// } +/// +/// main() { +/// var project = 'my-project'; +/// var client; +/// var pubsub; +/// createClient().then((c) { +/// client = c; +/// pubsub = new PubSub(client, project); +/// return pubsub.lookupSubscription('my-subscription'); +/// }) +/// .then((Subscription subscription) => subscription.pull()) +/// .then((PullEvent event) => print('Message ${event.message.asString}')) +/// .whenComplete(() => client.close()); +/// } +/// +/// When working with topics and subscriptions they are referred to using +/// names. These names can be either relative names or absolute names. +/// +/// An absolute name of a topic starts with `/` and has the form: +/// +/// /topics// +/// +/// When a relative topic name is used, its absolute name is generated by +/// prepending `/topics//`, where `` is the project +/// id passed to the constructor. +/// +/// An absolute name of a subscription starts with `/` and has the form: +/// +/// /subscriptions// +/// +/// When a relative subscription name is used, its absolute name is +/// generated by prepending `/subscriptions//`, where +/// `` is the project id passed to the constructor. +/// +abstract class PubSub { + /// List of required OAuth2 scopes for Pub/Sub operation. + static const Scopes = const [ pubsub.PubsubApi.PubsubScope ]; + + /// Access Pub/Sub using an authenicated client. + /// + /// The [client] is an authentiacted HTTP client. This client must + /// provide access to at least the scopes in `PubSub.Scopes`. + /// + /// The [project] is the name of the Google Cloud project. + /// + /// Returs an object providing access to Pub/Sub. 
The passed-in [client] will + /// not be closed automatically. The caller is responsible for closing it. + factory PubSub(http.Client client, String project) = _PubSubImpl; + + /// The name of the project. + String get project; + + /// Create a new topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the newly created topic. + Future createTopic(String name); + + /// Delete topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future deleteTopic(String name); + + /// Look up topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the topic. + Future lookupTopic(String name); + + /// Lists all topics. + /// + /// Returns a `Stream` of topics. + Stream listTopics(); + + /// Start paging through all topics. + /// + /// The maximum number of topics in each page is specified in [pageSize]. + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page of topics. + Future> pageTopics({int pageSize: 50}); + + /// Create a new subscription named [name] listening on topic [topic]. + /// + /// If [endpoint] is passed this will create a push subscription. + /// + /// Otherwise this will create a pull subscription. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the newly created subscripton. + Future createSubscription( + String name, String topic, {Uri endpoint}); + + /// Delete subscription named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the subscription. + Future deleteSubscription(String name); + + /// Lookup subscription with named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the subscription. + Future lookupSubscription(String name); + + /// List all subscriptions. + /// + /// Returns a `Stream` of subscriptions. + Stream listSubscriptions(); + + /// Start paging through all subscriptions. + /// + /// The maximum number of subscriptions in each page is specified in + /// [pageSize] + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page of + /// subscriptions. + Future> pageSubscriptions({int pageSize: 50}); +} + +/// A Pub/Sub topic. +/// +/// A topic is used by a publisher to publish (send) messages. +abstract class Topic { + /// The relative name of this topic. + String get name; + + /// The name of the project for this topic. + String get project; + + /// The absolute name of this topic. + String get absoluteName; + + /// Delete this topic. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future delete(); + + /// Publish a message. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publish(Message message); + + /// Publish a string as a message. + /// + /// The message will get the labels specified in [labels]. The keys in this + /// map must be strings and the values must be either Strings or integers. 
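+  ///
+  /// A minimal usage sketch, assuming `topic` is a [Topic] obtained from
+  /// e.g. [PubSub.lookupTopic] (label names and values are arbitrary):
+  ///
+  ///     topic.publishString('Hello, world!',
+  ///                         labels: {'source': 'example', 'messageNo': 1})
+  ///         .then((_) => print('message published'));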
+ /// + /// The [labels] are passed together with the message to the receiver. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publishString(String message, {Map labels}); + + /// Publish bytes as a message. + /// + /// The message will get the labels specified in [labels]. The keys in this + /// map must be strings and the values must be either Strings or integers. + /// + /// The [labels] are passed together with the message to the receiver. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publishBytes(List message, {Map labels}); +} + +/// A Pub/Sub subscription +/// +/// A subscription is used to receive messages. A subscriber application +/// create a subscription on a topic to receive messages from it. +/// +/// Subscriptions can be either pull subscriptions or push subscriptions. +/// +/// For a pull subscription the receiver calls the `Subscription.pull` +/// method on the subscription object to get the next message. +/// +/// For a push subscription a HTTPS endpoint is configured. This endpoint get +/// POST requests with the messages. +abstract class Subscription { + /// The relative name of this subscription. + String get name; + + /// The name of the project for this subscription. + String get project; + + /// The absolute name of this subscription. + String get absoluteName; + + /// The topic subscribed to. + Topic get topic; + + /// Whether this is a push subscription. + /// + /// A push subscription is configured with an endpoint URI, and messages + /// are automatically sent to this endpoint without needing to call [pull]. + bool get isPush; + + /// Whether this is a pull subscription. + /// + /// A subscription without a configured endpoint URI is a pull subscripton. + /// Messages are not delivered automatically, but must instead be requested + /// using [pull]. + bool get isPull; + + /// The URI for the push endpoint. + /// + /// If this is a pull subscription this is `null`. + Uri get endpoint; + + /// Update the push configuration with a new endpoint. + /// + /// if [endpoint] is `null`, the subscription stops delivering messages + /// automatically, and becomes a pull subscription, if it isn't already. + /// + /// If [endpoint] is not `null`, the subscription will be a push + /// subscription, if it wasn't already, and Pub/Sub will start automatically + /// delivering message to the endpoint URI. + /// + /// Returns a `Future` which completes when the operation completes. + Future updatePushConfiguration(Uri endpoint); + + /// Delete this subscription. + /// + /// Returns a `Future` which completes when the operation completes. + Future delete(); + + + /// Pull a message from the subscription. + /// + /// If [noWait] is true, the method will complete the returned `Future` + /// with `null` if it finds that there are no messages available. + /// + /// If `noWait` is false, the method will wait for a message to become + /// available, and will then complete the `Future` with a `PullEvent` + /// containing the message. + Future pull({bool noWait: true}); +} + +/// The content of a Pub/Sub message. +/// +/// All Pub/Sub messages consist of a body of binary data and has an optional +/// set of labels (key-value pairs) associated with it. +/// +/// A `Message` contains the message body a list of bytes. The message body can +/// be read and written as a String, in which case the string is converted to +/// or from UTF-8 automatically. 
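+///
+/// For example (a sketch; the bodies and labels are arbitrary):
+///
+///     var textMessage = new Message.withString('Hello, world!');
+///     var binaryMessage = new Message.withBytes([1, 2, 3, 4],
+///                                               labels: {'format': 'raw'});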
+abstract class Message { + /// Creates a new message with a String for the body. The String will + /// be UTF-8 encoded to create the actual binary body for the message. + /// + /// Message labels can be passed in the [labels] Map. The values in this + /// map must be either Strings or integers. Integers must be positive + /// 64-bit integers. + factory Message.withString(String message, {Map labels}) = + _MessageImpl.withString; + + /// Creates a new message with a binary body. + /// + /// Message labels can be passed in the [labels] Map. The values in this + /// map must be either Strings or integers. Integers must be positive + /// 64-bit integers. + factory Message.withBytes(List message, {Map labels}) = + _MessageImpl.withBytes; + + /// The message body as a String. + /// + /// The binary body is decoded into a String using an UTF-8 decoder. + /// + /// If the body is not UTF-8 encoded use the [asBytes] getter and manually + /// apply the corect decoding. + String get asString; + + /// The message body as bytes. + List get asBytes; + + /// The labels for this message. The values in the Map are either + /// Strings or integers. + /// + /// TODO: Values can be 64-bit integers. Deal with this for dart2js? + Map get labels; +} + +/// A Pub/Sub pull event. +/// +/// Instances of this class are returned when pulling messages with +/// [Subscription.pull]. +abstract class PullEvent { + /// The message content. + Message get message; + + /// Whether the message was truncated. + bool get isTruncated; + + /// Acknowledge reception of this message. + /// + /// Returns a `Future` which completes with `null` when the acknowledge has + /// been processed. + Future acknowledge(); +} + +/// Pub/Sub push event. +/// +/// This class can be used in a HTTP server for decoding messages pushed to +/// an endpoint. +/// +/// When a message is received on a push endpoint use the [PushEvent.fromJson] +/// constructor with the HTTP body to decode the received message. +/// +/// E.g. with a `dart:io` HTTP handler: +/// +/// void pushHandler(HttpRequest request) { +/// // Decode the JSON body. +/// request.transform(UTF8.decoder).join('').then((body) { +/// // Decode the JSON into a push message. +/// var message = new PushMessage.fromJson(body) +/// +/// // Process the message... +/// +/// // Respond with status code 20X to acknowledge the message. +/// response.statusCode = statusCode; +/// response.close(); +/// }); +/// } +//// +abstract class PushEvent { + /// The message content. + Message get message; + + /// The absolute name of the subscription. + String get subscriptionName; + + /// Create a `PushMessage` from JSON received on a Pub/Sub push endpoint. + factory PushEvent.fromJson(String json) = _PushEventImpl.fromJson; +} diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart new file mode 100644 index 00000000..d75f6725 --- /dev/null +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -0,0 +1,534 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +part of gcloud.pubsub; + +class _PubSubImpl implements PubSub { + final http.Client _client; + final String project; + final pubsub.PubsubApi _api; + final String _topicPrefix; + final String _subscriptionPrefix; + + _PubSubImpl(client, project) : + this._client = client, + this.project = project, + _api = new pubsub.PubsubApi(client), + _topicPrefix = '/topics/$project/', + _subscriptionPrefix = '/subscriptions/$project/'; + + + String _fullTopicName(String name) { + if (name.startsWith('/') && !name.startsWith('/topics')) { + throw new ArgumentError("Illegal absolute topic name. Absolute topic " + "name must start with '/topics'"); + } + return name.startsWith('/topics') ? name : '${_topicPrefix}$name'; + } + + String _fullSubscriptionName(name) { + if (name.startsWith('/') && !name.startsWith('/subscriptions')) { + throw new ArgumentError("Illegal absolute topic name. Absolute topic " + "name must start with '/subscriptions'"); + } + return name.startsWith('/subscriptions') ? name + : '${_subscriptionPrefix}$name'; + } + + Future _createTopic(String name) { + return _api.topics.create(new pubsub.Topic()..name = name); + } + + Future _deleteTopic(String name) { + return _api.topics.delete(name); + } + + Future _getTopic(String name) { + return _api.topics.get(name); + } + + Future _listTopics( + int pageSize, [String nextPageToken]) { + var query = 'cloud.googleapis.com/project in (/projects/$project)'; + return _api.topics.list( + query: query, maxResults: pageSize, pageToken: nextPageToken); + } + + Future _createSubscription( + String name, String topic, {Uri endpoint}) { + var subscription = new pubsub.Subscription() + ..name = name + ..topic = topic; + if (endpoint != null) { + var pushConfig = + new pubsub.PushConfig()..pushEndpoint = endpoint.toString(); + subscription.pushConfig = pushConfig; + } + return _api.subscriptions.create(subscription); + } + + Future _deleteSubscription(String name) { + return _api.subscriptions.delete(_fullSubscriptionName(name)); + } + + Future _getSubscription(String name) { + return _api.subscriptions.get(name); + } + + Future _listSubscriptions( + int pageSize, [String nextPageToken]) { + var query = 'cloud.googleapis.com/project in (/projects/$project)'; + return _api.subscriptions.list( + query: query, maxResults: pageSize, pageToken: nextPageToken); + } + + Future _modifyPushConfig(String subscription, Uri endpoint) { + var pushConfig = new pubsub.PushConfig() + ..pushEndpoint = endpoint != null ? 
endpoint.toString() : null; + var request = new pubsub.ModifyPushConfigRequest() + ..subscription = subscription + ..pushConfig = pushConfig; + return _api.subscriptions.modifyPushConfig(request); + } + + Future _publish( + String topic, List message, Map labels) { + var l = null; + if (labels != null) { + l = []; + labels.forEach((key, value) { + if (value is String) { + l.add(new pubsub.Label()..key = key..strValue = value); + } else { + l.add(new pubsub.Label()..key = key..numValue = value.toString()); + } + }); + } + var request = new pubsub.PublishRequest() + ..topic = topic + ..message = (new pubsub.PubsubMessage() + ..dataAsBytes = message + ..label = l); + return _api.topics.publish(request); + } + + Future _pull( + String subscription, bool returnImmediately) { + var request = new pubsub.PullRequest() + ..subscription = subscription + ..returnImmediately = returnImmediately; + return _api.subscriptions.pull(request); + } + + Future _ack(String ackId, String subscription) { + var request = new pubsub.AcknowledgeRequest() + ..ackId = [ ackId ] + ..subscription = subscription; + return _api.subscriptions.acknowledge(request); + } + + void _checkTopicName(name) { + if (name.startsWith('/') && !name.startsWith(_topicPrefix)) { + throw new ArgumentError( + "Illegal topic name. Absolute topic names for project '$project' " + "must start with $_topicPrefix"); + } + if (name.length == _topicPrefix.length) { + throw new ArgumentError( + 'Illegal topic name. Relative part of the name cannot be empty'); + } + } + + void _checkSubscriptionName(name) { + if (name.startsWith('/') && !name.startsWith(_subscriptionPrefix)) { + throw new ArgumentError( + "Illegal subscription name. Absolute subscription names for project " + "'$project' must start with $_subscriptionPrefix"); + } + if (name.length == _subscriptionPrefix.length) { + throw new ArgumentError( + 'Illegal subscription name. 
' + 'Relative part of the name cannot be empty'); + } + } + + Future createTopic(String name) { + _checkTopicName(name); + return _createTopic(_fullTopicName(name)) + .then((top) => new _TopicImpl(this, top)); + } + + Future deleteTopic(String name) { + _checkTopicName(name); + return _deleteTopic(_fullTopicName(name)); + } + + Future lookupTopic(String name) { + _checkTopicName(name); + return _getTopic(_fullTopicName(name)) + .then((top) => new _TopicImpl(this, top)); + } + + Stream listTopics() { + bool paused = false; + Page currentPage; + StreamController controller; + + handlePage(Page page) { + currentPage = page; + page.items.forEach(controller.add); + if (page.isLast) { + controller.close(); + } else if (!paused) { + page.next().then(handlePage); + } + } + + onPause() => paused = true; + onResume() { + print('res'); + paused = false; + currentPage.next().then(handlePage); + } + + controller = new StreamController( + sync: true, onPause: onPause, onResume: onResume); + + int pageSize = 50; + _listTopics(pageSize).then((response) { + handlePage(new _TopicPageImpl(this, pageSize, response)); + }); + + return controller.stream; + } + + Future> pageTopics({int pageSize: 50}) { + return _listTopics(pageSize).then((response) { + return new _TopicPageImpl(this, pageSize, response); + }); + } + + Future createSubscription( + String name, String topic, {Uri endpoint}) { + _checkSubscriptionName(name); + _checkTopicName(topic); + return _createSubscription(_fullSubscriptionName(name), + _fullTopicName(topic), + endpoint: endpoint) + .then((sub) => new _SubscriptionImpl(this, sub)); + } + + Future deleteSubscription(String name) { + _checkSubscriptionName(name); + return _deleteSubscription(_fullSubscriptionName(name)); + } + + Future lookupSubscription(String name) { + _checkSubscriptionName(name); + return _getSubscription(_fullSubscriptionName(name)) + .then((sub) => new _SubscriptionImpl(this, sub)); + } + + Stream listSubscriptions() { + bool paused = false; + Page currentPage; + StreamController controller; + + handlePage(Page page) { + currentPage = page; + page.items.forEach(controller.add); + if (page.isLast) { + controller.close(); + } else if (!paused) { + page.next().then(handlePage); + } + } + + onPause() => paused = true; + onResume() { + paused = false; + currentPage.next().then(handlePage); + } + + controller = new StreamController(onPause: onPause, onResume: onResume); + + pageSubscriptions().then(handlePage); + + return controller.stream; + } + + Future> pageSubscriptions({int pageSize: 50}) { + return _listSubscriptions(pageSize).then((response) { + return new _SubscriptionPageImpl(this, pageSize, response); + }); + } +} + +/// Message class for messages constructed through 'new Message()'. It stores +/// the user supplied body as either String or bytes. +class _MessageImpl implements Message { + // The message body, if it is a `String`. In that case, [bytesMessage] is + // null. + final String _stringMessage; + + // The message body, if it is a byte list. In that case, [stringMessage] is + // null. + final List _bytesMessage; + + final Map labels; + + _MessageImpl.withString(this._stringMessage, {this.labels}) + : _bytesMessage = null; + + _MessageImpl.withBytes(this._bytesMessage, {this.labels}) + : _stringMessage = null; + + List get asBytes => + _bytesMessage != null ? _bytesMessage : UTF8.encode(_stringMessage); + + String get asString => + _stringMessage != null ? _stringMessage : UTF8.decode(_bytesMessage); +} + +/// Message received using [Subscription.pull]. 
+/// +/// Contains the [pubsub.PubsubMessage] received from Pub/Sub, and +/// makes the message body and labels available on request. +/// +/// The labels map is lazily created when first accessed. +class _PullMessage implements Message { + final pubsub.PubsubMessage _message; + List _bytes; + String _string; + Map _labels; + + _PullMessage(this._message); + + List get asBytes { + if (_bytes == null) _bytes = _message.dataAsBytes; + return _bytes; + } + + String get asString { + if (_string == null) _string = UTF8.decode(_message.dataAsBytes); + return _string; + } + + Map get labels { + if (_labels == null) { + _labels = {}; + _message.label.forEach((label) { + _labels[label.key] = + label.numValue != null ? label.numValue : label.strValue; + }); + } + return _labels; + } +} + +/// Message received through Pub/Sub push delivery. +/// +/// Stores the message body received from Pub/Sub as the Base64 encoded string +/// from the wire protocol. +/// +/// The labels have been decoded into a Map. +class _PushMessage implements Message { + final String _base64Message; + final Map labels; + + _PushMessage(this._base64Message, this.labels); + + List get asBytes => CryptoUtils.base64StringToBytes(_base64Message); + + String get asString => UTF8.decode(asBytes); +} + +/// Pull event received from Pub/Sub pull delivery. +/// +/// Stores the pull response received from Pub/Sub. +class _PullEventImpl implements PullEvent { + /// Pub/Sub API object. + final _PubSubImpl _api; + /// Low level response received from Pub/Sub. + final pubsub.PullResponse _response; + final Message message; + + _PullEventImpl(this._api, response) + : this._response = response, + message = new _PullMessage(response.pubsubEvent.message); + + bool get isTruncated => _response.pubsubEvent.truncated; + + Future acknowledge() { + return _api._ack(_response.ackId, _response.pubsubEvent.subscription); + } + +} + +/// Push event received from Pub/Sub push delivery. +/// +/// decoded from JSON encoded push HTTP request body. 
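+///
+/// The JSON is expected to have the shape handled by `fromJson` below
+/// (the field values shown here are only illustrative):
+///
+///     {
+///       "message": {
+///         "data": "<base64 encoded message body>",
+///         "labels": [{"key": "example", "strValue": "value"}]
+///       },
+///       "subscription": "<project>/<subscription-name>"
+///     }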
+class _PushEventImpl implements PushEvent { + final Message _message; + final String _subscriptionName; + + Message get message => _message; + + String get subscriptionName => _subscriptionName; + + _PushEventImpl(this._message, this._subscriptionName); + + factory _PushEventImpl.fromJson(String json) { + var body = JSON.decode(json); + var data = body['message']['data']; + var labels = {}; + body['message']['labels'].forEach((label) { + var key = label['key']; + var value = label['strValue']; + if (value == null) value = label['numValue']; + labels[key] = value; + }); + return new _PushEventImpl( + new _PushMessage(data, labels), + '/subscriptions/' + body['subscription']); + } +} + +class _TopicImpl implements Topic { + final _PubSubImpl _api; + final pubsub.Topic _topic; + + _TopicImpl(this._api, this._topic); + + String get name { + assert(_topic.name.startsWith(_api._topicPrefix)); + return _topic.name.substring(_api._topicPrefix.length); + } + + String get project { + assert(_topic.name.startsWith(_api._topicPrefix)); + return _api.project; + } + + String get absoluteName => _topic.name; + + Future publish(Message message) { + return _api._publish(_topic.name, message.asBytes, message.labels); + } + + Future delete() => _api._deleteTopic(_topic.name); + + Future publishString(String message, {Map labels}) { + return _api._publish(_topic.name, UTF8.encode(message), labels); + } + + Future publishBytes(List message, {Map labels}) { + return _api._publish(_topic.name, message, labels); + } +} + +class _SubscriptionImpl implements Subscription { + final _PubSubImpl _api; + final pubsub.Subscription _subscription; + + _SubscriptionImpl(this._api, this._subscription); + + String get name { + assert(_subscription.name.startsWith(_api._subscriptionPrefix)); + return _subscription.name.substring(_api._subscriptionPrefix.length); + } + + String get project { + assert(_subscription.name.startsWith(_api._subscriptionPrefix)); + return _api.project; + } + + String get absoluteName => _subscription.name; + + Topic get topic { + var topic = new pubsub.Topic()..name = _subscription.topic; + return new _TopicImpl(_api, topic); + } + + Future delete() => _api._deleteSubscription(_subscription.name); + + Future pull({bool noWait: true}) { + return _api._pull(_subscription.name, noWait) + .then((response) { + return new _PullEventImpl(_api, response); + }).catchError((e) => null, + test: (e) => e is pubsub.DetailedApiRequestError && + e.status == 400); + } + + Uri get endpoint => null; + + bool get isPull => endpoint == null; + + bool get isPush => endpoint != null; + + Future updatePushConfiguration(Uri endpoint) { + return _api._modifyPushConfig(_subscription.name, endpoint); + } +} + +class _TopicPageImpl implements Page { + final _PubSubImpl _api; + final int _pageSize; + final String _nextPageToken; + final List items; + + _TopicPageImpl(this._api, + this._pageSize, + pubsub.ListTopicsResponse response) + : items = new List(response.topic.length), + _nextPageToken = response.nextPageToken { + for (int i = 0; i < response.topic.length; i++) { + items[i] = new _TopicImpl(_api, response.topic[i]); + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (isLast) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _api._listTopics(pageSize, _nextPageToken).then((response) { + return new _TopicPageImpl(_api, pageSize, response); + }); + } +} + +class _SubscriptionPageImpl implements Page { + final _PubSubImpl _api; + final 
int _pageSize; + final String _nextPageToken; + final List items; + + _SubscriptionPageImpl(this._api, + this._pageSize, + pubsub.ListSubscriptionsResponse response) + : items = new List(response.subscription != null + ? response.subscription.length + : 0), + _nextPageToken = response.nextPageToken{ + if (response.subscription != null) { + for (int i = 0; i < response.subscription.length; i++) { + items[i] = new _SubscriptionImpl(_api, response.subscription[i]); + } + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (_nextPageToken == null) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _api._listSubscriptions(pageSize, _nextPageToken).then((response) { + return new _SubscriptionPageImpl(_api, pageSize, response); + }); + } +} \ No newline at end of file diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml new file mode 100644 index 00000000..76ac800a --- /dev/null +++ b/pkgs/gcloud/pubspec.yaml @@ -0,0 +1,17 @@ +name: gcloud +version: 0.0.1-dev +description: Dart gcloud APIs +environment: + sdk: '>=1.5.0 <2.0.0' +dependencies: + crypto: '>=0.9.0 <0.10.0' + googleapis_auth: '>=0.1.0 <0.2.0' + googleapis_beta: '>=0.1.0 <0.2.0' +dev_dependencies: + http: '>=0.11.0 <0.12.0' + unittest: '>=0.11.0 <0.12.0' +dependency_overrides: + googleapis_auth: + path: ../googleapis_auth + googleapis_beta: + path: ../googleapis/generated/googleapis_beta diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub_test.dart new file mode 100644 index 00000000..996933ac --- /dev/null +++ b/pkgs/gcloud/test/pubsub_test.dart @@ -0,0 +1,689 @@ +import 'dart:async'; +import 'dart:convert'; + +import 'package:http/http.dart' as http; +import 'package:http/testing.dart' as http_testing; +import 'package:unittest/unittest.dart'; + +import 'package:gcloud/pubsub.dart'; + +import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; + +const PROJECT = 'test-project'; +const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; +const RESPONSE_HEADERS = const { + 'content-type': CONTENT_TYPE_JSON_UTF8 +}; + +const String ROOT_PATH = '/pubsub/v1beta1/'; +final Uri ROOT_URI = Uri.parse('https://www.googleapis.com$ROOT_PATH'); + +class MockClient extends http.BaseClient { + Map> mocks = {}; + http_testing.MockClient client; + + MockClient() { + client = new http_testing.MockClient(handler); + } + + void register(String method, Pattern path, + http_testing.MockClientHandler handler) { + mocks.putIfAbsent(method, () => new Map())[path] = handler; + } + + void clear() { + mocks = {}; + } + + Future handler(http.Request request) { + expect(request.url.host, 'www.googleapis.com'); + expect(request.url.path.startsWith(ROOT_PATH), isTrue); + var path = request.url.path.substring(ROOT_PATH.length); + if (mocks[request.method] == null) { + throw 'No mock handler for method ${request.method} found. ' + 'Request URL was: ${request.url}'; + } + var mockHandler; + mocks[request.method].forEach((pattern, handler) { + if (pattern.matchAsPrefix(path) != null) { + mockHandler = handler; + } + }); + if (mockHandler == null) { + throw 'No mock handler for method ${request.method} and path ' + '[$path] found. 
Request URL was: ${request.url}'; + } + return mockHandler(request); + } + + Future send(http.BaseRequest request) { + return client.send(request); + } + + Future respond(response) { + return new Future.value( + new http.Response( + JSON.encode(response.toJson()), 200, headers: RESPONSE_HEADERS)); + } + + Future respondEmpty() { + return new Future.value( + new http.Response('', 200, headers: RESPONSE_HEADERS)); + } +} + +main() { + group('api', () { + var badTopicNames = [ + '/', '/topics', '/topics/$PROJECT', '/topics/$PROJECT/', + '/topics/${PROJECT}x', '/topics/${PROJECT}x/']; + + var badSubscriptionNames = [ + '/', '/subscriptions', '/subscriptions/$PROJECT', + '/subscriptions/$PROJECT/', '/subscriptions/${PROJECT}x', + '/subscriptions/${PROJECT}x/']; + + group('topic', () { + var name = 'test-topic'; + var absoluteName = '/topics/$PROJECT/test-topic'; + + test('create', () { + var mock = new MockClient(); + mock.register('POST', 'topics', expectAsync((request) { + var requestTopic = + new pubsub.Topic.fromJson(JSON.decode(request.body)); + expect(requestTopic.name, absoluteName); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.createTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.project, PROJECT); + expect(topic.absoluteName, absoluteName); + return api.createTopic(absoluteName).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.createTopic(name), throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.createTopic(name), throwsArgumentError); + }); + }); + + test('delete', () { + var mock = new MockClient(); + mock.register( + 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.deleteTopic(name).then(expectAsync((result) { + expect(result, isNull); + return api.deleteTopic(absoluteName).then(expectAsync((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.deleteTopic(name), throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.deleteTopic(name), throwsArgumentError); + }); + }); + + test('lookup', () { + var mock = new MockClient(); + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.project, PROJECT); + expect(topic.absoluteName, absoluteName); + return api.lookupTopic(absoluteName).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.lookupTopic(name), 
throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.lookupTopic(name), throwsArgumentError); + }); + }); + + group('query', () { + var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; + var defaultPageSize = 50; + + addTopics(pubsub.ListTopicsResponse response, int first, int count) { + response.topic = []; + for (int i = 0; i < count; i++) { + response.topic.add(new pubsub.Topic()..name = 'topic-${first + i}'); + } + } + + // Mock that expect/generates [n] topics in pages of page size + // [pageSize]. + registerQueryMock(mock, n, pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + var pageCount = 0; + mock.register('GET', 'topics', expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['query'], query); + expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = new pubsub.ListTopicsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addTopics(response, first, pageSize); + } else { + addTopics(response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalPages)); + } + + group('list', () { + test('empty', () { + var mock = new MockClient(); + registerQueryMock(mock, 0, 50); + + var api = new PubSub(mock, PROJECT); + return api.listTopics().listen( + ((_) => throw 'Unexpected'), + onDone: expectAsync(() => null)); + }); + + test('single', () { + var mock = new MockClient(); + registerQueryMock(mock, 10, 50); + + var api = new PubSub(mock, PROJECT); + return api.listTopics().listen( + expectAsync(((_) => null), count: 10), + onDone: expectAsync(() => null)); + }); + + test('multiple', () { + var mock = new MockClient(); + registerQueryMock(mock, 170, 50); + + var api = new PubSub(mock, PROJECT); + return api.listTopics().listen( + expectAsync(((_) => null), count: 170), + onDone: expectAsync(() => null)); + }); + }); + + group('page', () { + test('empty', () { + var mock = new MockClient(); + registerQueryMock(mock, 0, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageTopics().then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 0, 20); + return api.pageTopics(pageSize: 20).then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('single', () { + var mock = new MockClient(); + registerQueryMock(mock, 10, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageTopics().then(expectAsync((page) { + expect(page.items.length, 10); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 20, 20); + return api.pageTopics(pageSize: 20).then(expectAsync((page) { + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('multiple', () { + runTest(n, pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = new Completer(); + var mock = new MockClient(); + registerQueryMock(mock, n, pageSize); + + handlePage(page) { + pageCount++; + 
expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? n - (totalPages - 1) * pageSize + : pageSize ); + page.next().then(expectAsync((page) { + if (page != null) { + handlePage(page); + } else { + expect(pageCount, totalPages); + completer.complete(); + } + })); + } + + var api = new PubSub(mock, PROJECT); + api.pageTopics(pageSize: pageSize).then(expectAsync(handlePage)); + + return completer.future; + } + + return runTest(70, 50) + .then((_) => runTest(99, 1)) + .then((_) => runTest(99, 50)) + .then((_) => runTest(99, 98)) + .then((_) => runTest(99, 99)) + .then((_) => runTest(99, 100)) + .then((_) => runTest(100, 1)) + .then((_) => runTest(100, 50)) + .then((_) => runTest(100, 100)) + .then((_) => runTest(101, 50)); + }); + }); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var topicName = 'test-topic'; + var absoluteTopicName = '/topics/$PROJECT/test-topic'; + + test('create', () { + var mock = new MockClient(); + mock.register('POST', 'subscriptions', expectAsync((request) { + var requestSubscription = + new pubsub.Subscription.fromJson(JSON.decode(request.body)); + expect(requestSubscription.name, absoluteName); + return mock.respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.createSubscription(name, topicName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api.createSubscription(absoluteName, absoluteTopicName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.project, PROJECT); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.createSubscription(name, 'test-topic'), + throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.createSubscription('test-subscription', name), + throwsArgumentError); + }); + }); + + test('delete', () { + var mock = new MockClient(); + mock.register( + 'DELETE', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.deleteSubscription(name).then(expectAsync((result) { + expect(result, isNull); + return api.deleteSubscription(absoluteName).then(expectAsync((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + }); + }); + + test('lookup', () { + var mock = new MockClient(); + mock.register( + 'GET', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.lookupSubscription(name).then(expectAsync((subscription) { + expect(subscription.name, name); + 
expect(subscription.absoluteName, absoluteName); + return api.lookupSubscription(absoluteName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.project, PROJECT); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = new MockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + }); + }); + + group('query', () { + var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; + var defaultPageSize = 50; + + addSubscriptions( + pubsub.ListSubscriptionsResponse response, int first, int count) { + response.subscription = []; + for (int i = 0; i < count; i++) { + response.subscription.add( + new pubsub.Subscription()..name = 'subscription-${first + i}'); + } + } + + // Mock that expect/generates [n] subscriptions in pages of page size + // [pageSize]. + registerQueryMock(mock, n, pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + var pageCount = 0; + mock.register('GET', 'subscriptions', expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['query'], query); + expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = new pubsub.ListSubscriptionsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addSubscriptions(response, first, pageSize); + } else { + addSubscriptions( + response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalPages)); + } + + group('list', () { + test('empty', () { + var mock = new MockClient(); + registerQueryMock(mock, 0, 50); + + var api = new PubSub(mock, PROJECT); + return api.listSubscriptions().listen( + ((_) => throw 'Unexpected'), + onDone: expectAsync(() => null)); + }); + + test('single', () { + var mock = new MockClient(); + registerQueryMock(mock, 10, 50); + + var api = new PubSub(mock, PROJECT); + return api.listSubscriptions().listen( + expectAsync(((_) => null), count: 10), + onDone: expectAsync(() => null)); + }); + + test('multiple', () { + var mock = new MockClient(); + registerQueryMock(mock, 170, 50); + + var api = new PubSub(mock, PROJECT); + return api.listSubscriptions().listen( + expectAsync(((_) => null), count: 170), + onDone: expectAsync(() => null)); + }); + }); + + group('page', () { + test('empty', () { + var mock = new MockClient(); + registerQueryMock(mock, 0, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageSubscriptions().then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 0, 20); + return api.pageSubscriptions(pageSize: 20) + .then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('single', () { + var mock = new MockClient(); + registerQueryMock(mock, 10, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageSubscriptions().then(expectAsync((page) { + expect(page.items.length, 10); + 
expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 20, 20); + return api.pageSubscriptions(pageSize: 20) + .then(expectAsync((page) { + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('multiple', () { + runTest(n, pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = new Completer(); + var mock = new MockClient(); + registerQueryMock(mock, n, pageSize); + + handlingPage(page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? n - (totalPages - 1) * pageSize + : pageSize ); + page.next().then((page) { + if (page != null) { + handlingPage(page); + } else { + expect(pageCount, totalPages); + completer.complete(); + } + }); + } + + var api = new PubSub(mock, PROJECT); + api.pageSubscriptions(pageSize: pageSize).then(handlingPage); + + return completer.future; + } + + return runTest(70, 50) + .then((_) => runTest(99, 1)) + .then((_) => runTest(99, 50)) + .then((_) => runTest(99, 98)) + .then((_) => runTest(99, 99)) + .then((_) => runTest(99, 100)) + .then((_) => runTest(100, 1)) + .then((_) => runTest(100, 50)) + .then((_) => runTest(100, 100)) + .then((_) => runTest(101, 50)); + }); + }); + }); + }); + }); + + group('topic', () { + var name = 'test-topic'; + var absoluteName = '/topics/$PROJECT/test-topic'; + + test('delete', () { + var mock = new MockClient(); + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + + mock.register( + 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return topic.delete().then(expectAsync((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var topicName = 'test-topic'; + var absoluteTopicName = '/topics/$PROJECT/test-topic'; + + test('delete', () { + var mock = new MockClient(); + mock.register( + 'GET', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + + var api = new PubSub(mock, PROJECT); + return api.lookupSubscription(name).then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + + mock.register( + 'DELETE', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return subscription.delete().then(expectAsync((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('push', () { + var requestBody = + '{"message":{"data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==",' + '"labels":[{"key":"messageNo","numValue":30},' + 
'{"key":"test","strValue":"hello"}]},' + '"subscription":"sgjesse-managed-vm/test-push-subscription"}'; + var event = new PushEvent.fromJson(requestBody); + expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.labels['messageNo'], 30); + expect(event.message.labels['test'], 'hello'); + }); +} From b1bbcf9f8b2055183f6562c77f0af1e95d46324b Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Tue, 9 Sep 2014 09:18:41 +0200 Subject: [PATCH 003/239] Bulk import of initial implementation of cloud_datastore Changes: - renamed RawDataStore to Datastore (db.dart contains still DatastoreDB) - renamed library names R=sgjesse@google.com Review URL: https://codereview.chromium.org//546813002 --- pkgs/gcloud/lib/datastore.dart | 214 ++++++++++ pkgs/gcloud/lib/db.dart | 17 + pkgs/gcloud/lib/db/metamodel.dart | 32 ++ pkgs/gcloud/lib/src/db/annotations.dart | 10 + pkgs/gcloud/lib/src/db/db.dart | 364 +++++++++++++++++ pkgs/gcloud/lib/src/db/model_db.dart | 311 ++++++++++++++ pkgs/gcloud/lib/src/db/model_description.dart | 380 ++++++++++++++++++ pkgs/gcloud/lib/src/db/models.dart | 153 +++++++ pkgs/gcloud/lib/src/db/properties.dart | 165 ++++++++ pkgs/gcloud/pubspec.yaml | 3 +- pkgs/gcloud/test/db/model_db_test.dart | 57 +++ .../db/model_dbs/duplicate_fieldname.dart | 26 ++ .../test/db/model_dbs/duplicate_kind.dart | 23 ++ .../test/db/model_dbs/duplicate_property.dart | 17 + .../db/model_dbs/invalid_id_property.dart | 15 + .../db/model_dbs/multiple_annotations.dart | 16 + .../db/model_dbs/no_default_constructor.dart | 17 + pkgs/gcloud/test/db/properties_test.dart | 177 ++++++++ 18 files changed, 1995 insertions(+), 2 deletions(-) create mode 100644 pkgs/gcloud/lib/datastore.dart create mode 100644 pkgs/gcloud/lib/db.dart create mode 100644 pkgs/gcloud/lib/db/metamodel.dart create mode 100644 pkgs/gcloud/lib/src/db/annotations.dart create mode 100644 pkgs/gcloud/lib/src/db/db.dart create mode 100644 pkgs/gcloud/lib/src/db/model_db.dart create mode 100644 pkgs/gcloud/lib/src/db/model_description.dart create mode 100644 pkgs/gcloud/lib/src/db/models.dart create mode 100644 pkgs/gcloud/lib/src/db/properties.dart create mode 100644 pkgs/gcloud/test/db/model_db_test.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/duplicate_property.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart create mode 100644 pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart create mode 100644 pkgs/gcloud/test/db/properties_test.dart diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart new file mode 100644 index 00000000..80c02e4b --- /dev/null +++ b/pkgs/gcloud/lib/datastore.dart @@ -0,0 +1,214 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library gcloud.datastore; + +import 'dart:async'; + +class DatastoreError implements Exception { + final String message; + + DatastoreError([String message]) : message = + (message != null ?message : 'DatastoreError: An unknown error occured'); +} + +class UnknownDatastoreError extends DatastoreError { + UnknownDatastoreError(error) : super("An unknown error occured ($error)."); +} + +class TransactionAbortedError extends DatastoreError { + TransactionAbortedError() : super("The transaction was aborted."); +} + +class TimeoutError extends DatastoreError { + TimeoutError() : super("The operation timed out."); +} + +class NeedIndexError extends DatastoreError { + NeedIndexError() + : super("An index is needed for the query to succeed."); +} + +class PermissionDeniedError extends DatastoreError { + PermissionDeniedError() : super("Permission denied."); +} + +class InternalError extends DatastoreError { + InternalError() : super("Internal service error."); +} + +class QuotaExceededError extends DatastoreError { + QuotaExceededError(error) : super("Quota was exceeded ($error)."); +} + + +class Entity { + final Key key; + final Map properties; + final Set unIndexedProperties; + + Entity(this.key, this.properties, {this.unIndexedProperties}); +} + +class Key { + final Partition partition; + final List elements; + + Key(this.elements, {Partition partition}) + : this.partition = (partition == null) ? Partition.DEFAULT : partition; + + factory Key.fromParent(String kind, int id, {Key parent}) { + var partition; + var elements = []; + if (parent != null) { + partition = parent.partition; + elements.addAll(parent.elements); + } + elements.add(new KeyElement(kind, id)); + return new Key(elements, partition: partition); + } + + int get hashCode => + elements.fold(partition.hashCode, (a, b) => a ^ b.hashCode); + + bool operator==(Object other) { + if (identical(this, other)) return true; + + if (other is Key && + partition == other.partition && + elements.length == other.elements.length) { + for (int i = 0; i < elements.length; i++) { + if (elements[i] != other.elements[i]) return false; + } + return true; + } + return false; + } + + String toString() { + var namespaceString = + partition.namespace == null ? 'null' : "'${partition.namespace}'"; + return "Key(namespace=$namespaceString, path=[${elements.join(', ')}])"; + } +} + +class Partition { + static const Partition DEFAULT = const Partition._default(); + + final String namespace; + + Partition(this.namespace) { + if (namespace == '') { + throw new ArgumentError("'namespace' must not be empty"); + } + } + + const Partition._default() : this.namespace = null; + + int get hashCode => namespace.hashCode; + + bool operator==(Object other) => + other is Partition && namespace == other.namespace; +} + +class KeyElement { + final String kind; + final id; // either int or string + + KeyElement(this.kind, this.id) { + if (kind == null) { + throw new ArgumentError("'kind' must not be null"); + } + if (id != null) { + if (id is! int && id is! 
String) {
+        throw new ArgumentError("'id' must be either null, a String or an int");
+      }
+    }
+  }
+
+  int get hashCode => kind.hashCode ^ id.hashCode;
+
+  bool operator==(Object other) =>
+      other is KeyElement && kind == other.kind && id == other.id;
+
+  String toString() => "$kind.$id";
+}
+
+class FilterRelation {
+  static const FilterRelation LessThan = const FilterRelation._('<');
+  static const FilterRelation LessThanOrEqual = const FilterRelation._('<=');
+  static const FilterRelation GreatherThan = const FilterRelation._('>');
+  static const FilterRelation GreatherThanOrEqual =
+      const FilterRelation._('>=');
+  static const FilterRelation Equal = const FilterRelation._('==');
+  static const FilterRelation In = const FilterRelation._('IN');
+
+  final String name;
+  const FilterRelation._(this.name);
+}
+
+class Filter {
+  final FilterRelation relation;
+  final String name;
+  final Object value;
+
+  Filter(this.relation, this.name, this.value);
+}
+
+class OrderDirection {
+  static const OrderDirection Ascending = const OrderDirection._('Ascending');
+  static const OrderDirection Decending = const OrderDirection._('Decending');
+
+  final String name;
+  const OrderDirection._(this.name);
+}
+
+class Order {
+  final OrderDirection direction;
+  final String propertyName;
+
+  Order(this.direction, this.propertyName);
+}
+
+class Query {
+  final String kind;
+  final Key ancestorKey;
+  final List filters;
+  final List orders;
+  final int offset;
+  final int limit;
+
+  Query({this.ancestorKey, this.kind, this.filters, this.orders,
+         this.offset, this.limit});
+}
+
+class CommitResult {
+  final List autoIdInsertKeys;
+
+  CommitResult(this.autoIdInsertKeys);
+}
+
+class BlobValue {
+  final List bytes;
+  BlobValue(this.bytes);
+}
+
+abstract class Transaction { }
+
+abstract class Datastore {
+  Future<List<Key>> allocateIds(List keys);
+
+  Future beginTransaction({bool crossEntityGroup: false});
+
+  // Can throw a [TransactionAbortedError] error.
+  Future commit({List inserts,
+                 List autoIdInserts,
+                 List deletes,
+                 Transaction transaction});
+  Future rollback(Transaction transaction);
+
+  Future<List<Entity>> lookup(List keys, {Transaction transaction});
+  Future<List<Entity>> query(
+      Query query, {Partition partition, Transaction transaction});
+}
diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart
new file mode 100644
index 00000000..9ef333f2
--- /dev/null
+++ b/pkgs/gcloud/lib/db.dart
@@ -0,0 +1,17 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library gcloud.db;
+
+import 'dart:async';
+import 'dart:collection';
+import 'dart:mirrors' as mirrors;
+import 'datastore.dart' as datastore;
+
+part 'src/db/annotations.dart';
+part 'src/db/db.dart';
+part 'src/db/models.dart';
+part 'src/db/model_db.dart';
+part 'src/db/model_description.dart';
+part 'src/db/properties.dart';
diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart
new file mode 100644
index 00000000..d30dd87a
--- /dev/null
+++ b/pkgs/gcloud/lib/db/metamodel.dart
@@ -0,0 +1,32 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
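+
+// A usage sketch (illustrative; `db` is assumed to be a configured
+// [DatastoreDB] instance): listing all datastore namespaces could look like
+//
+//     db.query(Namespace).run().then((namespaces) { ... });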
+ +library gcloud.db.meta_model; + +import '../db.dart'; + +@ModelMetadata(const NamespaceDescription()) +class Namespace extends ExpandoModel { + String get name { + // The default namespace will be reported with id 1. + if (id == NamespaceDescription.EmptyNamespaceId) return null; + return id; + } +} + +@ModelMetadata(const KindDescription()) +class Kind extends Model { + String get name => id; +} + +class NamespaceDescription extends ExpandoModelDescription { + static const int EmptyNamespaceId = 1; + final id = const IntProperty(); + const NamespaceDescription() : super('__namespace__'); +} + +class KindDescription extends ModelDescription { + final id = const IntProperty(); + const KindDescription() : super('__kind__'); +} diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart new file mode 100644 index 00000000..3ec409bc --- /dev/null +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -0,0 +1,10 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + +class ModelMetadata { + final ModelDescription description; + const ModelMetadata(this.description); +} diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart new file mode 100644 index 00000000..0999613b --- /dev/null +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -0,0 +1,364 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + +class Transaction { + static const int _TRANSACTION_STARTED = 0; + static const int _TRANSACTION_ROLLED_BACK = 1; + static const int _TRANSACTION_COMMITTED = 2; + + final DatastoreDB db; + final datastore.Transaction _datastoreTransaction; + + final List _inserts = []; + final List _deletes = []; + + int _transactionState = _TRANSACTION_STARTED; + + Transaction(this.db, this._datastoreTransaction); + + /** + * Looks up [keys] within this transaction. + */ + Future> lookup(List keys) { + return _lookupHelper(db, keys, datastoreTransaction: _datastoreTransaction); + } + + /** + * Enqueues [inserts] and [deletes] which should be commited at commit time. + */ + void queueMutations({List inserts, List deletes}) { + _checkSealed(); + if (inserts != null) { + _inserts.addAll(inserts); + } + if (deletes != null) { + _deletes.addAll(deletes); + } + } + + /** + * Query for [kind] models with [ancestorKey]. + * + * Note that [ancestorKey] is required, since a transaction is not allowed to + * touch/look at an arbitrary number of rows. + */ + Query query(Type kind, Key ancestorKey, {Partition partition}) { + _checkSealed(); + var modelDescription = db.modelDB.modelDescriptionForType(kind); + var query = new Query(db, + modelDescription, + partition: partition, + ancestorKey: ancestorKey, + datastoreTransaction: _datastoreTransaction); + return modelDescription.finishQuery(db.modelDB, query); + } + + /** + * Rolls this transaction back. + */ + Future rollback() { + _checkSealed(changeState: _TRANSACTION_ROLLED_BACK); + return db.datastore.rollback(_datastoreTransaction); + } + + /** + * Commits this transaction including all of the queued mutations. 
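+   *
+   * A usage sketch (illustrative; `db`, `modelA` and `keyB` are made up):
+   *
+   *     db.beginTransaction().then((Transaction transaction) {
+   *       transaction.queueMutations(inserts: [modelA], deletes: [keyB]);
+   *       return transaction.commit();
+   *     });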
+ */ + Future commit() { + _checkSealed(changeState: _TRANSACTION_COMMITTED); + return _commitHelper(db, + inserts: _inserts, + deletes: _deletes, + datastoreTransaction: _datastoreTransaction); + } + + _checkSealed({int changeState}) { + if (_transactionState == _TRANSACTION_COMMITTED) { + throw new StateError( + 'The transaction has already been committed.'); + } else if (_transactionState == _TRANSACTION_ROLLED_BACK) { + throw new StateError( + 'The transaction has already been rolled back.'); + } + if (changeState != null) { + _transactionState = changeState; + } + } +} + +class Query { + final _relationMapping = const { + '<': datastore.FilterRelation.LessThan, + '<=': datastore.FilterRelation.LessThanOrEqual, + '>': datastore.FilterRelation.GreatherThan, + '>=': datastore.FilterRelation.GreatherThanOrEqual, + '=': datastore.FilterRelation.Equal, + 'IN': datastore.FilterRelation.In, + }; + + final DatastoreDB _db; + final datastore.Transaction _transaction; + final String _kind; + final ModelDescription _modelDescription; + + final Partition _partition; + final Key _ancestorKey; + + final List _filters = []; + final List _orders = []; + int _offset; + int _limit; + + Query(DatastoreDB dbImpl, ModelDescription modelDescription, + {Partition partition, Key ancestorKey, + datastore.Transaction datastoreTransaction}) + : _db = dbImpl, _kind = modelDescription.kindName(dbImpl.modelDB), + _modelDescription = modelDescription, _partition = partition, + _ancestorKey = ancestorKey, _transaction = datastoreTransaction; + + /** + * Adds a filter to this [Query]. + * + * [filterString] has form "name OP" where 'name' is a fieldName of the + * model and OP is an operator (e.g. "name >="). + * + * [comparisonObject] is the object for comparison. + */ + void filter(String filterString, Object comparisonObject) { + var parts = filterString.split(' '); + if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) { + throw new ArgumentError( + "Invalid filter string '$filterString'."); + } + + // TODO: do value transformation on [comparisionObject] + + var propertyName = _convertToDatastoreName(parts[0]); + _filters.add(new datastore.Filter( + _relationMapping[parts[1]], propertyName, comparisonObject)); + } + + /** + * Adds an order to this [Query]. + * + * [orderString] has the form "-name" where 'name' is a fieldName of the model + * and the optional '-' says whether the order is decending or ascending. + */ + void order(String orderString) { + // TODO: validate [orderString] (e.g. is name valid) + if (orderString.startsWith('-')) { + _orders.add(new datastore.Order( + datastore.OrderDirection.Decending, + _convertToDatastoreName(orderString.substring(1)))); + } else { + _orders.add(new datastore.Order( + datastore.OrderDirection.Ascending, + _convertToDatastoreName(orderString))); + } + } + + /** + * Sets the [offset] of this [Query]. + * + * When running this query, [offset] results will be skipped. + */ + void offset(int offset) { + _offset = offset; + } + + /** + * Sets the [limit] of this [Query]. + * + * When running this query, a maximum of [limit] results will be returned. + */ + void limit(int limit) { + _limit = limit; + } + + /** + * Execute this [Query] on the datastore. + * + * Outside of transactions this method might return stale data or may not + * return the newest updates performed on the datastore since updates + * will be reflected in the indices in an eventual consistent way. 
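+   *
+   * A usage sketch (illustrative; the `Person` model and its fields are
+   * made up):
+   *
+   *     var query = db.query(Person)
+   *         ..filter('age >=', 18)
+   *         ..order('-age')
+   *         ..limit(10);
+   *     query.run().then((matches) { ... });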
+ */ + Future> run() { + var ancestorKey; + if (_ancestorKey != null) { + ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); + } + var query = new datastore.Query( + ancestorKey: ancestorKey, kind: _kind, + filters: _filters, orders: _orders, + offset: _offset, limit: _limit); + + var partition; + if (_partition != null) { + partition = new datastore.Partition(_partition.namespace); + } + + return _db.datastore.query( + query, transaction: _transaction, partition: partition) + .then((List entities) { + return entities.map(_db.modelDB.fromDatastoreEntity).toList(); + }); + } + + String _convertToDatastoreName(String name) { + var propertyName = + _modelDescription.fieldNameToPropertyName(_db.modelDB, name); + if (propertyName == null) { + throw new ArgumentError( + "Field $name is not available for kind $_modelDescription"); + } + return propertyName; + } +} + +class DatastoreDB { + final datastore.Datastore datastore; + final ModelDB _modelDB; + Partition _defaultPartition; + + DatastoreDB(this.datastore, {ModelDB modelDB}) + : _modelDB = modelDB != null ? modelDB : new ModelDB() { + _defaultPartition = new Partition(null); + } + + /** + * The [ModelDB] used to serialize/deserialize objects. + */ + ModelDB get modelDB => _modelDB; + + /** + * Gets the empty key using the default [Partition]. + * + * Model keys with parent set to [emptyKey] will create their own entity + * groups. + */ + Key get emptyKey => defaultPartition.emptyKey; + + /** + * Gets the default [Partition]. + */ + Partition get defaultPartition => _defaultPartition; + + /** + * Creates a new [Partition] with namespace [namespace]. + */ + Partition newPartition(String namespace) { + return new Partition(namespace); + } + + /** + * Begins a new a new transaction. + * + * A normal transaction can only touch entities inside one entity group. By + * setting [crossEntityGroup] to `true` it is possible to touch up to + * five entity groups. + * + * Cross entity group transactions come with a cost, due to the fact that + * a two-phase commit protocol will be used. So it will result in higher + * latency. + */ + Future beginTransaction({bool crossEntityGroup: false}) { + return datastore.beginTransaction(crossEntityGroup: crossEntityGroup) + .then((datastore.Transaction transaction) { + return new Transaction(this, transaction); + }); + } + + /** + * Build a query for [kind] models. + */ + Query query(Type kind, {Partition partition, Key ancestorKey}) { + var modelDescription = modelDB.modelDescriptionForType(kind); + + var q = new Query(this, + modelDescription, + partition: partition, + ancestorKey: ancestorKey); + return modelDescription.finishQuery(modelDB, q); + } + + /** + * Looks up [keys] in the datastore and returns a list of [Model] objects. + * + * For transactions, please use [beginTransaction] and call the [lookup] + * method on it's returned [Transaction] object. + */ + Future> lookup(List keys) { + return _lookupHelper(this, keys); + } + + /** + * Add [inserts] to the datastore and remove [deletes] from it. + * + * The order of inserts and deletes is not specified. When the commit is done + * direct lookups will see the effect but non-ancestor queries will see the + * change in an eventual consistent way. + * + * For transactions, please use `beginTransaction` and it's returned + * [Transaction] object. 
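+   *
+   * A usage sketch (illustrative; `updatedModel` and `obsoleteKey` are
+   * made up):
+   *
+   *     db.commit(inserts: [updatedModel], deletes: [obsoleteKey])
+   *         .then((_) => print('done'));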
+ */ + Future commit({List inserts, List deletes}) { + return _commitHelper(this, inserts: inserts, deletes: deletes); + } +} + +Future _commitHelper(DatastoreDB db, + {List inserts, + List deletes, + datastore.Transaction datastoreTransaction}) { + var entityInserts, entityAutoIdInserts, entityDeletes; + var autoIdModelInserts; + if (inserts != null) { + entityInserts = []; + entityAutoIdInserts = []; + autoIdModelInserts = []; + + for (var model in inserts) { + // If parent was not explicity set, we assume this model will map to + // it's own entity group. + if (model.parentKey == null) { + model.parentKey = db.defaultPartition.emptyKey; + } + if (model.id == null) { + autoIdModelInserts.add(model); + entityAutoIdInserts.add(db.modelDB.toDatastoreEntity(model)); + } else { + entityInserts.add(db.modelDB.toDatastoreEntity(model)); + } + } + } + if (deletes != null) { + entityDeletes = deletes.map(db.modelDB.toDatastoreKey).toList(); + } + + return db.datastore.commit(inserts: entityInserts, + autoIdInserts: entityAutoIdInserts, + deletes: entityDeletes, + transaction: datastoreTransaction) + .then((datastore.CommitResult result) { + if (entityAutoIdInserts != null && entityAutoIdInserts.length > 0) { + for (var i = 0; i < result.autoIdInsertKeys.length; i++) { + var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]); + autoIdModelInserts[i].parentKey = key.parent; + autoIdModelInserts[i].id = key.id; + } + } + }); +} + +Future> _lookupHelper( + DatastoreDB db, List keys, + {datastore.Transaction datastoreTransaction}) { + var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList(); + return db.datastore.lookup(entityKeys, transaction: datastoreTransaction) + .then((List entities) { + return entities.map(db.modelDB.fromDatastoreEntity).toList(); + }); +} diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart new file mode 100644 index 00000000..6aa9c48f --- /dev/null +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -0,0 +1,311 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + + +// TODO: We might move some of the complexity of this class to +// [ModelDescription]! + +/** + * Represents an in-memory database of all model classes and it's corresponding + * [ModelDescriptions]s. + */ +class ModelDB { + // Map of properties for a given [ModelDescription] + final Map> _modelProperties = {}; + + // Arbitrary state a model description might want to have + final Map _modelDescriptionStates = {}; + + // Needed when getting data from datastore to instantiate model objects. + final Map _modelDescriptionByKind = {}; + final Map _modelClasses = {}; + final Map _typeByModelDescription = {}; + + // Needed when application gives us model objects. + final Map _modelDescriptionByType = {}; + + + /** + * Initializes a new [ModelDB] from all libraries. + * + * This will scan all libraries for [Model] classes and their + * [ModelDescription] annotations. It will also scan all [Property] instances + * on all [ModelDescription] objects. + * + * Once all libraries have been scanned it will call each [ModelDescription]s + * 'initialize' method and stores the returned state object (this can be + * queried later with [modelDescriptionState]. 
+ * + * Afterwards every [ModelDescription] will be asked whether it wants to + * register a kind name and if so, that kind name will be associated with it. + * + * In case an error is encountered (e.g. two [ModelDescription] classes with + * the same kind name) a [StateError] will be thrown. + */ + ModelDB() { + // WARNING: This is O(n) of the source code, which is very bad! + // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)` + _initialize(mirrors.currentMirrorSystem().libraries.values); + } + + /** + * Initializes a new [ModelDB] only using the library [librarySymbol]. + * + * See also the default [ModelDB] constructor. + */ + ModelDB.fromLibrary(Symbol librarySymbol) { + _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); + } + + + /** + * Converts a [datastore.Key] to a [Key]. + */ + Key fromDatastoreKey(datastore.Key datastoreKey) { + var namespace = new Partition(datastoreKey.partition.namespace); + Key key = namespace.emptyKey; + for (var element in datastoreKey.elements) { + var type = _typeByModelDescription[_modelDescriptionByKind[element.kind]]; + key = key.append(type, id: element.id); + } + return key; + } + + /** + * Converts a [Key] to a [datastore.Key]. + */ + datastore.Key toDatastoreKey(Key dbKey) { + List elements = []; + var currentKey = dbKey; + while (!currentKey.isEmpty) { + var id = currentKey.id; + + var modelDescription = modelDescriptionForType(currentKey.type); + var idProperty = + propertiesForModel(modelDescription)[ModelDescription.ID_FIELDNAME]; + var kind = modelDescription.kindName(this); + + if (idProperty is IntProperty && (id != null && id is! int)) { + throw new ArgumentError('Expected an integer id property but ' + 'id was of type ${id.runtimeType}'); + } + if (idProperty is StringProperty && (id != null && id is! String)) { + throw new ArgumentError('Expected a string id property but ' + 'id was of type ${id.runtimeType}'); + } + + elements.add(new datastore.KeyElement(kind, id)); + currentKey = currentKey.parent; + } + Partition partition = currentKey._parent; + return new datastore.Key( + elements.reversed.toList(), + partition: new datastore.Partition(partition.namespace)); + } + + /** + * Converts a [Model] instance to a [datastore.Entity]. + */ + datastore.Entity toDatastoreEntity(Model model) { + try { + var modelDescription = modelDescriptionForType(model.runtimeType); + return modelDescription.encodeModel(this, model); + } catch (error, stack) { + throw + new ArgumentError('Error while encoding entity ($error, $stack).'); + } + } + + /** + * Converts a [datastore.Entity] to a [Model] instance. 
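+   *
+   * The entity's kind determines which [Model] subclass is instantiated;
+   * passing `null` returns `null`.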
+ */ + Model fromDatastoreEntity(datastore.Entity entity) { + if (entity == null) return null; + + Key key = fromDatastoreKey(entity.key); + var kind = entity.key.elements.last.kind; + var modelDescription = _modelDescriptionByKind[kind]; + if (modelDescription == null) { + throw new StateError('Trying to deserialize entity of kind ' + '$kind, but no Model class available for it.'); + } + + try { + return modelDescription.decodeEntity(this, key, entity); + } catch (error, stack) { + throw new StateError('Error while decoding entity ($error, $stack).'); + } + } + + + Iterable get modelDescriptions { + return _modelDescriptionByType.values; + } + + Map propertiesForModel( + ModelDescription modelDescription) { + return _modelProperties[modelDescription]; + } + + ModelDescription modelDescriptionForType(Type type) { + return _modelDescriptionByType[type]; + } + + mirrors.ClassMirror modelClass(ModelDescription md) { + return _modelClasses[md]; + } + + modelDescriptionState(ModelDescription modelDescription) { + return _modelDescriptionStates[modelDescription]; + } + + + void _initialize(Iterable libraries) { + libraries.forEach((mirrors.LibraryMirror lm) { + lm.declarations.values + .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) + .forEach((mirrors.ClassMirror declaration) { + var modelDescription = _descriptionFromModelClass(declaration); + if (modelDescription != null) { + _newModelDescription(declaration, modelDescription); + } + }); + }); + + // Ask every [ModelDescription] to compute whatever global state it wants + // to have. + for (var modelDescription in modelDescriptions) { + _modelDescriptionStates[modelDescription] = + modelDescription.initialize(this); + } + + + // Ask every [ModelDescription] whether we should register it with a given + // kind name. + for (var modelDescription in modelDescriptions) { + if (modelDescription.registerKind(this)) { + var kindName = modelDescription.kindName(this); + if (_modelDescriptionByKind.containsKey(kindName)) { + throw new StateError( + 'Cannot have two ModelDescriptions ' + 'with the same kind ($kindName)'); + } + _modelDescriptionByKind[kindName] = modelDescription; + } + } + } + + void _newModelDescription(mirrors.ClassMirror modelClass, + ModelDescription modelDesc) { + assert (!_modelDescriptionByType.containsKey(modelClass.reflectedType)); + + // Map the [modelClass.runtimeType] to this [modelDesc] and vice versa. + _modelDescriptionByType[modelClass.reflectedType] = modelDesc; + _typeByModelDescription[modelDesc] = modelClass.reflectedType; + // Map this [modelDesc] to the [modelClass] mirror for easy instantiation. + _modelClasses[modelDesc] = modelClass; + + // TODO: Move this out to the model description classes. + + // Get all properties, validate that the 'id' property is valid. + var properties = _propertiesFromModelDescription(modelDesc); + var idProperty = properties[ModelDescription.ID_FIELDNAME]; + if (idProperty == null || + (idProperty is! IntProperty && idProperty is! StringProperty)) { + throw new StateError( + 'You need to have an id property and it has to be either an ' + '[IntProperty] or a [StringProperty].'); + } + if (idProperty.propertyName != null) { + throw new StateError( + 'You can not have a new name for the id property.'); + } + _modelProperties[modelDesc] = properties; + + // Ensure we have an empty constructor. 
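+    // ([decodeEntity] instantiates model objects via
+    // `classMirror.newInstance(const Symbol(''), [])`, so every model class
+    // needs a no-argument constructor.)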
+ bool defaultConstructorFound = false; + for (var declaration in modelClass.declarations.values) { + if (declaration is mirrors.MethodMirror) { + if (declaration.isConstructor && + declaration.constructorName == const Symbol('') && + declaration.parameters.length == 0) { + defaultConstructorFound = true; + break; + } + } + } + if (!defaultConstructorFound) { + throw new StateError( + 'Class ${modelClass.simpleName} does not have a default ' + 'constructor.'); + } + } + + // TODO: Move this out to the model description classes. + Map _propertiesFromModelDescription( + ModelDescription modelDescription) { + var modelMirror = mirrors.reflect(modelDescription); + var modelClassMirror = mirrors.reflectClass(modelDescription.runtimeType); + + var properties = new Map(); + var propertyNames = new Set(); + + // Loop over all classes in the inheritence path up to the Object class. + while (modelClassMirror.superclass != null) { + var memberMap = modelClassMirror.instanceMembers; + // Loop over all declarations (which includes fields) + modelClassMirror.declarations.forEach((Symbol s, _) { + // Look if we do have a method for [s] + if (memberMap.containsKey(s) && memberMap[s].isGetter) { + // Get a String representation of the field and the value. + var fieldName = mirrors.MirrorSystem.getName(s); + var fieldValue = modelMirror.getField(s).reflectee; + // If the field value is a Property instance we add it to the list + // of properties. + // Fields with '__' are reserved and will not be used. + if (!fieldName.startsWith('__') && + fieldValue != null && + fieldValue is Property) { + var propertyName = fieldValue.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (properties.containsKey(fieldName)) { + throw new StateError( + 'Cannot have two Property objects describing the same Model ' + 'property name in a ModelDescription class hierarchy.'); + } + + if (propertyNames.contains(propertyName)) { + throw new StateError( + 'Cannot have two Property objects mapping to the same ' + 'datastore property name ($propertyName).'); + } + properties[fieldName] = fieldValue; + propertyNames.add(propertyName); + } + } + }); + modelClassMirror = modelClassMirror.superclass; + } + + return properties; + } + + ModelDescription _descriptionFromModelClass(mirrors.ClassMirror classMirror) { + var result; + for (mirrors.InstanceMirror instance in classMirror.metadata) { + if (instance.reflectee.runtimeType == ModelMetadata) { + if (result != null) { + throw new StateError( + 'Cannot have more than one ModelMetadata() annotation ' + 'on a Model class'); + } + result = instance.getField(#description).reflectee; + } + } + return result; + } +} diff --git a/pkgs/gcloud/lib/src/db/model_description.dart b/pkgs/gcloud/lib/src/db/model_description.dart new file mode 100644 index 00000000..24b38cdb --- /dev/null +++ b/pkgs/gcloud/lib/src/db/model_description.dart @@ -0,0 +1,380 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + +abstract class ModelDescription { + static String ID_FIELDNAME = 'id'; + + // NOTE: These integer constants are array indices into the state vector. + // Subclasses may need to take this into account. 
+ static const int STATE_PROPERTYNAME_TO_FIELDNAME_MAP = 0; + static const int STATE_FIELDNAME_TO_PROPERTYNAME_MAP = 1; + static const int STATE_INDEXED_PROPERTIES = 2; + static const int STATE_UNINDEXED_PROPERTIES = 3; + static const int STATE_LAST = STATE_UNINDEXED_PROPERTIES; + + final String _kind; + const ModelDescription(this._kind); + + initialize(ModelDB db) { + // Compute propertyName -> fieldName mapping. + var property2FieldName = new HashMap(); + var field2PropertyName = new HashMap(); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + // The default of a datastore property name is the fieldName. + // It can be overridden with [Property.propertyName]. + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (fieldName != ModelDescription.ID_FIELDNAME) { + property2FieldName[propertyName] = fieldName; + field2PropertyName[fieldName] = propertyName; + } + }); + + // Compute properties & unindexed properties + var indexedProperties = new Set(); + var unIndexedProperties = new Set(); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + if (fieldName != ModelDescription.ID_FIELDNAME) { + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (prop.indexed) { + indexedProperties.add(propertyName); + } else { + unIndexedProperties.add(propertyName); + } + } + }); + + // NOTE: This state vector is indexed by the STATE_* integer constants! + return new List.from([ + property2FieldName, + field2PropertyName, + indexedProperties, + unIndexedProperties, + ], growable: false); + } + + bool registerKind(ModelDB db) => true; + + String kindName(ModelDB db) => _kind; + + datastore.Entity encodeModel(ModelDB db, Model model) { + List stateVector = db.modelDescriptionState(this); + var key = db.toDatastoreKey(model.key); + + var properties = {}; + var unIndexedProperties = stateVector[STATE_UNINDEXED_PROPERTIES]; + var mirror = mirrors.reflect(model); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + _encodeProperty(db, model, mirror, properties, fieldName, prop); + }); + + return new datastore.Entity( + key, properties, unIndexedProperties: unIndexedProperties); + } + + _encodeProperty(ModelDB db, Model model, mirrors.InstanceMirror mirror, + Map properties, String fieldName, Property prop) { + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (fieldName != ModelDescription.ID_FIELDNAME) { + var value = mirror.getField( + mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed for ' + 'property $fieldName while trying to serialize entity of kind ' + '${model.runtimeType}. '); + } + properties[propertyName] = prop.encodeValue(db, value); + } + } + + Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) { + if (entity == null) return null; + + // NOTE: this assumes a default constructor for the model classes! 
+ var classMirror = db.modelClass(this); + var mirror = classMirror.newInstance(const Symbol(''), []); + + // Set the id and the parent key + mirror.reflectee.id = key.id; + mirror.reflectee.parentKey = key.parent; + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + _decodeProperty(db, entity, mirror, fieldName, prop); + }); + return mirror.reflectee; + } + + _decodeProperty(ModelDB db, datastore.Entity entity, + mirrors.InstanceMirror mirror, String fieldName, + Property prop) { + String propertyName = fieldNameToPropertyName(db, fieldName); + + if (fieldName != ModelDescription.ID_FIELDNAME) { + var rawValue = entity.properties[propertyName]; + var value = prop.decodePrimitiveValue(db, rawValue); + + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed while ' + 'trying to deserialize entity of kind ' + '${entity.key.elements.last.kind} (property name: $prop)'); + } + + mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + } + } + + Query finishQuery(ModelDB db, Query q) => q; + + String fieldNameToPropertyName(ModelDB db, String fieldName) { + List stateVector = db.modelDescriptionState(this); + return stateVector[STATE_FIELDNAME_TO_PROPERTYNAME_MAP][fieldName]; + } + + String propertyNameToFieldName(ModelDB db, String propertySearchName) { + List stateVector = db.modelDescriptionState(this); + return stateVector[STATE_PROPERTYNAME_TO_FIELDNAME_MAP][propertySearchName]; + } + + Object encodeField(ModelDB db, String fieldName, Object value) { + Property property = db.propertiesForModel(this)[fieldName]; + if (property != null) return property.encodeValue(db, value); + return null; + } +} + +abstract class PolyModelDescription extends ModelDescription { + static const int STATE_POLYCLASSES = ModelDescription.STATE_LAST + 1; + static const int STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP = + ModelDescription.STATE_LAST + 2; + static const int STATE_LAST = STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP; + + final __classField = const StringListProperty(propertyName: 'class'); + const PolyModelDescription() : super('PolyModel'); + + initialize(ModelDB db) { + List stateVector = super.initialize(db); + + // Generate the class list, which begins with the root of the polymorphic + // class hierachy (first direct subclass of PolyModelDescription) and goes + // down to the concrete class. + List getPolyClasses(ModelDescription modelDescription) { + List classes = []; + var currentModelDesc = modelDescription.runtimeType; + while (currentModelDesc != PolyModelDescription) { + var classMirror = mirrors.reflectClass(currentModelDesc); + String polyModelName = classMirror.getField(#PolyModelName).reflectee; + classes.add(polyModelName); + currentModelDesc = classMirror.superclass.reflectedType; + } + return classes.reversed.toList(growable: false); + } + + // NOTE: This is a redundant computation, but we do this only *once* when + // initializing. [O(N^2) where N is number of poly model classes.] + // The reason is every model class should generate it's own state vector + // but part of that state is shared across a complete poly class hierachy. + // Could be optimized if necessary. 
+ Map getPolyClassList2ModelDescriptionMap() { + var map = new HashMap(); + for (var md in db.modelDescriptions) { + if (md is PolyModelDescription) { + map[_getPolyClassString(getPolyClasses(md))] = md; + } + } + return map; + } + + return new List.from([] + ..addAll(stateVector) + ..add(getPolyClasses(this)) + ..add(getPolyClassList2ModelDescriptionMap()), + growable: false); + } + + // We register only the root PolyModelDescription class with the kindName(). + bool registerKind(ModelDB db) => _getPolyClasses(db, this).length == 1; + + String kindName(ModelDB db) => _getPolyClasses(db, this).first; + + datastore.Entity encodeModel(ModelDB db, Model model) { + List stateVector = db.modelDescriptionState(this); + var key = db.toDatastoreKey(model.key); + var properties = {}; + var unIndexedProperties = + stateVector[ModelDescription.STATE_UNINDEXED_PROPERTIES]; + var mirror = mirrors.reflect(model); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + _encodeProperty(db, model, mirror, properties, fieldName, prop); + }); + properties[__classField.propertyName] = + __classField.encodeValue(db, _getPolyClasses(db, this)); + + return new datastore.Entity( + key, properties, unIndexedProperties: unIndexedProperties); + } + + Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) { + List stateVector = db.modelDescriptionState(this); + ModelDescription getModelDescriptionByClassList(List classes) { + var polyClassString = _getPolyClassString(classes); + var bottomPolyModelDescription = stateVector + [STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP][polyClassString]; + if (bottomPolyModelDescription == null) { + throw new StateError( + 'Could not get ModelDescription for ${classes.join(' ')}'); + } + return bottomPolyModelDescription; + } + + if (entity == null) return null; + + // NOTE: this assumes a default constructor for the model classes! + List classes = __classField.decodePrimitiveValue( + db, entity.properties[__classField.propertyName]); + var bottomPolyModelDescription = getModelDescriptionByClassList(classes); + var classMirror = db.modelClass(bottomPolyModelDescription); + var mirror = classMirror.newInstance(const Symbol(''), []); + + // Set the id and the parent key + mirror.reflectee.id = key.id; + mirror.reflectee.parentKey = key.parent; + + db.propertiesForModel(bottomPolyModelDescription).forEach( + (String fieldName, Property prop) { + if (fieldName != __classField.propertyName) { + bottomPolyModelDescription._decodeProperty( + db, entity, mirror, fieldName, prop); + } + }); + return mirror.reflectee; + } + + Query finishQuery(ModelDB db, Query q) { + q.filter('class IN', [_getPolyClasses(db, this).last]); + return q; + } + + List _getPolyClasses(ModelDB db, + ModelDescription bottomModelDescription) { + List stateVector = db.modelDescriptionState(this); + return stateVector[STATE_POLYCLASSES]; + } + + String _getPolyClassString(List classes) { + // NOTE: We assume here that a classnames do not contain '\n' in it. 
+    return classes.join('\n');
+  }
+
+  String fieldNameToPropertyName(ModelDB db, String fieldName) {
+    var propertyName = super.fieldNameToPropertyName(db, fieldName);
+    if (propertyName == null && fieldName == 'class') return 'class';
+    return propertyName;
+  }
+}
+
+// NOTE/TODO:
+// Currently expanded properties are only
+//   * decoded if there are no clashes in [usedNames]
+//   * encoded if there are no clashes in [usedNames]
+// We might want to throw an error if there are clashes, because otherwise
+//   - we may end up removing properties after a read-write cycle
+//   - we may end up dropping added properties in a write
+// ([usedNames] := [realFieldNames] + [realPropertyNames])
+abstract class ExpandoModelDescription extends ModelDescription {
+  static const int STATE_FIELD_SET = ModelDescription.STATE_LAST + 1;
+  static const int STATE_PROPERTY_SET = ModelDescription.STATE_LAST + 2;
+  static const int STATE_USED_NAMES = ModelDescription.STATE_LAST + 3;
+  static const int STATE_LAST = STATE_USED_NAMES;
+
+  const ExpandoModelDescription(String kind) : super(kind);
+
+  initialize(ModelDB db) {
+    var stateVector = super.initialize(db);
+
+    var realFieldNames = new Set.from(
+        stateVector[ModelDescription.STATE_FIELDNAME_TO_PROPERTYNAME_MAP].keys);
+    var realPropertyNames = new Set.from(
+        stateVector[ModelDescription.STATE_PROPERTYNAME_TO_FIELDNAME_MAP].keys);
+    var usedNames =
+        new Set()..addAll(realFieldNames)..addAll(realPropertyNames);
+
+    // NOTE: [realFieldNames] and [realPropertyNames] are not used right now
+    // but we might use them to detect name clashes in the future.
+    return new List.from([]
+        ..addAll(stateVector)
+        ..add(realFieldNames)
+        ..add(realPropertyNames)
+        ..add(usedNames),
+        growable: false);
+  }
+
+  datastore.Entity encodeModel(ModelDB db, ExpandoModel model) {
+    List stateVector = db.modelDescriptionState(this);
+    Set usedNames = stateVector[STATE_USED_NAMES];
+
+    var entity = super.encodeModel(db, model);
+    var properties = entity.properties;
+    model.additionalProperties.forEach((String key, Object value) {
+      // NOTE: All expanded properties will be indexed.
+      if (!usedNames.contains(key)) {
+        properties[key] = value;
+      }
+    });
+    return entity;
+  }
+
+  Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) {
+    if (entity == null) return null;
+
+    List stateVector = db.modelDescriptionState(this);
+    Set usedNames = stateVector[STATE_USED_NAMES];
+
+    ExpandoModel model = super.decodeEntity(db, key, entity);
+    var properties = entity.properties;
+    properties.forEach((String key, Object value) {
+      if (!usedNames.contains(key)) {
+        model.additionalProperties[key] = value;
+      }
+    });
+    return model;
+  }
+
+  String fieldNameToPropertyName(ModelDB db, String fieldName) {
+    String propertyName = super.fieldNameToPropertyName(db, fieldName);
+    // If the ModelDescription doesn't know about [fieldName], it's an
+    // expanded property, where propertyName == fieldName.
+    if (propertyName == null) propertyName = fieldName;
+    return propertyName;
+  }
+
+  String propertyNameToFieldName(ModelDB db, String propertyName) {
+    String fieldName = super.propertyNameToFieldName(db, propertyName);
+    // If the ModelDescription doesn't know about [propertyName], it's an
+    // expanded property, where propertyName == fieldName.
+    if (fieldName == null) fieldName = propertyName;
+    return fieldName;
+  }
+
+  Object encodeField(ModelDB db, String fieldName, Object value) {
+    Object primitiveValue = super.encodeField(db, fieldName, value);
+    // If superclass can't encode field, we return value here (and assume
+    // it's primitive)
+    // NOTE: Implicit assumption:
+    //   If value != null then superclass will return != null.
+    // TODO: Ensure [value] is primitive in this case.
+    if (primitiveValue == null) primitiveValue = value;
+    return primitiveValue;
+  }
+}
diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart
new file mode 100644
index 00000000..ffadf07e
--- /dev/null
+++ b/pkgs/gcloud/lib/src/db/models.dart
@@ -0,0 +1,153 @@
+// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of gcloud.db;
+
+/**
+ * Represents a unique identifier for a [Model] stored in a datastore.
+ *
+ * The [Key] can be incomplete if its id is `null`. In this case the id will
+ * be automatically allocated and set at commit time.
+ */
+class Key {
+  // Either KeyImpl or PartitionImpl
+  final Object _parent;
+
+  final Type type;
+  final Object id;
+
+  Key(Key parent, this.type, this.id) : _parent = parent {
+    if (type == null) {
+      throw new ArgumentError('The type argument must not be null.');
+    }
+    if (id != null && id is! String && id is! int) {
+      throw new ArgumentError(
+          'The id argument must be an integer or a String.');
+    }
+  }
+
+  Key.emptyKey(Partition partition)
+      : _parent = partition, type = null, id = null;
+
+  /**
+   * Parent of this [Key].
+   */
+  Key get parent {
+    if (_parent is Key) {
+      return _parent;
+    }
+    return null;
+  }
+
+  /**
+   * The partition of this [Key].
+   */
+  Partition get partition {
+    var obj = _parent;
+    while (obj is! Partition) {
+      obj = obj._parent;
+    }
+    return obj;
+  }
+
+  Key append(Type modelType, {Object id}) {
+    return new Key(this, modelType, id);
+  }
+
+  bool get isEmpty => _parent is Partition;
+
+  operator==(Object other) {
+    return
+        other is Key &&
+        _parent == other._parent &&
+        type == other.type &&
+        id == other.id;
+  }
+
+  int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode;
+}
+
+/**
+ * Represents a datastore partition.
+ *
+ * A datastore is partitioned into namespaces. The default namespace is
+ * `null`.
+ */
+class Partition {
+  final String namespace;
+
+  Partition(this.namespace) {
+    if (namespace == '') {
+      throw new ArgumentError(
+          'The namespace must not be an empty string');
+    }
+  }
+
+  /**
+   * Returns an empty [Key].
+   *
+   * Entities where the parent [Key] is empty will create their own entity
+   * group.
+   */
+  Key get emptyKey => new Key.emptyKey(this);
+
+  operator==(Object other) {
+    return other is Partition && namespace == other.namespace;
+  }
+
+  int get hashCode => namespace.hashCode;
+}
+
+/**
+ * Superclass for all model classes.
+ *
+ * Every model class has an [id] -- which must be an integer or a string, and
+ * a [parentKey]. The [key] getter returns the key for the model object.
+ *
+ * Every concrete model class inheriting (directly or indirectly) from [Model]
+ * needs a [ModelMetadata] annotation containing its [ModelDescription].
+ */
+abstract class Model {
+  Object id;
+  Key parentKey;
+
+  Key get key => parentKey.append(this.runtimeType, id: id);
+}
+
+/**
+ * Superclass for all polymorphic model classes.
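+ *
+ * A sketch of such a hierarchy (illustrative; the class names and their
+ * descriptions are made up, see the requirements below):
+ *
+ *     @ModelMetadata(const AnimalDesc())
+ *     class Animal extends PolyModel { }
+ *
+ *     @ModelMetadata(const DogDesc())
+ *     class Dog extends Animal { }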
+ * + * The direct subclass of this model must have a [ModelMetadata] annotation + * containing a [PolyModelDescription]. + */ +abstract class PolyModel extends Model { } + +/** + * Superclass for all expanded model classes. + * + * The subclasses of this model must have a [ModelMetadata] annotation + * containing a [ExpandoModelDescription]. + * + * The [ExpandoModel] class adds support for having dynamic properties. You can + * set arbitrary fields on these models. The expanded values must be values + * accepted by the [RawDatastore] implementation. + */ +@proxy +abstract class ExpandoModel extends Model { + final Map additionalProperties = {}; + + Object noSuchMethod(Invocation invocation) { + var name = mirrors.MirrorSystem.getName(invocation.memberName); + if (name.endsWith('=')) name = name.substring(0, name.length - 1); + if (invocation.isGetter) { + return additionalProperties[name]; + } else if (invocation.isSetter) { + var value = invocation.positionalArguments[0]; + additionalProperties[name] = value; + return value; + } else { + throw new ArgumentError('Unsupported noSuchMethod call on ExpandoModel'); + } + } +} diff --git a/pkgs/gcloud/lib/src/db/properties.dart b/pkgs/gcloud/lib/src/db/properties.dart new file mode 100644 index 00000000..1e337444 --- /dev/null +++ b/pkgs/gcloud/lib/src/db/properties.dart @@ -0,0 +1,165 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + +abstract class Property { + // The name in the ModelClass is used if [propertyName] is null! + final String propertyName; + final bool required; + final bool indexed; + + const Property({this.propertyName, this.required: false, this.indexed: true}); + + bool validate(ModelDB db, Object value) { + if (required && value == null) return false; + return true; + } + + Object encodeValue(ModelDB db, Object value); + + Object decodePrimitiveValue(ModelDB db, Object value); +} + + +abstract class PrimitiveProperty extends Property { + const PrimitiveProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + Object encodeValue(ModelDB db, Object value) => value; + + Object decodePrimitiveValue(ModelDB db, Object value) => value; +} + + +class BoolProperty extends PrimitiveProperty { + const BoolProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is bool); +} + +class IntProperty extends PrimitiveProperty { + const IntProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is int); +} + +class StringProperty extends PrimitiveProperty { + const StringProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is String); +} + +class ModelKeyProperty extends PrimitiveProperty { + const ModelKeyProperty( + {String propertyName, bool required: 
false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is Key); + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + return db.toDatastoreKey(value); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return null; + return db.fromDatastoreKey(value as datastore.Key); + } +} + +class BlobProperty extends PrimitiveProperty { + const BlobProperty({String propertyName, bool required: false}) + : super(propertyName: propertyName, required: required, indexed: false); + + // NOTE: We don't validate that the entries of the list are really integers + // of the range 0..255! + // If an untyped list was created the type check will always succeed. i.e. + // "[1, true, 'bar'] is List" evaluates to `true` + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is List); + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + return new datastore.BlobValue(value); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return null; + + datastore.BlobValue blobValue = value; + return blobValue.bytes; + } +} + +class DateTimeProperty extends PrimitiveProperty { + const DateTimeProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is DateTime); + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value is int) { + return + new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); + } + return value; + } +} + + +class ListProperty extends Property { + final PrimitiveProperty subProperty; + + // TODO: We want to support optional list properties as well. + // Get rid of "required: true" here. + const ListProperty(this.subProperty, + {String propertyName, bool indexed: true}) + : super(propertyName: propertyName, required: true, indexed: indexed); + + bool validate(ModelDB db, Object value) { + if (!super.validate(db, value) || value is! List) return false; + + for (var entry in value) { + if (!subProperty.validate(db, entry)) return false; + } + return true; + } + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + List list = value; + if (list.length == 0) return null; + if (list.length == 1) return list[0]; + return list.map( + (value) => subProperty.encodeValue(db, value)).toList(); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return []; + if (value is! 
List) return [value]; + return (value as List) + .map((entry) => subProperty.decodePrimitiveValue(db, entry)) + .toList(); + } +} + +class StringListProperty extends ListProperty { + const StringListProperty({String propertyName, bool indexed: true}) + : super(const StringProperty(), + propertyName: propertyName, indexed: indexed); +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 76ac800a..7768df81 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -5,10 +5,9 @@ environment: sdk: '>=1.5.0 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis_auth: '>=0.1.0 <0.2.0' + http: '>=0.11.0 <0.12.0' googleapis_beta: '>=0.1.0 <0.2.0' dev_dependencies: - http: '>=0.11.0 <0.12.0' unittest: '>=0.11.0 <0.12.0' dependency_overrides: googleapis_auth: diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart new file mode 100644 index 00000000..dde0d6a2 --- /dev/null +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -0,0 +1,57 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db_impl_test; + +import 'dart:async'; + +import 'package:gcloud/db.dart'; +import 'package:unittest/unittest.dart'; + + +// These unused imports make sure that [ModelDBImpl.fromLibrary()] will find +// all the Model/ModelDescription classes. +import 'model_dbs/duplicate_kind.dart' as test1; +import 'model_dbs/duplicate_property.dart' as test2; +import 'model_dbs/multiple_annotations.dart' as test3; +import 'model_dbs/invalid_id_property.dart' as test4; +import 'model_dbs/duplicate_fieldname.dart' as test5; +import 'model_dbs/no_default_constructor.dart' as test6; + +main() { + group('model_db', () { + group('from_library', () { + test('duplicate_kind', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_kind); + }), throwsA(isStateError)); + }); + test('duplicate_property', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_property); + }), throwsA(isStateError)); + }); + test('multiple_annotations', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.multiple_annotations); + }), throwsA(isStateError)); + }); + test('invalid_id', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.invalid_id); + }), throwsA(isStateError)); + }); + test('duplicate_fieldname', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_fieldname); + }), throwsA(isStateError)); + }); + test('no_default_constructor', () { + expect(new Future.sync(() { + new ModelDB.fromLibrary(#gcloud.db.model_test.no_default_constructor); + }), throwsA(isStateError)); + }); + }); + }); +} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart new file mode 100644 index 00000000..7f376285 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -0,0 +1,26 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library gcloud.db.model_test.duplicate_fieldname; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +class A extends db.Model {} + +@db.ModelMetadata(const BDesc()) +class B extends A {} + + +class ADesc extends db.ModelDescription { + final id = const db.IntProperty(); + + final foo = const db.IntProperty(propertyName: 'foo'); + const ADesc({String kind: 'A'}) : super(kind); +} + +class BDesc extends ADesc { + final foo = const db.IntProperty(propertyName: 'bar'); + const BDesc() : super(kind: 'B'); +} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart new file mode 100644 index 00000000..46a90cd6 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -0,0 +1,23 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.model_test.duplicate_kind; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +class A extends db.Model { } + +class ADesc extends db.ModelDescription { + final id = const db.IntProperty(); + const ADesc() : super('A'); +} + +@db.ModelMetadata(const BDesc()) +class B extends db.Model { } + +class BDesc extends db.ModelDescription { + final id = const db.IntProperty(); + const BDesc() : super('A'); +} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart new file mode 100644 index 00000000..5b5e07b6 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -0,0 +1,17 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.model_test.duplicate_property; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +class A extends db.Model { } + +class ADesc extends db.ModelDescription { + final id = const db.IntProperty(); + final foo = const db.IntProperty(propertyName: 'foo'); + final bar = const db.IntProperty(propertyName: 'foo'); + const ADesc() : super('A'); +} diff --git a/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart b/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart new file mode 100644 index 00000000..d46332c3 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart @@ -0,0 +1,15 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.model_test.invalid_id; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +class A extends db.Model { } + +class ADesc extends db.ModelDescription { + final id = const db.DateTimeProperty(); + const ADesc() : super('A'); +} diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart new file mode 100644 index 00000000..07fad6cd --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -0,0 +1,16 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. 
Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.model_test.multiple_annotations; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +@db.ModelMetadata(const ADesc()) +class A extends db.Model { } + +class ADesc extends db.ModelDescription { + final id = const db.IntProperty(); + const ADesc() : super('A'); +} diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart new file mode 100644 index 00000000..60bc9b67 --- /dev/null +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -0,0 +1,17 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.model_test.no_default_constructor; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const ADesc()) +class A extends db.Model { + A(int i); +} + +class ADesc extends db.ModelDescription { + final id = const db.IntProperty(); + const ADesc() : super('A'); +} diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart new file mode 100644 index 00000000..e68dd609 --- /dev/null +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -0,0 +1,177 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.db.properties_test; + +import 'dart:typed_data'; + +import 'package:gcloud/db.dart'; +import 'package:gcloud/datastore.dart' as datastore; +import 'package:unittest/unittest.dart'; + +main() { + group('properties', () { + test('bool_property', () { + var prop = const BoolProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const BoolProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, true), isTrue); + expect(prop.validate(null, false), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, true), equals(true)); + expect(prop.encodeValue(null, false), equals(false)); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, true), equals(true)); + expect(prop.decodePrimitiveValue(null, false), equals(false)); + }); + + test('int_property', () { + var prop = const IntProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const IntProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, 33), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, 42), equals(42)); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, 99), equals(99)); + }); + + test('string_property', () { + var prop = const StringProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const StringProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, 'foobar'), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, 'foo'), equals('foo')); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, 'bar'), equals('bar')); + }); + + 
test('blob_property', () { + var prop = const BlobProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const BlobProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, [1,2]), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, []).bytes, equals([])); + expect(prop.encodeValue(null, [1,2]).bytes, equals([1,2])); + expect(prop.encodeValue(null, new Uint8List.fromList([1,2])).bytes, + equals([1,2])); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([])), + equals([])); + expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([5,6])), + equals([5,6])); + expect(prop.decodePrimitiveValue( + null, new datastore.BlobValue(new Uint8List.fromList([5,6]))), + equals([5,6])); + }); + + test('datetime_property', () { + var utc99 = new DateTime.fromMillisecondsSinceEpoch(99, isUtc: true); + + var prop = const DateTimeProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const DateTimeProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, utc99), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, utc99), equals(utc99)); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, 99*1000), equals(utc99)); + expect(prop.decodePrimitiveValue(null, 99*1000 + 1), equals(utc99)); + expect(prop.decodePrimitiveValue(null, utc99), equals(utc99)); + }); + + test('list_property', () { + var prop = const ListProperty(const BoolProperty()); + + expect(prop.validate(null, null), isFalse); + expect(prop.validate(null, []), isTrue); + expect(prop.validate(null, [true, false]), isTrue); + expect(prop.validate(null, [true, false, 1]), isFalse); + expect(prop.encodeValue(null, []), equals(null)); + expect(prop.encodeValue(null, [true]), equals(true)); + expect(prop.encodeValue(null, [true, false]), equals([true, false])); + expect(prop.decodePrimitiveValue(null, null), equals([])); + expect(prop.decodePrimitiveValue(null, []), equals([])); + expect(prop.decodePrimitiveValue(null, true), equals([true])); + expect(prop.decodePrimitiveValue(null, [true, false]), + equals([true, false])); + }); + + test('modelkey_property', () { + var datastoreKey = new datastore.Key( + [new datastore.KeyElement('MyKind', 42)], + partition: new datastore.Partition('foonamespace')); + var dbKey = new KeyMock(datastoreKey); + var modelDBMock = new ModelDBMock(datastoreKey, dbKey); + + var prop = const ModelKeyProperty(required: true); + expect(prop.validate(modelDBMock, null), isFalse); + + prop = const ModelKeyProperty(required: false); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, dbKey), isTrue); + expect(prop.validate(modelDBMock, datastoreKey), isFalse); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, dbKey), equals(datastoreKey)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, datastoreKey), + equals(dbKey)); + }); + }); +} + +class KeyMock implements Key { + datastore.Key _datastoreKey; + + KeyMock(this._datastoreKey); + + Object id = 1; + Type type = null; + Key get parent => this; + bool get isEmpty => false; + Partition get partition => null; + datastore.Key get datastoreKey => _datastoreKey; + Key append(Type 
modelType, {Object id}) => null; + int get hashCode => 1; +} + +class ModelDBMock implements ModelDB { + final datastore.Key _datastoreKey; + final Key _dbKey; + ModelDBMock(this._datastoreKey, this._dbKey); + + Key fromDatastoreKey(datastore.Key datastoreKey) { + if (!identical(_datastoreKey, datastoreKey)) { + throw "Broken test"; + } + return _dbKey; + } + + datastore.Key toDatastoreKey(Key key) { + if (!identical(_dbKey, key)) { + throw "Broken test"; + } + return _datastoreKey; + } + + modelDescriptionState(ModelDescription modelDescription) => null; + Iterable get modelDescriptions => null; + Map propertiesForModel(modelDescription) => null; + modelClass(ModelDescription md) => null; + Model fromDatastoreEntity(datastore.Entity entity) => null; + ModelDescription modelDescriptionForType(Type type) => null; + datastore.Entity toDatastoreEntity(Model model) => null; +} From bf0491dae1f58b5158c7ca2c7ce28fafb1c70b90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Tue, 9 Sep 2014 10:36:18 +0200 Subject: [PATCH 004/239] Add support for querying subscriptions on topic name Besides listing all topics or all subscriptions this is tho only type of query currently supported in Cloud Pub/Sub. R=lrn@google.com, kustermann@google.com BUG= Review URL: https://codereview.chromium.org//542603002 --- pkgs/gcloud/lib/pubsub.dart | 21 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 105 +++++-- pkgs/gcloud/test/pubsub_test.dart | 435 ++++++++++++++++++++++----- 3 files changed, 449 insertions(+), 112 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index e298683e..fc16db37 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -162,12 +162,24 @@ abstract class PubSub { /// Returns a `Future` which completes with the subscription. Future lookupSubscription(String name); - /// List all subscriptions. + /// List subscriptions. + /// + /// If [query] is passed this will list all subscriptions matching the query. + /// + /// Otherwise this will list all subscriptions. + /// + /// The only supported query string is the name of a topic. If a name of a + /// topic is passed as [query], this will list all subscriptions on that + /// topic. /// /// Returns a `Stream` of subscriptions. - Stream listSubscriptions(); + Stream listSubscriptions([String query]); - /// Start paging through all subscriptions. + /// Start paging through subscriptions. + /// + /// If [topic] is passed this will list all subscriptions to that topic. + /// + /// Otherwise this will list all subscriptions. /// /// The maximum number of subscriptions in each page is specified in /// [pageSize] @@ -175,7 +187,8 @@ abstract class PubSub { /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page of /// subscriptions. - Future> pageSubscriptions({int pageSize: 50}); + Future> pageSubscriptions( + {String topic, int pageSize: 50}); } /// A Pub/Sub topic. 
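A minimal usage sketch of the topic-filtered listing described above. Illustrative only: `client` stands for an authenticated `http.Client` and `project` for a Cloud project id (e.g. obtained via `package:googleapis_auth`), and `my-topic` is a placeholder topic name.

    import 'dart:async';
    import 'package:gcloud/pubsub.dart';

    Future listSubscriptionsOnTopic(client, String project) {
      var pubsub = new PubSub(client, project);

      // Passing a topic name lists only the subscriptions on that topic;
      // calling listSubscriptions() with no argument lists all subscriptions.
      return pubsub.listSubscriptions('my-topic').length.then((count) {
        print('my-topic has $count subscription(s)');

        // The paged form of the same query, 10 subscriptions per page.
        return pubsub.pageSubscriptions(topic: 'my-topic', pageSize: 10);
      }).then((page) {
        print('First page: ${page.items.length} items, last: ${page.isLast}');
      });
    }

Both forms run the same underlying query; the stream form simply fetches successive pages internally.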
diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index d75f6725..8283ae44 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -5,6 +5,7 @@ part of gcloud.pubsub; class _PubSubImpl implements PubSub { + static const int _DEFAULT_LIST_PAGE_SIZE = 50; final http.Client _client; final String project; final pubsub.PubsubApi _api; @@ -49,14 +50,14 @@ class _PubSubImpl implements PubSub { } Future _listTopics( - int pageSize, [String nextPageToken]) { + int pageSize, String nextPageToken) { var query = 'cloud.googleapis.com/project in (/projects/$project)'; return _api.topics.list( query: query, maxResults: pageSize, pageToken: nextPageToken); } Future _createSubscription( - String name, String topic, {Uri endpoint}) { + String name, String topic, Uri endpoint) { var subscription = new pubsub.Subscription() ..name = name ..topic = topic; @@ -77,8 +78,12 @@ class _PubSubImpl implements PubSub { } Future _listSubscriptions( - int pageSize, [String nextPageToken]) { - var query = 'cloud.googleapis.com/project in (/projects/$project)'; + String topic, int pageSize, String nextPageToken) { + // See https://developers.google.com/pubsub/v1beta1/subscriptions/list for + // the specification of the query format. + var query = topic == null + ? 'cloud.googleapis.com/project in (/projects/$project)' + : 'pubsub.googleapis.com/topic in (/topics/$project/$topic)'; return _api.subscriptions.list( query: query, maxResults: pageSize, pageToken: nextPageToken); } @@ -171,40 +176,58 @@ class _PubSubImpl implements PubSub { } Stream listTopics() { + bool pendingRequest = false; bool paused = false; + bool cancelled = false; Page currentPage; StreamController controller; + handleError(e, s) { + controller.addError(e, s); + controller.close(); + } + handlePage(Page page) { + if (cancelled) return; + pendingRequest = false; currentPage = page; page.items.forEach(controller.add); if (page.isLast) { controller.close(); } else if (!paused) { - page.next().then(handlePage); + page.next().then(handlePage, onError: handleError); } } + onListen() { + int pageSize = _DEFAULT_LIST_PAGE_SIZE; + pendingRequest = true; + _listTopics(pageSize, null) + .then((response) { + handlePage(new _TopicPageImpl(this, pageSize, response)); + }, + onError: handleError); + } onPause() => paused = true; onResume() { - print('res'); paused = false; - currentPage.next().then(handlePage); + if (pendingRequest) return; + pendingRequest = true; + currentPage.next().then(handlePage, onError: handleError); + } + onCancel() { + cancelled = true; } - controller = new StreamController( - sync: true, onPause: onPause, onResume: onResume); - - int pageSize = 50; - _listTopics(pageSize).then((response) { - handlePage(new _TopicPageImpl(this, pageSize, response)); - }); + controller = new StreamController(sync: true, onListen: onListen, + onPause: onPause, onResume: onResume, + onCancel: onCancel); return controller.stream; } Future> pageTopics({int pageSize: 50}) { - return _listTopics(pageSize).then((response) { + return _listTopics(pageSize, null).then((response) { return new _TopicPageImpl(this, pageSize, response); }); } @@ -215,7 +238,7 @@ class _PubSubImpl implements PubSub { _checkTopicName(topic); return _createSubscription(_fullSubscriptionName(name), _fullTopicName(topic), - endpoint: endpoint) + endpoint) .then((sub) => new _SubscriptionImpl(this, sub)); } @@ -230,37 +253,62 @@ class _PubSubImpl implements PubSub { .then((sub) => new _SubscriptionImpl(this, sub)); 
} - Stream listSubscriptions() { + Stream listSubscriptions([String query]) { + bool pendingRequest = false; bool paused = false; + bool cancelled = false; Page currentPage; StreamController controller; + handleError(e, s) { + controller.addError(e, s); + controller.close(); + } + handlePage(Page page) { + if (cancelled) return; + pendingRequest = false; currentPage = page; page.items.forEach(controller.add); if (page.isLast) { controller.close(); } else if (!paused) { - page.next().then(handlePage); + page.next().then(handlePage, onError: handleError); } } + onListen() { + int pageSize = _DEFAULT_LIST_PAGE_SIZE; + pendingRequest = true; + _listSubscriptions(query, pageSize, null) + .then((response) { + handlePage(new _SubscriptionPageImpl( + this, query, pageSize, response)); + }, + onError: handleError); + } onPause() => paused = true; onResume() { paused = false; - currentPage.next().then(handlePage); + if (pendingRequest) return; + pendingRequest = true; + currentPage.next().then(handlePage, onError: handleError); + } + onCancel() { + cancelled = true; } - controller = new StreamController(onPause: onPause, onResume: onResume); - - pageSubscriptions().then(handlePage); + controller = new StreamController(sync: true, onListen: onListen, + onPause: onPause, onResume: onResume, + onCancel: onCancel); return controller.stream; } - Future> pageSubscriptions({int pageSize: 50}) { - return _listSubscriptions(pageSize).then((response) { - return new _SubscriptionPageImpl(this, pageSize, response); + Future> pageSubscriptions( + {String topic, int pageSize: 50}) { + return _listSubscriptions(topic, pageSize, null).then((response) { + return new _SubscriptionPageImpl(this, topic, pageSize, response); }); } } @@ -503,11 +551,13 @@ class _TopicPageImpl implements Page { class _SubscriptionPageImpl implements Page { final _PubSubImpl _api; + final String _topic; final int _pageSize; final String _nextPageToken; final List items; _SubscriptionPageImpl(this._api, + this._topic, this._pageSize, pubsub.ListSubscriptionsResponse response) : items = new List(response.subscription != null @@ -527,8 +577,9 @@ class _SubscriptionPageImpl implements Page { if (_nextPageToken == null) return new Future.value(null); if (pageSize == null) pageSize = this._pageSize; - return _api._listSubscriptions(pageSize, _nextPageToken).then((response) { - return new _SubscriptionPageImpl(_api, pageSize, response); + return _api._listSubscriptions( + _topic, pageSize, _nextPageToken).then((response) { + return new _SubscriptionPageImpl(_api, _topic, pageSize, response); }); } } \ No newline at end of file diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub_test.dart index 996933ac..6f70affb 100644 --- a/pkgs/gcloud/test/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub_test.dart @@ -70,6 +70,13 @@ class MockClient extends http.BaseClient { return new Future.value( new http.Response('', 200, headers: RESPONSE_HEADERS)); } + + Future respondError(statusCode) { + var error = {'error' : {'code': statusCode, 'message': 'error'}}; + return new Future.value( + new http.Response( + JSON.encode(error), statusCode, headers: RESPONSE_HEADERS)); + } } main() { @@ -193,10 +200,15 @@ main() { // Mock that expect/generates [n] topics in pages of page size // [pageSize]. - registerQueryMock(mock, n, pageSize) { + registerQueryMock(mock, n, pageSize, [totalCalls]) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. 
if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. + if (totalCalls == null) { + totalCalls = totalPages; + } var pageCount = 0; mock.register('GET', 'topics', expectAsync((request) { pageCount++; @@ -216,38 +228,148 @@ main() { addTopics(response, first, n - (totalPages - 1) * pageSize); } return mock.respond(response); - }, count: totalPages)); + }, count: totalCalls)); } group('list', () { - test('empty', () { + Future q(count) { var mock = new MockClient(); - registerQueryMock(mock, 0, 50); + registerQueryMock(mock, count, 50); var api = new PubSub(mock, PROJECT); return api.listTopics().listen( - ((_) => throw 'Unexpected'), - onDone: expectAsync(() => null)); + expectAsync((_) => null, count: count)).asFuture(); + } + + test('simple', () { + return q(0) + .then((_) => q(1)) + .then((_) => q(1)) + .then((_) => q(49)) + .then((_) => q(50)) + .then((_) => q(51)) + .then((_) => q(99)) + .then((_) => q(100)) + .then((_) => q(101)) + .then((_) => q(170)); }); - test('single', () { + test('immediate-pause-resume', () { var mock = new MockClient(); - registerQueryMock(mock, 10, 50); + registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); - return api.listTopics().listen( - expectAsync(((_) => null), count: 10), - onDone: expectAsync(() => null)); + api.listTopics().listen( + expectAsync(((_) => null), count: 70), + onDone: expectAsync(() => null)) + ..pause() + ..resume() + ..pause() + ..resume(); }); - test('multiple', () { + test('pause-resume', () { var mock = new MockClient(); - registerQueryMock(mock, 170, 50); + registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); - return api.listTopics().listen( - expectAsync(((_) => null), count: 170), - onDone: expectAsync(() => null)); + var count = 0; + var subscription; + subscription = api.listTopics().listen( + expectAsync(((_) { + subscription..pause()..resume()..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return null; + }), count: 70), + onDone: expectAsync(() => null)) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + }); + + test('immediate-cancel', () { + var mock = new MockClient(); + registerQueryMock(mock, 70, 50, 1); + + var api = new PubSub(mock, PROJECT); + api.listTopics().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); + }); + + test('cancel', () { + var mock = new MockClient(); + // There will be two calls to the mock as the cancel happen after + // processing the first result which will trigger a second request. + registerQueryMock(mock, 170, 50, 2); + + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listTopics().listen( + expectAsync((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + runTest(bool withPause) { + // Test error on first GET request. 
+ var mock = new MockClient(); + mock.register('GET', 'topics', expectAsync((request) { + return mock.respondError(500); + })); + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listTopics().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + // Test error on second GET request. + void runTest(bool withPause) { + var mock = new MockClient(); + registerQueryMock(mock, 51, 50, 1); + + var api = new PubSub(mock, PROJECT); + + int count = 0; + var subscription; + subscription = api.listTopics().listen( + expectAsync(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register('GET', 'topics', expectAsync((request) { + return mock.respondError(500); + })); + } + return null; + }), count: 50), + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + } + + runTest(false); + runTest(true); }); }); @@ -446,6 +568,8 @@ main() { group('query', () { var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; + var topicQuery = + 'pubsub.googleapis.com/topic in (/topics/$PROJECT/topic)'; var defaultPageSize = 50; addSubscriptions( @@ -457,16 +581,23 @@ main() { } } + // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. - registerQueryMock(mock, n, pageSize) { + registerQueryMock(mock, n, pageSize, {String topic, int totalCalls}) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. + if (totalCalls == null) { + totalCalls = totalPages; + } var pageCount = 0; mock.register('GET', 'subscriptions', expectAsync((request) { pageCount++; - expect(request.url.queryParameters['query'], query); + expect(request.url.queryParameters['query'], + topic == null ? 
query : topicQuery); expect(request.url.queryParameters['maxResults'], '$pageSize'); expect(request.body.length, 0); if (pageCount > 1) { @@ -483,125 +614,267 @@ main() { response, first, n - (totalPages - 1) * pageSize); } return mock.respond(response); - }, count: totalPages)); + }, count: totalCalls)); } group('list', () { - test('empty', () { + Future q(topic, count) { var mock = new MockClient(); - registerQueryMock(mock, 0, 50); + registerQueryMock(mock, count, 50, topic: topic); + + var api = new PubSub(mock, PROJECT); + return api.listSubscriptions(topic).listen( + expectAsync((_) => null, count: count)).asFuture(); + } + + test('simple', () { + return q(null, 0) + .then((_) => q('topic', 0)) + .then((_) => q(null, 1)) + .then((_) => q('topic', 1)) + .then((_) => q(null, 10)) + .then((_) => q('topic', 10)) + .then((_) => q(null, 49)) + .then((_) => q('topic', 49)) + .then((_) => q(null, 50)) + .then((_) => q('topic', 50)) + .then((_) => q(null, 51)) + .then((_) => q('topic', 51)) + .then((_) => q(null, 99)) + .then((_) => q('topic', 99)) + .then((_) => q(null, 100)) + .then((_) => q('topic', 100)) + .then((_) => q(null, 101)) + .then((_) => q('topic', 101)) + .then((_) => q(null, 170)) + .then((_) => q('topic', 170)); + }); + + test('immediate-pause-resume', () { + var mock = new MockClient(); + registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); - return api.listSubscriptions().listen( - ((_) => throw 'Unexpected'), - onDone: expectAsync(() => null)); + api.listSubscriptions().listen( + expectAsync(((_) => null), count: 70), + onDone: expectAsync(() => null)) + ..pause() + ..resume() + ..pause() + ..resume(); }); - test('single', () { + test('pause-resume', () { var mock = new MockClient(); - registerQueryMock(mock, 10, 50); + registerQueryMock(mock, 70, 50); + + var api = new PubSub(mock, PROJECT); + var count = 0; + var subscription; + subscription = api.listSubscriptions().listen( + expectAsync(((_) { + subscription..pause()..resume()..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return null; + }), count: 70), + onDone: expectAsync(() => null)) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + }); + + test('immediate-cancel', () { + var mock = new MockClient(); + registerQueryMock(mock, 70, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); - return api.listSubscriptions().listen( - expectAsync(((_) => null), count: 10), - onDone: expectAsync(() => null)); + api.listSubscriptions().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); }); - test('multiple', () { + test('cancel', () { var mock = new MockClient(); - registerQueryMock(mock, 170, 50); + // There will be two calls to the mock as the cancel happen after + // processing the first result which will trigger a second request. + registerQueryMock(mock, 170, 50, totalCalls: 2); var api = new PubSub(mock, PROJECT); - return api.listSubscriptions().listen( - expectAsync(((_) => null), count: 170), - onDone: expectAsync(() => null)); + var subscription; + subscription = api.listSubscriptions().listen( + expectAsync((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + runTest(bool withPause) { + // Test error on first GET request. 
+ var mock = new MockClient(); + mock.register('GET', 'subscriptions', expectAsync((request) { + return mock.respondError(500); + })); + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listSubscriptions().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + runTest(bool withPause) { + // Test error on second GET request. + var mock = new MockClient(); + registerQueryMock(mock, 51, 50, totalCalls: 1); + + var api = new PubSub(mock, PROJECT); + + int count = 0; + var subscription; + subscription = api.listSubscriptions().listen( + expectAsync(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register( + 'GET', 'subscriptions', expectAsync((request) { + return mock.respondError(500); + })); + } + return null; + }), count: 50), + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + } + + runTest(false); + runTest(true); }); }); group('page', () { - test('empty', () { + emptyTest(String topic) { var mock = new MockClient(); - registerQueryMock(mock, 0, 50); + registerQueryMock(mock, 0, 50, topic: topic); var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions().then(expectAsync((page) { + return api.pageSubscriptions(topic: topic).then(expectAsync((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); mock.clear(); - registerQueryMock(mock, 0, 20); - return api.pageSubscriptions(pageSize: 20) + registerQueryMock(mock, 0, 20, topic: topic); + return api.pageSubscriptions(topic: topic, pageSize: 20) .then(expectAsync((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); })); })); + } + + test('empty', () { + emptyTest(null); + emptyTest('topic'); }); - test('single', () { + singleTest(String topic) { var mock = new MockClient(); - registerQueryMock(mock, 10, 50); + registerQueryMock(mock, 10, 50, topic: topic); var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions().then(expectAsync((page) { + return api.pageSubscriptions(topic: topic).then(expectAsync((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); mock.clear(); - registerQueryMock(mock, 20, 20); - return api.pageSubscriptions(pageSize: 20) + registerQueryMock(mock, 20, 20, topic: topic); + return api.pageSubscriptions(topic: topic, pageSize: 20) .then(expectAsync((page) { expect(page.items.length, 20); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); })); })); - }); + } - test('multiple', () { - runTest(n, pageSize) { - var totalPages = (n + pageSize - 1) ~/ pageSize; - var pageCount = 0; + test('single', () { + singleTest(null); + singleTest('topic'); + }); - var completer = new Completer(); - var mock = new MockClient(); - registerQueryMock(mock, n, pageSize); + multipleTest(n, pageSize, topic) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; - handlingPage(page) { - pageCount++; - expect(page.isLast, pageCount == totalPages); - expect(page.items.length, - page.isLast ? 
n - (totalPages - 1) * pageSize - : pageSize ); - page.next().then((page) { - if (page != null) { - handlingPage(page); - } else { - expect(pageCount, totalPages); - completer.complete(); - } - }); - } + var completer = new Completer(); + var mock = new MockClient(); + registerQueryMock(mock, n, pageSize, topic: topic); + + handlingPage(page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? n - (totalPages - 1) * pageSize + : pageSize ); + page.next().then((page) { + if (page != null) { + handlingPage(page); + } else { + expect(pageCount, totalPages); + completer.complete(); + } + }); + } - var api = new PubSub(mock, PROJECT); - api.pageSubscriptions(pageSize: pageSize).then(handlingPage); + var api = new PubSub(mock, PROJECT); + api.pageSubscriptions(topic: topic, pageSize: pageSize) + .then(handlingPage); - return completer.future; - } + return completer.future; + } - return runTest(70, 50) - .then((_) => runTest(99, 1)) - .then((_) => runTest(99, 50)) - .then((_) => runTest(99, 98)) - .then((_) => runTest(99, 99)) - .then((_) => runTest(99, 100)) - .then((_) => runTest(100, 1)) - .then((_) => runTest(100, 50)) - .then((_) => runTest(100, 100)) - .then((_) => runTest(101, 50)); + test('multiple', () { + return multipleTest(70, 50, null) + .then((_) => multipleTest(99, 1, null)) + .then((_) => multipleTest(99, 50, null)) + .then((_) => multipleTest(99, 98, null)) + .then((_) => multipleTest(99, 99, null)) + .then((_) => multipleTest(99, 100, null)) + .then((_) => multipleTest(100, 1, null)) + .then((_) => multipleTest(100, 50, null)) + .then((_) => multipleTest(100, 100, null)) + .then((_) => multipleTest(101, 50, null)) + .then((_) => multipleTest(70, 50, 'topic')) + .then((_) => multipleTest(99, 1, 'topic')) + .then((_) => multipleTest(99, 50, 'topic')) + .then((_) => multipleTest(99, 98, 'topic')) + .then((_) => multipleTest(99, 99, 'topic')) + .then((_) => multipleTest(99, 100, 'topic')) + .then((_) => multipleTest(100, 1, 'topic')) + .then((_) => multipleTest(100, 50, 'topic')) + .then((_) => multipleTest(100, 100, 'topic')) + .then((_) => multipleTest(101, 50, 'topic')); }); }); }); From 4b486d0aa0fc648abc550c0bc43a4cccf47bd277 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Tue, 9 Sep 2014 11:46:25 +0200 Subject: [PATCH 005/239] Check for cancelled after calling into the sync stream controller R=lrn@google.com BUG= Review URL: https://codereview.chromium.org//552233002 --- pkgs/gcloud/lib/src/pubsub_impl.dart | 4 ++-- pkgs/gcloud/test/pubsub_test.dart | 8 ++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 8283ae44..6234a8c2 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -194,7 +194,7 @@ class _PubSubImpl implements PubSub { page.items.forEach(controller.add); if (page.isLast) { controller.close(); - } else if (!paused) { + } else if (!paused && !cancelled) { page.next().then(handlePage, onError: handleError); } } @@ -272,7 +272,7 @@ class _PubSubImpl implements PubSub { page.items.forEach(controller.add); if (page.isLast) { controller.close(); - } else if (!paused) { + } else if (!paused && !cancelled) { page.next().then(handlePage, onError: handleError); } } diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub_test.dart index 6f70affb..cb025e0f 100644 --- a/pkgs/gcloud/test/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub_test.dart @@ 
-303,9 +303,7 @@ main() { test('cancel', () { var mock = new MockClient(); - // There will be two calls to the mock as the cancel happen after - // processing the first result which will trigger a second request. - registerQueryMock(mock, 170, 50, 2); + registerQueryMock(mock, 170, 50, 1); var api = new PubSub(mock, PROJECT); var subscription; @@ -699,9 +697,7 @@ main() { test('cancel', () { var mock = new MockClient(); - // There will be two calls to the mock as the cancel happen after - // processing the first result which will trigger a second request. - registerQueryMock(mock, 170, 50, totalCalls: 2); + registerQueryMock(mock, 170, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); var subscription; From 7973523827d127aca7929a68bfbf30f062f9d98b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Tue, 9 Sep 2014 12:19:43 +0200 Subject: [PATCH 006/239] Refactor code turning a series of pages into a stream The class StreamFromPages is public in the common library as I expect it to be used for other parts of the gcloud pacakge. R=lrn@google.com BUG= Review URL: https://codereview.chromium.org//556783002 --- pkgs/gcloud/lib/common.dart | 62 +++++++++++++++- pkgs/gcloud/lib/src/pubsub_impl.dart | 105 +++------------------------ 2 files changed, 69 insertions(+), 98 deletions(-) diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index afd532ec..430e3dfd 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.pubsub; +library gcloud.common; import 'dart:async'; @@ -24,4 +24,62 @@ abstract class Page { /// If [next] is called on the last page the returned future completes /// with `null`. Future> next({int pageSize}); -} \ No newline at end of file +} + +typedef Future> FirstPageProvider(int pageSize); + +/// Helper class to turn a series of pages into a stream. 
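///
/// A minimal usage sketch (assuming a `PubSub` instance `pubsub` is in
/// scope; any function returning a `Future` of a first [Page] works):
///
///     Stream topics = new StreamFromPages(
///         (int pageSize) => pubsub.pageTopics(pageSize: pageSize)).stream;
///
/// Each item of each page is added to the stream; pausing, resuming or
/// cancelling the stream subscription controls whether further pages are
/// fetched.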
+class StreamFromPages { + static const int _PAGE_SIZE = 50; + final FirstPageProvider _firstPageProvider; + bool _pendingRequest = false; + bool _paused = false; + bool _cancelled = false; + Page _currentPage; + StreamController _controller; + + StreamFromPages(this._firstPageProvider) { + _controller = new StreamController(sync: true, onListen: _onListen, + onPause: _onPause, onResume: _onResume, + onCancel: _onCancel); + } + + Stream get stream => _controller.stream; + + void _handleError(e, s) { + _controller.addError(e, s); + _controller.close(); + } + + void _handlePage(Page page) { + if (_cancelled) return; + _pendingRequest = false; + _currentPage = page; + page.items.forEach(_controller.add); + if (page.isLast) { + _controller.close(); + } else if (!_paused && !_cancelled) { + page.next().then(_handlePage, onError: _handleError); + } + } + + _onListen() { + int pageSize = _PAGE_SIZE; + _pendingRequest = true; + _firstPageProvider(pageSize).then(_handlePage, onError: _handleError); + } + + _onPause() { _paused = true; } + + _onResume() { + _paused = false; + if (_pendingRequest) return; + _pendingRequest = true; + _currentPage.next().then(_handlePage, onError: _handleError); + } + + _onCancel() { + _cancelled = true; + } + +} diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 6234a8c2..3b42cdfe 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -5,7 +5,6 @@ part of gcloud.pubsub; class _PubSubImpl implements PubSub { - static const int _DEFAULT_LIST_PAGE_SIZE = 50; final http.Client _client; final String project; final pubsub.PubsubApi _api; @@ -176,54 +175,11 @@ class _PubSubImpl implements PubSub { } Stream listTopics() { - bool pendingRequest = false; - bool paused = false; - bool cancelled = false; - Page currentPage; - StreamController controller; - - handleError(e, s) { - controller.addError(e, s); - controller.close(); + Future> firstPage(pageSize) { + return _listTopics(pageSize, null) + .then((response) => new _TopicPageImpl(this, pageSize, response)); } - - handlePage(Page page) { - if (cancelled) return; - pendingRequest = false; - currentPage = page; - page.items.forEach(controller.add); - if (page.isLast) { - controller.close(); - } else if (!paused && !cancelled) { - page.next().then(handlePage, onError: handleError); - } - } - - onListen() { - int pageSize = _DEFAULT_LIST_PAGE_SIZE; - pendingRequest = true; - _listTopics(pageSize, null) - .then((response) { - handlePage(new _TopicPageImpl(this, pageSize, response)); - }, - onError: handleError); - } - onPause() => paused = true; - onResume() { - paused = false; - if (pendingRequest) return; - pendingRequest = true; - currentPage.next().then(handlePage, onError: handleError); - } - onCancel() { - cancelled = true; - } - - controller = new StreamController(sync: true, onListen: onListen, - onPause: onPause, onResume: onResume, - onCancel: onCancel); - - return controller.stream; + return new StreamFromPages(firstPage).stream; } Future> pageTopics({int pageSize: 50}) { @@ -254,55 +210,12 @@ class _PubSubImpl implements PubSub { } Stream listSubscriptions([String query]) { - bool pendingRequest = false; - bool paused = false; - bool cancelled = false; - Page currentPage; - StreamController controller; - - handleError(e, s) { - controller.addError(e, s); - controller.close(); + Future> firstPage(pageSize) { + return _listSubscriptions(query, pageSize, null) + .then((response) => + new _SubscriptionPageImpl(this, query, pageSize, 
response)); } - - handlePage(Page page) { - if (cancelled) return; - pendingRequest = false; - currentPage = page; - page.items.forEach(controller.add); - if (page.isLast) { - controller.close(); - } else if (!paused && !cancelled) { - page.next().then(handlePage, onError: handleError); - } - } - - onListen() { - int pageSize = _DEFAULT_LIST_PAGE_SIZE; - pendingRequest = true; - _listSubscriptions(query, pageSize, null) - .then((response) { - handlePage(new _SubscriptionPageImpl( - this, query, pageSize, response)); - }, - onError: handleError); - } - onPause() => paused = true; - onResume() { - paused = false; - if (pendingRequest) return; - pendingRequest = true; - currentPage.next().then(handlePage, onError: handleError); - } - onCancel() { - cancelled = true; - } - - controller = new StreamController(sync: true, onListen: onListen, - onPause: onPause, onResume: onResume, - onCancel: onCancel); - - return controller.stream; + return new StreamFromPages(firstPage).stream; } Future> pageSubscriptions( From d1fe520c9e2f2eeb07ffff1378057f10e2408568 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 10 Sep 2014 12:44:22 +0200 Subject: [PATCH 007/239] Initial implementation of the gcloud Cloud Datastore API This CL adds an implementation to the already existent interface using the JSON-based REST API. This CL also adds end2end tests for the Datastore and DatastoreDB. R=sgjesse@google.com Review URL: https://codereview.chromium.org//552263002 --- pkgs/gcloud/lib/datastore.dart | 15 + pkgs/gcloud/lib/src/datastore_impl.dart | 565 ++++++++++ pkgs/gcloud/lib/src/db/db.dart | 2 +- pkgs/gcloud/lib/src/db/model_db.dart | 1 + .../test/datastore/e2e/datastore_test.dart | 999 ++++++++++++++++++ pkgs/gcloud/test/datastore/e2e/utils.dart | 97 ++ .../gcloud/test/datastore/error_matchers.dart | 50 + pkgs/gcloud/test/db/e2e/db_test.dart | 771 ++++++++++++++ pkgs/gcloud/test/db/e2e/metamodel_test.dart | 97 ++ 9 files changed, 2596 insertions(+), 1 deletion(-) create mode 100644 pkgs/gcloud/lib/src/datastore_impl.dart create mode 100644 pkgs/gcloud/test/datastore/e2e/datastore_test.dart create mode 100644 pkgs/gcloud/test/datastore/e2e/utils.dart create mode 100644 pkgs/gcloud/test/datastore/error_matchers.dart create mode 100644 pkgs/gcloud/test/db/e2e/db_test.dart create mode 100644 pkgs/gcloud/test/db/e2e/metamodel_test.dart diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 80c02e4b..e8e13160 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -6,11 +6,21 @@ library gcloud.datastore; import 'dart:async'; +class ApplicationError implements Exception { + final String message; + ApplicationError(this.message); + + String toString() => "ApplicationError: $message"; +} + + class DatastoreError implements Exception { final String message; DatastoreError([String message]) : message = (message != null ?message : 'DatastoreError: An unknown error occured'); + + String toString() => '$message'; } class UnknownDatastoreError extends DatastoreError { @@ -145,7 +155,10 @@ class FilterRelation { static const FilterRelation In = const FilterRelation._('IN'); final String name; + const FilterRelation._(this.name); + + String toString() => name; } class Filter { @@ -209,6 +222,8 @@ abstract class Datastore { Future rollback(Transaction transaction); Future> lookup(List keys, {Transaction transaction}); + + // TODO: Make this pageable. 
Future> query( Query query, {Partition partition, Transaction transaction}); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart new file mode 100644 index 00000000..8e2e3e5d --- /dev/null +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -0,0 +1,565 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.datastore_impl; + +import 'dart:async'; + +import 'package:http/http.dart' as http; + +import '../datastore.dart' as datastore; +import 'package:googleapis_beta/datastore/v1beta2.dart' as api; + +class TransactionImpl implements datastore.Transaction { + final String data; + TransactionImpl(this.data); +} + +class DatastoreImpl implements datastore.Datastore { + final api.DatastoreApi _api; + final String _project; + + DatastoreImpl(http.Client client, this._project) + : _api = new api.DatastoreApi(client); + + api.Key _convertDatastore2ApiKey(datastore.Key key) { + var apiKey = new api.Key(); + + apiKey.partitionId = new api.PartitionId() + ..datasetId = _project + ..namespace = key.partition.namespace; + + apiKey.path = key.elements.map((datastore.KeyElement element) { + var part = new api.KeyPathElement(); + part.kind = element.kind; + if (element.id is int) { + part.id = '${element.id}'; + } else if (element.id is String) { + part.name = element.id; + } + return part; + }).toList(); + + return apiKey; + } + + datastore.Key _convertApi2DatastoreKey(api.Key key) { + var elements = key.path.map((api.KeyPathElement element) { + if (element.id != null) { + return new datastore.KeyElement(element.kind, int.parse(element.id)); + } else if (element.name != null) { + return new datastore.KeyElement(element.kind, element.name); + } else { + throw new datastore.DatastoreError( + 'Invalid server response: Expected allocated name/id.'); + } + }).toList(); + + var partition; + if (key.partitionId != null) { + partition = new datastore.Partition(key.partitionId.namespace); + // TODO: assert projectId. + } + return new datastore.Key(elements, partition: partition); + } + + bool _compareApiKey(api.Key a, api.Key b) { + if (a.path.length != b.path.length) return false; + + // FIXME(Issue #2): Is this comparison working correctly? + if (a.partitionId != null) { + if (b.partitionId == null) return false; + if (a.partitionId.datasetId != b.partitionId.datasetId) return false; + if (a.partitionId.namespace != b.partitionId.namespace) return false; + } else { + if (b.partitionId != null) return false; + } + + for (int i = 0; i < a.path.length; i++) { + if (a.path[i].id != b.path[i].id || + a.path[i].name != b.path[i].name || + a.path[i].kind != b.path[i].kind) return false; + } + return true; + } + + _convertApi2DatastorePropertyValue(api.Value value) { + if (value.booleanValue != null) + return value.booleanValue; + else if (value.integerValue != null) + return int.parse(value.integerValue); + else if (value.doubleValue != null) + return value.doubleValue; + else if (value.stringValue != null) + return value.stringValue; + else if (value.dateTimeValue != null) + return value.dateTimeValue; + else if (value.blobValue != null) + return new datastore.BlobValue(value.blobValueAsBytes); + else if (value.keyValue != null) + return _convertApi2DatastoreKey(value.keyValue); + else if (value.listValue != null) + // FIXME(Issue #3): Consistently handle exceptions. 
+ throw new Exception('Cannot have lists inside lists.'); + else if (value.blobKeyValue != null) + throw new UnsupportedError('Blob keys are not supported.'); + else if (value.entityValue != null) + throw new UnsupportedError('Entity values are not supported.'); + return null; + } + + api.Value _convertDatastore2ApiPropertyValue( + value, bool indexed, {bool lists: true}) { + var apiValue = new api.Value() + ..indexed = indexed; + if (value == null) { + return apiValue; + } else if (value is bool) { + return apiValue + ..booleanValue = value; + } else if (value is int) { + return apiValue + ..integerValue = '$value'; + } else if (value is double) { + return apiValue + ..doubleValue = value; + } else if (value is String) { + return apiValue + ..stringValue = value; + } else if (value is DateTime) { + return apiValue + ..dateTimeValue = value; + } else if (value is datastore.BlobValue) { + return apiValue + ..blobValueAsBytes = value.bytes; + } else if (value is datastore.Key) { + return apiValue + ..keyValue = _convertDatastore2ApiKey(value); + } else if (value is List) { + if (!lists) { + // FIXME(Issue #3): Consistently handle exceptions. + throw new Exception('List values are not allowed.'); + } + + convertItem(i) + => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); + + return new api.Value() + ..listValue = value.map(convertItem).toList(); + } else { + throw new UnsupportedError( + 'Types ${value.runtimeType} cannot be used for serializing.'); + } + } + + _convertApi2DatastoreProperty(api.Property property) { + if (property.booleanValue != null) + return property.booleanValue; + else if (property.integerValue != null) + return int.parse(property.integerValue); + else if (property.doubleValue != null) + return property.doubleValue; + else if (property.stringValue != null) + return property.stringValue; + else if (property.dateTimeValue != null) + return property.dateTimeValue; + else if (property.blobValue != null) + return new datastore.BlobValue(property.blobValueAsBytes); + else if (property.keyValue != null) + return _convertApi2DatastoreKey(property.keyValue); + else if (property.listValue != null) + return + property.listValue.map(_convertApi2DatastorePropertyValue).toList(); + else if (property.blobKeyValue != null) + throw new UnsupportedError('Blob keys are not supported.'); + else if (property.entityValue != null) + throw new UnsupportedError('Entity values are not supported.'); + return null; + } + + api.Property _convertDatastore2ApiProperty( + value, bool indexed, {bool lists: true}) { + var apiProperty = new api.Property() + ..indexed = indexed; + if (value == null) { + } else if (value is bool) { + return apiProperty + ..booleanValue = value; + } else if (value is int) { + return apiProperty + ..integerValue = '$value'; + } else if (value is double) { + return apiProperty + ..doubleValue = value; + } else if (value is String) { + return apiProperty + ..stringValue = value; + } else if (value is DateTime) { + return apiProperty + ..dateTimeValue = value; + } else if (value is datastore.BlobValue) { + return apiProperty + ..blobValueAsBytes = value.bytes; + } else if (value is datastore.Key) { + return apiProperty + ..keyValue = _convertDatastore2ApiKey(value); + } else if (value is List) { + if (!lists) { + // FIXME(Issue #3): Consistently handle exceptions. 
+ throw new Exception('List values are not allowed.'); + } + convertItem(i) + => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); + return new api.Property()..listValue = value.map(convertItem).toList(); + } else { + throw new UnsupportedError( + 'Types ${value.runtimeType} cannot be used for serializing.'); + } + } + + datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { + var unindexedProperties = new Set(); + var properties = {}; + + if (entity.properties != null) { + entity.properties.forEach((String name, api.Property property) { + properties[name] = _convertApi2DatastoreProperty(property); + if (property.indexed == false) { + // TODO(Issue #$4): Should we support mixed indexed/non-indexed list + // values? + if (property.listValue != null) { + if (property.listValue.length > 0) { + var firstIndexed = property.listValue.first.indexed; + for (int i = 1; i < property.listValue.length; i++) { + if (property.listValue[i].indexed != firstIndexed) { + throw new Exception('Some list entries are indexed and some ' + 'are not. This is currently not supported.'); + } + } + if (firstIndexed == false) { + unindexedProperties.add(name); + } + } + } else { + unindexedProperties.add(name); + } + } + }); + } + return new datastore.Entity(_convertApi2DatastoreKey(entity.key), + properties, + unIndexedProperties: unindexedProperties); + } + + api.Entity _convertDatastore2ApiEntity(datastore.Entity entity) { + var apiEntity = new api.Entity(); + + apiEntity.key = _convertDatastore2ApiKey(entity.key); + apiEntity.properties = {}; + if (entity.properties != null) { + for (var key in entity.properties.keys) { + var value = entity.properties[key]; + bool indexed = false; + if (entity.unIndexedProperties != null) { + indexed = !entity.unIndexedProperties.contains(key); + } + var property = _convertDatastore2ApiPropertyValue(value, indexed); + apiEntity.properties[key] = property; + } + } + return apiEntity; + } + + static Map relationMapping = const { + datastore.FilterRelation.LessThan: 'LESS_THAN', + datastore.FilterRelation.LessThanOrEqual: 'LESS_THAN_OR_EQUAL', + datastore.FilterRelation.Equal: 'EQUAL', + datastore.FilterRelation.GreatherThan: 'GREATER_THAN', + datastore.FilterRelation.GreatherThanOrEqual: 'GREATER_THAN_OR_EQUAL', + // TODO(Issue #5): IN operator not supported currently. + }; + + api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) { + var pf = new api.PropertyFilter(); + var operator = relationMapping[filter.relation]; + // FIXME(Issue #5): Is this OK? + if (filter.relation == datastore.FilterRelation.In) { + operator = 'EQUAL'; + } + + if (operator == null) { + throw new ArgumentError('Unknown filter relation: ${filter.relation}.'); + } + pf.operator = operator; + pf.property = new api.PropertyReference()..name = filter.name; + + // FIXME(Issue #5): Is this OK? 
+ var value = filter.value; + if (filter.relation == datastore.FilterRelation.In) { + if (value is List && value.length == 1) { + value = value.first; + } else { + throw new ArgumentError('List values not supported'); + } + } + + pf.value = _convertDatastore2ApiPropertyValue(value, true, lists: false); + return new api.Filter()..propertyFilter = pf; + } + + api.Filter _convertDatastoreAncestorKey2ApiFilter(datastore.Key key) { + var pf = new api.PropertyFilter(); + pf.operator = 'HAS_ANCESTOR'; + pf.property = new api.PropertyReference()..name = '__key__'; + pf.value = new api.Value()..keyValue = _convertDatastore2ApiKey(key); + return new api.Filter()..propertyFilter = pf; + } + + api.Filter _convertDatastore2ApiFilters(List filters, + datastore.Key ancestorKey) { + if ((filters == null || filters.length == 0) && ancestorKey == null) { + return null; + } + + var compFilter = new api.CompositeFilter(); + if (filters != null) { + compFilter.filters = filters.map(_convertDatastore2ApiFilter).toList(); + } + if (ancestorKey != null) { + var filter = _convertDatastoreAncestorKey2ApiFilter(ancestorKey); + if (compFilter.filters == null) { + compFilter.filters = [filter]; + } else { + compFilter.filters.add(filter); + } + } + compFilter.operator = 'AND'; + return new api.Filter()..compositeFilter = compFilter; + } + + api.PropertyOrder _convertDatastore2ApiOrder(datastore.Order order) { + var property = new api.PropertyReference()..name = order.propertyName; + var direction = order.direction == datastore.OrderDirection.Ascending + ? 'ASCENDING' : 'DESCENDING'; + return new api.PropertyOrder() + ..direction = direction + ..property = property; + } + + List _convertDatastore2ApiOrders( + List orders) { + if (orders == null) return null; + + return orders.map(_convertDatastore2ApiOrder).toList(); + } + + Future _handleError(error, stack) { + if (error is api.DetailedApiRequestError) { + if (error.status == 400) { + return new Future.error( + new datastore.ApplicationError(error.message), stack); + } else if (error.status == 409) { + // NOTE: This is reported as: + // "too much contention on these datastore entities" + // TODO: + return new Future.error(new datastore.TransactionAbortedError(), stack); + } else if (error.status == 412) { + return new Future.error(new datastore.NeedIndexError(), stack); + } + } + return new Future.error(error, stack); + } + + Future> allocateIds(List keys) { + var request = new api.AllocateIdsRequest(); + request..keys = keys.map(_convertDatastore2ApiKey).toList(); + return _api.datasets.allocateIds(request, _project).then((response) { + return response.keys.map(_convertApi2DatastoreKey).toList(); + }, onError: _handleError); + } + + Future beginTransaction( + {bool crossEntityGroup: false}) { + var request = new api.BeginTransactionRequest(); + // TODO: Should this be made configurable? 
+ request.isolationLevel = 'SERIALIZABLE'; + return _api.datasets.beginTransaction(request, _project).then((result) { + return new TransactionImpl(result.transaction); + }, onError: _handleError); + } + + Future commit({List inserts, + List autoIdInserts, + List deletes, + datastore.Transaction transaction}) { + var request = new api.CommitRequest(); + + if (transaction != null) { + request.mode = 'TRANSACTIONAL'; + request.transaction = (transaction as TransactionImpl).data; + } else { + request.mode = 'NON_TRANSACTIONAL'; + } + + request.mutation = new api.Mutation(); + if (inserts != null) { + request.mutation.upsert = new List(inserts.length); + for (int i = 0; i < inserts.length; i++) { + request.mutation.upsert[i] = _convertDatastore2ApiEntity(inserts[i]); + } + } + if (autoIdInserts != null) { + request.mutation.insertAutoId = new List(autoIdInserts.length); + for (int i = 0; i < autoIdInserts.length; i++) { + request.mutation.insertAutoId[i] = + _convertDatastore2ApiEntity(autoIdInserts[i]); + } + } + if (deletes != null) { + request.mutation.delete = new List(deletes.length); + for (int i = 0; i < deletes.length; i++) { + request.mutation.delete[i] = _convertDatastore2ApiKey(deletes[i]); + } + } + return _api.datasets.commit(request, _project).then((result) { + var keys; + if (autoIdInserts != null && autoIdInserts.length > 0) { + keys = result + .mutationResult + .insertAutoIdKeys + .map(_convertApi2DatastoreKey).toList(); + } + return new datastore.CommitResult(keys); + }, onError: _handleError); + } + + Future> lookup(List keys, + {datastore.Transaction transaction}) { + var apiKeys = keys.map(_convertDatastore2ApiKey).toList(); + var request = new api.LookupRequest(); + request.keys = apiKeys; + if (transaction != null) { + // TODO: Make readOptions more configurable. + request.readOptions = new api.ReadOptions(); + request.readOptions.transaction = (transaction as TransactionImpl).data; + } + return _api.datasets.lookup(request, _project).then((response) { + if (response.deferred != null && response.deferred.length > 0) { + throw new datastore.DatastoreError( + 'Could not successfully look up all keys due to resource ' + 'constraints.'); + } + + // NOTE: This is worst-case O(n^2)! + // Maybe we can optimize this somehow. But the API says: + // message LookupResponse { + // // The order of results in these fields is undefined and has no relation to + // // the order of the keys in the input. + // + // // Entities found as ResultType.FULL entities. + // repeated EntityResult found = 1; + // + // // Entities not found as ResultType.KEY_ONLY entities. + // repeated EntityResult missing = 2; + // + // // A list of keys that were not looked up due to resource constraints. 
+ // repeated Key deferred = 3; + // } + var entities = new List(apiKeys.length); + for (int i = 0; i < apiKeys.length; i++) { + var apiKey = apiKeys[i]; + + bool found = false; + + if (response.found != null) { + for (var result in response.found) { + if (_compareApiKey(apiKey, result.entity.key)) { + entities[i] = _convertApi2DatastoreEntity(result.entity); + found = true; + break; + } + } + } + + if (found) continue; + + if (response.missing != null) { + for (var result in response.missing) { + if (_compareApiKey(apiKey, result.entity.key)) { + entities[i] = null; + found = true; + break; + } + } + } + + if (!found) { + throw new datastore.DatastoreError('Invalid server response: ' + 'Tried to lookup ${apiKey.toJson()} but entity was neither in ' + 'missing nor in found.'); + } + } + return entities; + }, onError: _handleError); + } + + Future> query(datastore.Query query, + {datastore.Partition partition, + datastore.Transaction transaction}) { + var apiQuery = new api.Query() + ..filter = _convertDatastore2ApiFilters(query.filters, + query.ancestorKey) + ..order = _convertDatastore2ApiOrders(query.orders) + ..limit = query.limit + ..offset = query.offset; + + if (query.kind != null) { + apiQuery.kinds = [new api.KindExpression()..name = query.kind]; + } + + var request = new api.RunQueryRequest(); + request.query = apiQuery; + if (transaction != null) { + // TODO: Make readOptions more configurable. + request.readOptions = new api.ReadOptions(); + request.readOptions.transaction = (transaction as TransactionImpl).data; + } + if (partition != null) { + request.partitionId = new api.PartitionId() + ..namespace = partition.namespace; + } + + var results = []; + Future next({String lastEndCursor}) { + apiQuery.startCursor = lastEndCursor; + return _api.datasets.runQuery(request, _project).then((result) { + var batch = result.batch; + if (batch.entityResults != null) { + for (var result in batch.entityResults) { + results.add(_convertApi2DatastoreEntity(result.entity)); + } + } + if (result.batch.moreResults == 'NOT_FINISHED') { + if (result.batch.endCursor == null) { + throw new datastore.DatastoreError( + 'Server did not supply an end cursor, even though the query ' + 'is not done.'); + } + return next(lastEndCursor: result.batch.endCursor); + } else { + return results; + } + }); + } + + return next().catchError(_handleError); + } + + Future rollback(datastore.Transaction transaction) { + // TODO: Handle [transaction] + var request = new api.RollbackRequest() + ..transaction = (transaction as TransactionImpl).data; + return _api.datasets.rollback(request, _project).catchError(_handleError); + } +} diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 0999613b..7a73addf 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -266,7 +266,7 @@ class DatastoreDB { */ Future beginTransaction({bool crossEntityGroup: false}) { return datastore.beginTransaction(crossEntityGroup: crossEntityGroup) - .then((datastore.Transaction transaction) { + .then((transaction) { return new Transaction(this, transaction); }); } diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 6aa9c48f..362a6168 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -69,6 +69,7 @@ class ModelDB { Key key = namespace.emptyKey; for (var element in datastoreKey.elements) { var type = _typeByModelDescription[_modelDescriptionByKind[element.kind]]; + assert (type != null); key = 
key.append(type, id: element.id); } return key; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test.dart new file mode 100644 index 00000000..9b1bb91f --- /dev/null +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test.dart @@ -0,0 +1,999 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library datastore_test; + +/// NOTE: In order to run these tests, the following datastore indices must +/// exist: +/// $ cat index.yaml +/// indexes: +/// - kind: TestQueryKind +/// ancestor: no +/// properties: +/// - name: indexedProp +/// direction: asc +/// - name: blobPropertyIndexed +/// direction: asc +/// +/// - kind: TestQueryKind +/// ancestor: no +/// properties: +/// - name: listproperty +/// - name: test_property +/// direction: desc +/// $ gcloud preview datastore create-indexes . +/// 02:19 PM Host: appengine.google.com +/// 02:19 PM Uploading index definitions. + + +import 'dart:async'; + +import 'package:gcloud/datastore.dart'; +import 'package:unittest/unittest.dart'; + +import '../error_matchers.dart'; +import 'utils.dart'; + +Future sleep(Duration duration) { + var completer = new Completer(); + new Timer(duration, completer.complete); + return completer.future; +} + +runTests(Datastore datastore) { + Future withTransaction(Function f, {bool xg: false}) { + return datastore.beginTransaction(crossEntityGroup: xg).then(f); + } + + Future> insert(List entities, + List autoIdEntities, + {bool transactional: true}) { + if (transactional) { + return withTransaction((Transaction transaction) { + return datastore.commit(inserts: entities, + autoIdInserts: autoIdEntities, + transaction: transaction).then((result) { + if (autoIdEntities != null && autoIdEntities.length > 0) { + expect(result.autoIdInsertKeys.length, + equals(autoIdEntities.length)); + } + return result.autoIdInsertKeys; + }); + }, xg: true); + } else { + return datastore.commit(inserts: entities, autoIdInserts: autoIdEntities) + .then((result) { + if (autoIdEntities != null && autoIdEntities.length > 0) { + expect(result.autoIdInsertKeys.length, + equals(autoIdEntities.length)); + } + return result.autoIdInsertKeys; + }); + } + } + + Future delete(List keys, {bool transactional: true}) { + if (transactional) { + return withTransaction((Transaction t) { + return datastore.commit(deletes: keys, transaction: t) + .then((result) => null); + }, xg: true); + } else { + return datastore.commit(deletes: keys).then((_) => _); + } + } + + Future> lookup(List keys, {bool transactional: true}) { + if (transactional) { + return withTransaction((Transaction transaction) { + return datastore.lookup(keys, transaction: transaction); + }, xg: true); + } else { + return datastore.lookup(keys); + } + } + + bool isValidKey(Key key, {bool ignoreIds: false}) { + if (key.elements.length == 0) return false; + + for (var element in key.elements) { + if (element.kind == null || element.kind is! String) return false; + if (!ignoreIds) { + if (element.id == null || + (element.id is! String && element.id is! 
int)) { + return false; + } + } + } + return true; + } + + bool compareKey(Key a, Key b, {bool ignoreIds: false}) { + if (a.partition != b.partition) return false; + if (a.elements.length != b.elements.length) return false; + for (int i = 0; i < a.elements.length; i++) { + if (a.elements[i].kind != b.elements[i].kind) return false; + if (!ignoreIds && a.elements[i].id != b.elements[i].id) return false; + } + return true; + } + + bool compareEntity(Entity a, Entity b, {bool ignoreIds: false}) { + if (!compareKey(a.key, b.key, ignoreIds: ignoreIds)) return false; + if (a.properties.length != b.properties.length) return false; + for (var key in a.properties.keys) { + if (!b.properties.containsKey(key)) return false; + if (a.properties[key] != null && a.properties[key] is List) { + var aList = a.properties[key]; + var bList = b.properties[key]; + if (aList.length != bList.length) return false; + for (var i = 0; i < aList.length; i++) { + if (aList[i] != bList[i]) return false; + } + } else if (a.properties[key] is BlobValue) { + if (b.properties[key] is BlobValue) { + var b1 = (a.properties[key] as BlobValue).bytes; + var b2 = (b.properties[key] as BlobValue).bytes; + if (b1.length != b2.length) return false; + for (var i = 0; i < b1.length; i++) { + if (b1[i] != b2[i]) return false; + } + return true; + } + return false; + } else { + if (a.properties[key] != b.properties[key]) { + return false; + } + } + } + return true; + } + + group('e2e_datastore', () { + group('insert', () { + Future> testInsert(List entities, + {bool transactional: false, bool xg: false, bool unnamed: true}) { + Future> test(Transaction transaction) { + return datastore.commit(autoIdInserts: entities, + transaction: transaction) + .then((CommitResult result) { + expect(result.autoIdInsertKeys.length, equals(entities.length)); + + for (var i = 0; i < result.autoIdInsertKeys.length; i++) { + var key = result.autoIdInsertKeys[i]; + expect(isValidKey(key), isTrue); + if (unnamed) { + expect(compareKey(key, entities[i].key, ignoreIds: true), + isTrue); + } else { + expect(compareKey(key, entities[i].key), isTrue); + } + } + return result.autoIdInsertKeys; + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + Future> testInsertNegative(List entities, + {bool transactional: false, bool xg: false}) { + test(Transaction transaction) { + expect(datastore.commit(inserts: entities, + transaction: transaction), + throwsA(isApplicationError)); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + var unnamedEntities1 = buildEntities(42, 43); + var unnamedEntities5 = buildEntities(1, 6); + var unnamedEntities20 = buildEntities(6, 26); + var named20000 = buildEntities( + 1000, 21001, idFunction: (i) => 'named_${i}_of_10000'); + + test('insert', () { + return testInsert(unnamedEntities5, transactional: false).then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { + entities.forEach((Entity e) => expect(e, isNull)); + }); + }); + }); + }); + + test('insert_transactional', () { + return testInsert(unnamedEntities1, transactional: true).then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { + entities.forEach((Entity e) => expect(e, isNull)); + }); + }); + }); + }); + + test('insert_transactional_xg', () { + return testInsert( + unnamedEntities5, transactional: true, xg: true).then((keys) { + return delete(keys).then((_) { + return lookup(keys).then((List entities) { 
+ entities.forEach((Entity e) => expect(e, isNull)); + }); + }); + }); + }); + + // Does not work with cloud datastore REST api, why? + test('negative_insert_transactional', () { + return testInsertNegative(unnamedEntities5, transactional: true); + }); + + // Does not work with cloud datastore REST api, why? + test('negative_insert_transactional_xg', () { + return testInsertNegative( + unnamedEntities20, transactional: true, xg: true); + }); + + test('negative_insert_20000_entities', () { + // Maybe it should not be a [DataStoreError] here? + // FIXME/TODO: This was adapted + expect(datastore.commit(inserts: named20000), + throws); + }); + + // TODO: test invalid inserts (like entities without key, ...) + }); + + group('allocate_ids', () { + test('allocate_ids_query', () { + compareResult(List keys, List completedKeys) { + expect(completedKeys.length, equals(keys.length)); + for (int i = 0; i < keys.length; i++) { + var insertedKey = keys[i]; + var completedKey = completedKeys[i]; + + expect(completedKey.elements.length, + equals(insertedKey.elements.length)); + for (int j = 0; j < insertedKey.elements.length - 1; j++) { + expect(completedKey.elements[j], equals(insertedKey.elements[j])); + } + for (int j = insertedKey.elements.length - 1; + j < insertedKey.elements.length; + j++) { + expect(completedKey.elements[j].kind, + equals(insertedKey.elements[j].kind)); + expect(completedKey.elements[j].id, isNotNull); + expect(completedKey.elements[j].id, isInt); + } + } + } + + var keys = buildKeys(1, 4); + return datastore.allocateIds(keys).then((List completedKeys) { + compareResult(keys, completedKeys); + // TODO: Make sure we can insert these keys + // FIXME: Insert currently doesn't through if entities already exist! + }); + }); + }); + + group('lookup', () { + Future testLookup(List keysToLookup, + List entitiesToLookup, + {bool transactional: false, + bool xg: false, + bool negative: false, + bool named: false}) { + expect(keysToLookup.length, equals(entitiesToLookup.length)); + for (var i = 0; i < keysToLookup.length; i++) { + expect(compareKey(keysToLookup[i], + entitiesToLookup[i].key, + ignoreIds: !named), isTrue); + } + + Future test(Transaction transaction) { + return datastore.lookup(keysToLookup) + .then((List entities) { + expect(entities.length, equals(keysToLookup.length)); + if (negative) { + for (int i = 0; i < entities.length; i++) { + expect(entities[i], isNull); + } + } else { + for (var i = 0; i < entities.length; i++) { + expect(compareKey(entities[i].key, keysToLookup[i]), isTrue); + expect(compareEntity(entities[i], + entitiesToLookup[i], + ignoreIds: !named), isTrue); + } + } + if (transaction != null) { + return + datastore.commit(transaction: transaction).then((_) => null); + } + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + var unnamedEntities1 = buildEntities(42, 43); + var unnamedEntities5 = buildEntities(1, 6); + var unnamedEntities20 = buildEntities(6, 26); + var entitiesWithAllPropertyTypes = buildEntityWithAllProperties(1, 6); + + test('lookup', () { + return insert([], unnamedEntities20, transactional: false).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return testLookup(keys, unnamedEntities20).then((_) { + return delete(keys, transactional: false); + }); + }); + }); + + test('lookup_with_all_properties', () { + return insert(entitiesWithAllPropertyTypes, [], transactional: false) + .then((_) { + var keys = entitiesWithAllPropertyTypes.map((e) => e.key).toList(); + 
return testLookup(keys, entitiesWithAllPropertyTypes).then((_) { + return delete(keys, transactional: false); + }); + }); + }); + + test('lookup_transactional', () { + return insert([], unnamedEntities1).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return testLookup(keys, unnamedEntities1, transactional: true) + .then((_) => delete(keys)); + }); + }); + + test('lookup_transactional_xg', () { + return insert([], unnamedEntities5).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return testLookup( + keys, unnamedEntities5, transactional: true, xg: true).then((_) { + return delete(keys); + }); + }); + }); + + // TODO: ancestor lookups, string id lookups + }); + + group('delete', () { + Future testDelete(List keys, + {bool transactional: false, bool xg: false}) { + Future test(Transaction transaction) { + return datastore.commit(deletes: keys).then((_) { + if (transaction != null) { + return datastore.commit(transaction: transaction); + } + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + var unnamedEntities1 = buildEntities(42, 43); + var unnamedEntities5 = buildEntities(1, 6); + var unnamedEntities99 = buildEntities(6, 106); + + test('delete', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return lookup(keys, transactional: false).then((entities) { + entities.forEach((e) => expect(e, isNotNull)); + return testDelete(keys).then((_) { + return lookup(keys, transactional: false).then((entities) { + entities.forEach((e) => expect(e, isNull)); + }); + }); + }); + }); + }); + + // This should not work with [unamedEntities20], but is working! + // FIXME TODO FIXME : look into this. 
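// A likely explanation for the FIXME above: `testDelete` calls
// `datastore.commit(deletes: keys)` without passing `transaction`, so even the
// "transactional" variants below perform the deletes outside the transaction
// and afterwards only commit an empty transaction.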
+ test('delete_transactional', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return lookup(keys, transactional: false).then((entities) { + entities.forEach((e) => expect(e, isNotNull)); + return testDelete(keys, transactional: true).then((_) { + return lookup(keys, transactional: false).then((entities) { + entities.forEach((e) => expect(e, isNull)); + }); + }); + }); + }); + }); + + test('delete_transactional_xg', () { + return insert([], unnamedEntities99, transactional: false).then((keys) { + keys.forEach((key) => expect(isValidKey(key), isTrue)); + return lookup(keys, transactional: false).then((entities) { + expect(entities.length, equals(unnamedEntities99.length)); + entities.forEach((e) => expect(e, isNotNull)); + return testDelete(keys, transactional: true, xg: true).then((_) { + return lookup(keys, transactional: false).then((entities) { + expect(entities.length, equals(unnamedEntities99.length)); + entities.forEach((e) => expect(e, isNull)); + }); + }); + }); + }); + }); + + // TODO: ancestor deletes, string id deletes + }); + + group('rollback', () { + Future testRollback(List keys, {bool xg: false}) { + return withTransaction((Transaction transaction) { + return datastore.lookup(keys, transaction: transaction) + .then((List entitites) { + return datastore.rollback(transaction); + }); + }, xg: xg); + } + + var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); + var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); + + var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); + var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); + + test('rollback', () { + return testRollback(namedEntities1Keys); + }); + + test('rollback_xg', () { + return testRollback(namedEntities5Keys, xg: true); + }); + }); + + group('empty_commit', () { + Future testEmptyCommit( + List keys, {bool transactional: false, bool xg: false}) { + Future test(Transaction transaction) { + return datastore.lookup(keys, transaction: transaction) + .then((List entitites) { + return datastore.commit(transaction: transaction); + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } else { + return test(null); + } + } + + var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); + var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); + var namedEntities20 = buildEntities(6, 26, idFunction: (i) => "i$i"); + + var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); + var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); + var namedEntities20Keys = namedEntities20.map((e) => e.key).toList(); + + test('empty_commit', () { + return testEmptyCommit(namedEntities20Keys); + }); + + test('empty_commit_transactional', () { + return testEmptyCommit(namedEntities1Keys); + }); + + test('empty_commit_transactional_xg', () { + return testEmptyCommit(namedEntities5Keys); + }); + + test('negative_empty_commit_xg', () { + expect(testEmptyCommit( + namedEntities20Keys, transactional: true, xg: true), + throwsA(isApplicationError)); + }); + }); + + group('conflicting_transaction', () { + Future testConflictingTransaction( + List entities, {bool xg: false}) { + Future test( + List entities, Transaction transaction, value) { + + // Change entities: + var changedEntities = new List(entities.length); + for (int i = 0; i < entities.length; i++) { + var entity = entities[i]; + var newProperties = new Map.from(entity.properties); 
+ for (var prop in newProperties.keys) { + newProperties[prop] = "${newProperties[prop]}conflict$value"; + } + changedEntities[i] = + new Entity(entity.key, newProperties); + } + return datastore.commit(inserts: changedEntities, + transaction: transaction); + } + + // Insert first + return insert(entities, [], transactional: true).then((_) { + var keys = entities.map((e) => e.key).toList(); + + var NUM_TRANSACTIONS = 10; + + // Start transactions + var transactions = []; + for (var i = 0; i < NUM_TRANSACTIONS; i++) { + transactions.add(datastore.beginTransaction(crossEntityGroup: xg)); + } + return Future.wait(transactions) + .then((List transactions) { + // Do a lookup for the entities in every transaction + var lookups = []; + for (var transaction in transactions) { + lookups.add( + datastore.lookup(keys, transaction: transaction)); + } + return Future.wait(lookups).then((List> results) { + // Do a conflicting commit in every transaction. + var commits = []; + for (var i = 0; i < transactions.length; i++) { + var transaction = transactions[i]; + commits.add(test(results[i], transaction, i)); + } + return Future.wait(commits); + }); + }); + }); + } + + var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); + var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); + + test('conflicting_transaction', () { + expect(testConflictingTransaction(namedEntities1), + throwsA(isTransactionAbortedError)); + }); + + test('conflicting_transaction_xg', () { + expect(testConflictingTransaction(namedEntities5, xg: true), + throwsA(isTransactionAbortedError)); + }); + }); + + group('query', () { + Future testQuery(String kind, + {List filters, + List orders, + bool transactional: false, + bool xg: false, + int offset, + int limit}) { + Future> test(Transaction transaction) { + var query = new Query( + kind: kind, filters: filters, orders: orders, + offset: offset, limit: limit); + return datastore.query(query).then((List entities) { + if (transaction != null) { + return datastore.commit(transaction: transaction) + .then((_) => entities); + } + return entities; + }); + } + + if (transactional) { + return withTransaction(test, xg: xg); + } + return test(null); + } + + Future testQueryAndCompare(String kind, + List expectedEntities, + {List filters, + List orders, + bool transactional: false, + bool xg: false, + bool correctOrder: true, + int offset, + int limit}) { + return testQuery(kind, + filters: filters, + orders: orders, + transactional: transactional, + xg: xg, + offset: offset, + limit: limit).then((List entities) { + expect(entities.length, equals(expectedEntities.length)); + + if (correctOrder) { + for (int i = 0; i < entities.length; i++) { + expect(compareEntity(entities[i], expectedEntities[i]), isTrue); + } + } else { + for (int i = 0; i < entities.length; i++) { + bool found = false; + for (int j = 0; j < expectedEntities.length; j++) { + if (compareEntity(entities[i], expectedEntities[i])) { + found = true; + } + } + expect(found, isTrue); + } + } + }); + } + Future testOffsetLimitQuery(String kind, + List expectedEntities, + {List orders, + bool transactional: false, + bool xg: false}) { + // We query for all subsets of expectedEntities + // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. 
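// (For n expected entities the nested loops below issue n * (n + 1) / 2
// offset/limit queries, plus one final query with an oversized limit.)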
+ List queryTests = [];
+ for (int start = 0; start < expectedEntities.length; start++) {
+ for (int end = start; end < expectedEntities.length; end++) {
+ int offset = start;
+ int limit = end - start;
+ var entities = expectedEntities.sublist(offset, offset + limit);
+ queryTests.add(() {
+ return testQueryAndCompare(
+ kind, entities, transactional: transactional,
+ xg: xg, orders: orders,
+ offset: offset, limit: limit);
+ });
+ }
+ }
+ // Query with limit higher than the number of results.
+ queryTests.add(() {
+ return testQueryAndCompare(
+ kind, expectedEntities, transactional: transactional,
+ xg: xg, orders: orders,
+ offset: 0, limit: expectedEntities.length * 10);
+ });
+
+ return Future.forEach(queryTests, (f) => f());
+ }
+
+ const TEST_QUERY_KIND = 'TestQueryKind';
+ var stringNamedEntities = buildEntities(
+ 1, 6, idFunction: (i) => 'str$i', kind: TEST_QUERY_KIND);
+ var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList();
+
+ var QUERY_KEY = TEST_PROPERTY_KEY_PREFIX;
+ var QUERY_UPPER_BOUND = "${TEST_PROPERTY_VALUE_PREFIX}4";
+ var QUERY_LOWER_BOUND = "${TEST_PROPERTY_VALUE_PREFIX}1";
+ var QUERY_LIST_ENTRY = '${TEST_LIST_VALUE}2';
+ var QUERY_INDEX_VALUE = '${TEST_INDEXED_PROPERTY_VALUE_PREFIX}1';
+
+ var reverseOrderFunction = (Entity a, Entity b) {
+ // Reverse the order
+ return -1 * (a.properties[QUERY_KEY] as String)
+ .compareTo(b.properties[QUERY_KEY]);
+ };
+
+ var filterFunction = (Entity entity) {
+ var value = entity.properties[QUERY_KEY];
+ return value.compareTo(QUERY_UPPER_BOUND) == -1 &&
+ value.compareTo(QUERY_LOWER_BOUND) == 1;
+ };
+ var listFilterFunction = (Entity entity) {
+ var values = entity.properties[TEST_LIST_PROPERTY];
+ return values.contains(QUERY_LIST_ENTRY);
+ };
+ var indexFilterMatches = (Entity entity) {
+ return entity.properties[TEST_INDEXED_PROPERTY] == QUERY_INDEX_VALUE;
+ };
+
+ var sorted = stringNamedEntities.toList()..sort(reverseOrderFunction);
+ var filtered = stringNamedEntities.where(filterFunction).toList();
+ var sortedAndFiltered = sorted.where(filterFunction).toList();
+ var sortedAndListFiltered = sorted.where(listFilterFunction).toList();
+ var indexedEntity = sorted.where(indexFilterMatches).toList();
+ expect(indexedEntity.length, equals(1));
+
+ // Note:
+ // Non-ancestor queries (i.e. queries, not lookups) result in index scans.
+ // The index tables are updated in an "eventually consistent" way.
+ //
+ // So this can make tests flaky if the index updates take longer than the
+ // INDEX_UPDATE_DELAY constant below.
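// A minimal sketch of an alternative to the fixed delay below, assuming a
// hypothetical `probe` callback that re-runs one of the queries above: poll
// until the index reflects the expected number of entities, or give up after
// a bounded number of attempts. It reuses the `sleep` helper defined at the
// top of this file.
Future waitForIndex(Future probe(), int expectedCount,
    {Duration interval: const Duration(seconds: 2),
     int maxAttempts: 10}) {
  Future attempt(int remaining) {
    return probe().then((entities) {
      if (entities.length == expectedCount || remaining <= 0) return entities;
      return sleep(interval).then((_) => attempt(remaining - 1));
    });
  }
  return attempt(maxAttempts);
}
// Usage (sketch): waitForIndex(
//     () => datastore.query(new Query(kind: TEST_QUERY_KIND)),
//     stringNamedEntities.length);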
+ var INDEX_UPDATE_DELAY = const Duration(seconds: 10); + + var filters = [ + new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), + new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), + ]; + var listFilters = [ + new Filter(FilterRelation.In, TEST_LIST_PROPERTY, [QUERY_LIST_ENTRY]) + ]; + var indexedPropertyFilter = [ + new Filter(FilterRelation.Equal, + TEST_INDEXED_PROPERTY, + QUERY_INDEX_VALUE), + new Filter(FilterRelation.Equal, + TEST_BLOB_INDEXED_PROPERTY, + TEST_BLOB_INDEXED_VALUE) + ]; + var unIndexedPropertyFilter = [ + new Filter(FilterRelation.Equal, + TEST_UNINDEXED_PROPERTY, + QUERY_INDEX_VALUE) + ]; + + var orders = [new Order(OrderDirection.Decending, QUERY_KEY)]; + + test('query', () { + return insert(stringNamedEntities, []).then((keys) { + return sleep(INDEX_UPDATE_DELAY).then((_) { + var tests = [ + // EntityKind query + () => testQueryAndCompare( + TEST_QUERY_KIND, stringNamedEntities, transactional: false, + correctOrder: false), + () => testQueryAndCompare( + TEST_QUERY_KIND, stringNamedEntities, transactional: true, + correctOrder: false), + () => testQueryAndCompare( + TEST_QUERY_KIND, stringNamedEntities, transactional: true, + correctOrder: false, xg: true), + + // EntityKind query with order + () => testQueryAndCompare( + TEST_QUERY_KIND, sorted, transactional: false, + orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sorted, transactional: true, + orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sorted, transactional: false, xg: true, + orders: orders), + + // EntityKind query with filter + () => testQueryAndCompare( + TEST_QUERY_KIND, filtered, transactional: false, + filters: filters), + () => testQueryAndCompare( + TEST_QUERY_KIND, filtered, transactional: true, + filters: filters), + () => testQueryAndCompare( + TEST_QUERY_KIND, filtered, transactional: false, xg: true, + filters: filters), + + // EntityKind query with filter + order + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndFiltered, transactional: false, + filters: filters, orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndFiltered, transactional: true, + filters: filters, orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndFiltered, transactional: false, + xg: true, filters: filters, orders: orders), + + // EntityKind query with IN filter + order + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndListFiltered, transactional: false, + filters: listFilters, orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndListFiltered, transactional: true, + filters: listFilters, orders: orders), + () => testQueryAndCompare( + TEST_QUERY_KIND, sortedAndListFiltered, transactional: false, + xg: true, filters: listFilters, orders: orders), + + // Limit & Offset test + () => testOffsetLimitQuery( + TEST_QUERY_KIND, sorted, transactional: false, + orders: orders), + () => testOffsetLimitQuery( + TEST_QUERY_KIND, sorted, transactional: true, orders: orders), + () => testOffsetLimitQuery( + TEST_QUERY_KIND, sorted, transactional: false, + xg: true, orders: orders), + + // Query for indexed property + () => testQueryAndCompare( + TEST_QUERY_KIND, indexedEntity, transactional: false, + filters: indexedPropertyFilter), + () => testQueryAndCompare( + TEST_QUERY_KIND, indexedEntity, transactional: true, + filters: indexedPropertyFilter), + () => testQueryAndCompare( + TEST_QUERY_KIND, indexedEntity, transactional: false, + xg: true, filters: indexedPropertyFilter), + + // Query 
for un-indexed property
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: false,
+ filters: unIndexedPropertyFilter),
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: true,
+ filters: unIndexedPropertyFilter),
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: false,
+ xg: true, filters: unIndexedPropertyFilter),
+
+ // Delete results
+ () => delete(stringNamedKeys, transactional: true),
+
+ // Wait until the entity deletes are reflected in the indices.
+ () => sleep(INDEX_UPDATE_DELAY),
+
+ // Make sure queries don't return results
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: false),
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: true),
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: true, xg: true),
+ () => testQueryAndCompare(
+ TEST_QUERY_KIND, [], transactional: false,
+ filters: filters, orders: orders),
+ ];
+ return Future.forEach(tests, (f) => f());
+ });
+ });
+
+ // TODO: query by multiple keys, multiple sort orders, ...
+ });
+
+ test('ancestor_query', () {
+ /*
+ * This test creates a
+ * RootKind:1 -- This defines the entity group (no entity with that key)
+ * + SubKind:1 -- This is a subpath (no entity with that key)
+ * + SubSubKind:1 -- This is a real entity of kind SubSubKind
+ * + SubSubKind2:1 -- This is a real entity of kind SubSubKind2
+ */
+ var rootKey = new Key.fromParent('RootKind', 1);
+ var subKey = new Key.fromParent('SubKind', 1, parent: rootKey);
+ var subSubKey = new Key.fromParent('SubSubKind', 1, parent: subKey);
+ var subSubKey2 = new Key.fromParent('SubSubKind2', 1, parent: subKey);
+ var properties = { 'foo' : 'bar' };
+
+ var entity = new Entity(subSubKey, properties);
+ var entity2 = new Entity(subSubKey2, properties);
+
+ var orders = [new Order(OrderDirection.Ascending, '__key__')];
+
+ return datastore.commit(inserts: [entity, entity2]).then((_) {
+ var futures = [
+ () {
+ return sleep(INDEX_UPDATE_DELAY);
+ },
+ // Test that lookup only returns inserted entities.
+ () {
+ return datastore.lookup([rootKey, subKey, subSubKey, subSubKey2])
+ .then((List entities) {
+ expect(entities.length, 4);
+ expect(entities[0], isNull);
+ expect(entities[1], isNull);
+ expect(entities[2], isNotNull);
+ expect(entities[3], isNotNull);
+ expect(compareEntity(entity, entities[2]), isTrue);
+ expect(compareEntity(entity2, entities[3]), isTrue);
+ });
+ },
+
+ // Query by ancestor.
+ // - by [rootKey] + () { + var ancestorQuery = + new Query(ancestorKey: rootKey, orders: orders); + return datastore.query(ancestorQuery).then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + }, + // - by [subKey] + () { + var ancestorQuery = + new Query(ancestorKey: subKey, orders: orders); + return datastore.query(ancestorQuery).then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + }, + // - by [subSubKey] + () { + var ancestorQuery = new Query(ancestorKey: subSubKey); + return datastore.query(ancestorQuery).then((results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + }, + // - by [subSubKey2] + () { + var ancestorQuery = new Query(ancestorKey: subSubKey2); + return datastore.query(ancestorQuery).then((results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + }, + + // Query by ancestor and kind. + // - by [rootKey] + 'SubSubKind' + () { + var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind'); + return datastore.query(query).then((List results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + }, + // - by [rootKey] + 'SubSubKind2' + () { + var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind2'); + return datastore.query(query).then((List results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + }, + // - by [subSubKey] + 'SubSubKind' + () { + var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind'); + return datastore.query(query).then((List results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + }, + // - by [subSubKey2] + 'SubSubKind2' + () { + var query = + new Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); + return datastore.query(query).then((List results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + }, + // - by [subSubKey] + 'SubSubKind2' + () { + var query = + new Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); + return datastore.query(query).then((List results) { + expect(results.length, 0); + }); + }, + // - by [subSubKey2] + 'SubSubKind' + () { + var query = + new Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); + return datastore.query(query).then((List results) { + expect(results.length, 0); + }); + }, + + // Cleanup + () { + return datastore.commit(deletes: [subSubKey, subSubKey2]); + } + ]; + return Future.forEach(futures, (f) => f()).then(expectAsync((_) {})); + }); + }); + + }); + + }); +} diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart new file mode 100644 index 00000000..32fc4911 --- /dev/null +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -0,0 +1,97 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library raw_datastore_test_utils; + +import 'package:gcloud/datastore.dart'; + +const TEST_KIND = 'TestKind'; +const TEST_PROPERTY_KEY_PREFIX = 'test_property'; +const TEST_LIST_PROPERTY = 'listproperty'; +const TEST_LIST_VALUE = 'listvalue'; +const TEST_PROPERTY_VALUE_PREFIX = 'test_property'; + +const TEST_INDEXED_PROPERTY = 'indexedProp'; +const TEST_INDEXED_PROPERTY_VALUE_PREFIX = 'indexedValue'; +const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; +const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; +final TEST_BLOB_INDEXED_VALUE = new BlobValue([0xaa, 0xaa, 0xff, 0xff]); + + +buildKey(int i, {Function idFunction, String kind : TEST_KIND}) { + return new Key( + [new KeyElement(kind, idFunction == null ? null : idFunction(i))]); +} + +Map buildProperties(int i) { + var listValues = [ + 'foo', + '$TEST_LIST_VALUE$i', + ]; + + return { + TEST_PROPERTY_KEY_PREFIX : '$TEST_PROPERTY_VALUE_PREFIX$i', + TEST_LIST_PROPERTY : listValues, + TEST_INDEXED_PROPERTY : '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', + TEST_UNINDEXED_PROPERTY : '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', + TEST_BLOB_INDEXED_PROPERTY : TEST_BLOB_INDEXED_VALUE, + }; +} + +List buildKeys( + int from, int to, {Function idFunction, String kind : TEST_KIND}) { + var keys = []; + for (var i = from; i < to; i++) { + keys.add(buildKey(i, idFunction: idFunction, kind: kind)); + } + return keys; +} + +List buildEntities( + int from, int to, {Function idFunction, String kind : TEST_KIND}) { + var entities = []; + var unIndexedProperties = new Set(); + for (var i = from; i < to; i++) { + var key = buildKey(i, idFunction: idFunction, kind: kind); + var properties = buildProperties(i); + unIndexedProperties.add(TEST_UNINDEXED_PROPERTY); + entities.add( + new Entity(key, properties, unIndexedProperties: unIndexedProperties)); + } + return entities; +} + +List buildEntityWithAllProperties( + int from, int to, {String kind : TEST_KIND}) { + var us42 = const Duration(microseconds: 42); + var unIndexed = new Set.from(['blobProperty']); + + Map buildProperties(int i) { + return { + 'boolProperty' : true, + 'intProperty' : 42, + 'doubleProperty' : 4.2, + 'stringProperty' : 'foobar', + 'blobProperty' : new BlobValue([0xff, 0xff, 0xaa, 0xaa]), + 'blobPropertyIndexed' : new BlobValue([0xaa, 0xaa, 0xff, 0xff]), + 'dateProperty' : + new DateTime.fromMillisecondsSinceEpoch(1, isUtc: true).add(us42), + 'keyProperty' : buildKey(1, idFunction: (i) => 's$i', kind: kind), + 'listProperty' : [ + 42, + 4.2, + 'foobar', + buildKey(1, idFunction: (i) => 's$i', kind: 'TestKind'), + ], + }; + } + + var entities = []; + for (var i = from; i < to; i++) { + var key = buildKey(i, idFunction: (i) => 'allprop$i', kind: kind); + var properties = buildProperties(i); + entities.add(new Entity(key, properties, unIndexedProperties: unIndexed)); + } + return entities; +} diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart new file mode 100644 index 00000000..2a7e8656 --- /dev/null +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -0,0 +1,50 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library error_matchers; + +import 'package:unittest/unittest.dart'; +import 'package:gcloud/datastore.dart'; +import 'package:gcloud/db.dart'; + +class _ApplicationError extends TypeMatcher { + const _ApplicationError() : super("ApplicationError"); + bool matches(item, Map matchState) => item is ApplicationError; +} + + +class _DataStoreError extends TypeMatcher { + const _DataStoreError() : super("DataStoreError"); + bool matches(item, Map matchState) => item is DatastoreError; +} + +class _TransactionAbortedError extends TypeMatcher { + const _TransactionAbortedError() : super("TransactionAbortedError"); + bool matches(item, Map matchState) => item is TransactionAbortedError; +} + +class _NeedIndexError extends TypeMatcher { + const _NeedIndexError() : super("NeedIndexError"); + bool matches(item, Map matchState) => item is NeedIndexError; +} + +class _TimeoutError extends TypeMatcher { + const _TimeoutError() : super("TimeoutError"); + bool matches(item, Map matchState) => item is TimeoutError; +} + + +class _IntMatcher extends TypeMatcher { + const _IntMatcher() : super("IntMatcher"); + bool matches(item, Map matchState) => item is int; +} + +const isApplicationError = const _ApplicationError(); + +const isDataStoreError = const _DataStoreError(); +const isTransactionAbortedError = const _TransactionAbortedError(); +const isNeedIndexError = const _NeedIndexError(); +const isTimeoutError = const _TimeoutError(); + +const isInt = const _IntMatcher(); diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart new file mode 100644 index 00000000..715a9cc4 --- /dev/null +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -0,0 +1,771 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library db_test; + +/// NOTE: In order to run these tests, the following datastore indices must +/// exist: +/// $ cat index.yaml +/// indexes: +/// - kind: User +/// ancestor: no +/// properties: +/// - name: name +/// direction: asc +/// - name: nickname +/// direction: desc +/// +/// - kind: User +/// ancestor: no +/// properties: +/// - name: name +/// direction: desc +/// - name: nickname +/// direction: desc +/// +/// - kind: User +/// ancestor: no +/// properties: +/// - name: name +/// direction: desc +/// - name: nickname +/// direction: asc +/// +/// - kind: User +/// ancestor: no +/// properties: +/// - name: language +/// direction: asc +/// - name: name +/// direction: asc +/// +/// - kind: PolyPerson +/// ancestor: no +/// properties: +/// - name: class +/// direction: asc +/// - name: unIndexedName +/// direction: asc +/// +/// - kind: PolyPerson +/// ancestor: no +/// properties: +/// - name: class +/// direction: asc +/// - name: indexedName +/// direction: asc +/// +/// - kind: PolyPerson +/// ancestor: no +/// properties: +/// - name: class +/// direction: asc +/// - name: name +/// direction: asc +/// +/// $ gcloud preview datastore create-indexes . +/// 02:19 PM Host: appengine.google.com +/// 02:19 PM Uploading index definitions. 
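// For reference: the (name, nickname) User indexes listed above back the
// queries further down in this file that combine an inequality filter on
// `name` with `-name`/`-nickname` sort orders, e.g.
//
//   store.query(User)
//     ..filter('name >=', 'user2')
//     ..order('-name')
//     ..order('-nickname');
//
// and the (class, ...) PolyPerson indexes appear to back the PolyModel
// queries, which filter on the `class` discriminator property named in the
// index definitions above.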
+ +import 'dart:async'; + +import 'package:unittest/unittest.dart'; + +import 'package:gcloud/db.dart' as db; + +@db.ModelMetadata(const PersonDesc()) +class Person extends db.Model { + String name; + int age; + db.Key wife; + + operator==(Object other) => sameAs(other); + + sameAs(Object other) { + return other is Person && + id == other.id && + parentKey == other.parentKey && + name == other.name && + age == other.age && + wife == other.wife; + } +} + +@db.ModelMetadata(const UserDesc()) +class User extends Person { + String nickname; + List languages = const []; + + sameAs(Object other) { + if (!(super.sameAs(other) && other is User && nickname == other.nickname)) + return false; + + User user = other; + if (languages == null) { + if (user.languages == null) return true; + return false; + } + if (languages.length != user.languages.length) { + return false; + } + + for (int i = 0; i < languages.length; i++) { + if (languages[i] != user.languages[i]) { + return false; + } + } + return true; + } +} + +class PersonDesc extends db.ModelDescription { + final id = const db.IntProperty(); + final name = const db.StringProperty(); + final age = const db.IntProperty(); + final wife = const db.ModelKeyProperty(); + + const PersonDesc({String kind: 'Person'}) : super(kind); +} + +class UserDesc extends PersonDesc { + final nickname = const db.StringProperty(); + final languages = + const db.StringListProperty(propertyName: 'language'); + const UserDesc({String kind: 'User'}) : super(kind: kind); +} + + +@db.ModelMetadata(const PolyPersonDesc()) +class PolyPerson extends db.PolyModel { + String name; + + // NOTE: There is no need to store these values, we make these two an alias + // for [name]. They are only used for querying. + String get indexedName => name; + String get unIndexedName => name; + set indexedName(String newName) => name = newName; + set unIndexedName(String newName) => name = newName; + + operator==(Object other) => isSame(other); + + isSame(Object other) { + return + other is PolyPerson && + id == other.id && + name == other.name; + } +} + +@db.ModelMetadata(const PolyUserDesc()) +class PolyUser extends PolyPerson { + String nickname; + + isSame(Object other) => + super.isSame(other) && other is PolyUser && nickname == other.nickname; +} + +class PolyPersonDesc extends db.PolyModelDescription { + static String PolyModelName = 'PolyPerson'; + + final id = const db.IntProperty(); + final name = const db.StringProperty(); + final indexedName = const db.StringProperty(indexed: true); + final unIndexedName = const db.StringProperty(indexed: false); + + const PolyPersonDesc() : super(); +} + +class PolyUserDesc extends PolyPersonDesc { + static String PolyModelName = 'PolyUser'; + + final nickname = const db.StringProperty(); + const PolyUserDesc(); +} + + +@db.ModelMetadata(const ExpandoPersonDesc()) +class ExpandoPerson extends db.ExpandoModel { + String name; + String nickname; + + operator==(Object other) { + if (other is ExpandoPerson && id == other.id && name == other.name) { + if (additionalProperties.length != other.additionalProperties.length) { + return false; + } + for (var key in additionalProperties.keys) { + if (additionalProperties[key] != other.additionalProperties[key]) { + return false; + } + } + return true; + } + return false; + } +} + +class ExpandoPersonDesc extends db.ExpandoModelDescription { + final id = const db.IntProperty(); + final name = const db.StringProperty(); + final nickname = const db.StringProperty(propertyName: 'NN'); + + const ExpandoPersonDesc() 
: super('ExpandoPerson'); +} + + +Future sleep(Duration duration) { + var completer = new Completer(); + new Timer(duration, completer.complete); + return completer.future; +} + +runTests(db.DatastoreDB store) { + void compareModels(List expectedModels, + List models, + {bool anyOrder: false}) { + expect(models.length, equals(expectedModels.length)); + if (anyOrder) { + // Do expensive O(n^2) search. + for (var searchModel in expectedModels) { + bool found = false; + for (var m in models) { + if (m == searchModel) { + found = true; + break; + } + } + expect(found, isTrue); + } + } else { + for (var i = 0; i < expectedModels.length; i++) { + expect(models[i], equals(expectedModels[i])); + } + } + } + + Future testInsertLookupDelete( + List objects, {bool transactional: false, bool xg: false}) { + var keys = objects.map((db.Model obj) => obj.key).toList(); + + if (transactional) { + return store.beginTransaction(crossEntityGroup: xg) + .then((db.Transaction commitTransaction) { + commitTransaction.queueMutations(inserts: objects); + return commitTransaction.commit().then((_) { + return store.beginTransaction(crossEntityGroup: xg) + .then((db.Transaction deleteTransaction) { + return deleteTransaction.lookup(keys).then((List models) { + compareModels(objects, models); + deleteTransaction.queueMutations(deletes: keys); + return deleteTransaction.commit(); + }); + }); + }); + }); + } else { + return store.commit(inserts: objects).then(expectAsync((_) { + return store.lookup(keys).then(expectAsync((List models) { + compareModels(objects, models); + return store.commit(deletes: keys).then(expectAsync((_) { + return store.lookup(keys).then(expectAsync((List models) { + for (var i = 0; i < models.length; i++) { + expect(models[i], isNull); + } + })); + })); + })); + })); + } + } + + group('key', () { + test('equal_and_hashcode', () { + var k1 = store.emptyKey.append(User, id: 10).append(Person, id: 12); + var k2 = store.newPartition(null) + .emptyKey.append(User, id: 10).append(Person, id: 12); + expect(k1, equals(k2)); + expect(k1.hashCode, equals(k2.hashCode)); + }); + }); + + group('e2e_db', () { + group('insert_lookup_delete', () { + test('persons', () { + var root = store.emptyKey; + var persons = []; + for (var i = 1; i <= 10; i++) { + persons.add(new Person() + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i'); + } + persons.first.wife = persons.last.key; + return testInsertLookupDelete(persons); + }); + test('users', () { + var root = store.emptyKey; + var users = []; + for (var i = 1; i <= 10; i++) { + users.add(new User() + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); + } + return testInsertLookupDelete(users); + }); + test('poly_insert', () { + var root = store.emptyKey; + var persons = []; + for (var i = 1; i <= 10; i++) { + persons.add(new PolyPerson() + ..id = i + ..parentKey = root + ..name = 'user$i'); + } + for (var i = 11; i <= 20; i++) { + persons.add(new PolyUser() + ..id = i + ..parentKey = root + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); + } + return testInsertLookupDelete(persons); + }); + test('expando_insert', () { + var root = store.emptyKey; + var expandoPersons = []; + for (var i = 1; i <= 10; i++) { + var expandoPerson = new ExpandoPerson() + ..parentKey = root + ..id = i + ..name = 'user$i'; + expandoPerson.foo = 'foo$i'; + expandoPerson.bar = i; + expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); + expect(expandoPerson.additionalProperties['bar'], equals(i)); + 
expandoPersons.add(expandoPerson); + } + return testInsertLookupDelete(expandoPersons); + }); + test('transactional_insert', () { + var root = store.emptyKey; + var models = []; + + models.add(new Person() + ..id = 1 + ..parentKey = root + ..age = 1 + ..name = 'user1'); + models.add(new User() + ..id = 2 + ..parentKey = root + ..age = 2 + ..name = 'user2' + ..nickname = 'nickname2'); + models.add(new PolyPerson() + ..id = 3 + ..parentKey = root + ..name = 'user3'); + models.add(new PolyUser() + ..id = 4 + ..parentKey = root + ..name = 'user4' + ..nickname = 'nickname4'); + var expandoPerson = new ExpandoPerson() + ..parentKey = root + ..id = 3 + ..name = 'user1'; + expandoPerson.foo = 'foo1'; + expandoPerson.bar = 2; + + return testInsertLookupDelete(models, transactional: true, xg: true); + }); + + test('parent_key', () { + var root = store.emptyKey; + var users = []; + for (var i = 333; i <= 334; i++) { + users.add(new User() + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); + } + var persons = []; + for (var i = 335; i <= 336; i++) { + persons.add(new Person() + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'person$i'); + } + + // We test that we can insert + lookup + // users[0], (persons[0] + users[0] as parent) + // persons[1], (users[1] + persons[0] as parent) + persons[0].parentKey = users[0].key; + users[1].parentKey = persons[1].key; + + return testInsertLookupDelete([]..addAll(users)..addAll(persons)); + }); + + test('auto_ids', () { + var root = store.emptyKey; + var persons = []; + persons.add(new Person() + ..id = 42 + ..parentKey = root + ..age = 80 + ..name = 'user80'); + // Auto id person with parentKey + persons.add(new Person() + ..parentKey = root + ..age = 81 + ..name = 'user81'); + // Auto id person without parentKey + persons.add(new Person() + ..age = 82 + ..name = 'user82'); + // Auto id person with non-root parentKey + var fatherKey = persons.first.parentKey; + persons.add(new Person() + ..parentKey = fatherKey + ..age = 83 + ..name = 'user83'); + persons.add(new Person() + ..id = 43 + ..parentKey = root + ..age = 84 + ..name = 'user84'); + return store.commit(inserts: persons).then(expectAsync((_) { + // At this point, autoIds are allocated and are relfected in the + // models (as well as parentKey if it was empty). + + var keys = persons.map((db.Model obj) => obj.key).toList(); + + for (var i = 0; i < persons.length; i++) { + expect(persons[i].age, equals(80 + i)); + expect(persons[i].name, equals('user${80 + i}')); + } + + expect(persons[0].id, equals(42)); + expect(persons[0].parentKey, equals(root)); + + expect(persons[1].id, isNotNull); + expect(persons[1].id is int, isTrue); + expect(persons[1].parentKey, equals(root)); + + expect(persons[2].id, isNotNull); + expect(persons[2].id is int, isTrue); + expect(persons[2].parentKey, equals(root)); + + expect(persons[3].id, isNotNull); + expect(persons[3].id is int, isTrue); + expect(persons[3].parentKey, equals(fatherKey)); + + expect(persons[4].id, equals(43)); + expect(persons[4].parentKey, equals(root)); + + expect(persons[1].id != persons[2].id, isTrue); + // NOTE: We can't make assumptions about the id of persons[3], + // because an id doesn't need to be globally unique, only under + // entities with the same parent. + + return store.lookup(keys).then(expectAsync((List models) { + // Since the id/parentKey fields are set after commit and a lookup + // returns new model instances, we can do full model comparision + // here. 
+ compareModels(persons, models); + return store.commit(deletes: keys).then(expectAsync((_) { + return store.lookup(keys).then(expectAsync((List models) { + for (var i = 0; i < models.length; i++) { + expect(models[i], isNull); + } + })); + })); + })); + })); + }); + }); + + test('query', () { + var root = store.emptyKey; + var users = []; + for (var i = 1; i <= 10; i++) { + var languages = []; + if (i == 10) { + languages = ['foo']; + } else if (i == 10) { + languages = ['foo', 'bar']; + } + users.add(new User() + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}' + ..languages = languages); + } + + var polyPersons = []; + var polyUsers = []; + for (var i = 1; i <= 10; i++) { + polyPersons.add(new PolyPerson() + ..id = i + ..parentKey = root + ..name = 'person$i'); + } + for (var i = 11; i <= 20; i++) { + polyPersons.add(new PolyUser() + ..id = i + ..parentKey = root + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); + polyUsers.add(polyPersons.last); + } + var sortedPolyPersons = [] + ..addAll(polyPersons) + ..sort((a, b) => a.name.compareTo(b.name)); + + var expandoPersons = []; + for (var i = 1; i <= 3; i++) { + var expandoPerson = new ExpandoPerson() + ..parentKey = root + ..id = i + ..name = 'user$i' + ..nickname = 'nickuser$i'; + expandoPerson.foo = 'foo$i'; + expandoPerson.bar = i; + expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); + expect(expandoPerson.additionalProperties['bar'], equals(i)); + expandoPersons.add(expandoPerson); + } + + var LOWER_BOUND = 'user2'; + + var usersSortedNameDescNicknameAsc = new List.from(users); + usersSortedNameDescNicknameAsc.sort((User a, User b) { + var result = b.name.compareTo(b.name); + if (result == 0) return a.nickname.compareTo(b.nickname); + return result; + }); + + var usersSortedNameDescNicknameDesc = new List.from(users); + usersSortedNameDescNicknameDesc.sort((User a, User b) { + var result = b.name.compareTo(b.name); + if (result == 0) return b.nickname.compareTo(a.nickname); + return result; + }); + + var usersSortedAndFilteredNameDescNicknameAsc = + usersSortedNameDescNicknameAsc.where((User u) { + return LOWER_BOUND.compareTo(u.name) <= 0; + }).toList(); + + var usersSortedAndFilteredNameDescNicknameDesc = + usersSortedNameDescNicknameDesc.where((User u) { + return LOWER_BOUND.compareTo(u.name) <= 0; + }).toList(); + + var fooUsers = users.where( + (User u) => u.languages.contains('foo')).toList(); + var barUsers = users.where( + (User u) => u.languages.contains('bar')).toList(); + + // Note: + // Non-ancestor queries (i.e. queries not lookups) result in index scans. + // The index tables are updated in a "eventually consistent" way. + // + // So this can make tests flaky, if the index updates take longer than the + // following constant. + var INDEX_UPDATE_DELAY = const Duration(seconds: 5); + + var allInserts = [] + ..addAll(users) + ..addAll(polyPersons) + ..addAll(expandoPersons); + var allKeys = allInserts.map((db.Model model) => model.key).toList(); + return store.commit(inserts: allInserts).then((_) { + return sleep(INDEX_UPDATE_DELAY).then((_) { + var tests = [ + // Queries for [Person] return no results, we only have [User] + // objects. 
+ () { + return store.query(Person).run().then((List models) { + compareModels([], models); + }); + }, + + // All users query + () { + return store.query(User).run().then((List models) { + compareModels(users, models, anyOrder: true); + }); + }, + + // Sorted query + () { + return store.query(User) + ..order('-name') + ..order('nickname') + ..run().then((List models) { + compareModels( + usersSortedNameDescNicknameAsc, models, anyOrder: true); + }); + }, + () { + return store.query(User) + ..order('-name') + ..order('-nickname') + ..run().then((List models) { + compareModels( + usersSortedNameDescNicknameDesc, models, anyOrder: true); + }); + }, + + // Sorted query with filter + () { + return store.query(User) + ..filter('name >=', LOWER_BOUND) + ..order('-name') + ..order('-nickname') + ..run().then((List models) { + compareModels(usersSortedAndFilteredNameDescNicknameAsc, + models, anyOrder: true); + }); + }, + () { + return store.query(User) + ..filter('name >=', LOWER_BOUND) + ..order('name') + ..order('-nickname') + ..run().then((List models) { + compareModels(usersSortedAndFilteredNameDescNicknameDesc, + models, anyOrder: true); + }); + }, + + // Filter lists + /* FIXME: TODO: FIXME: "IN" not supported in public proto/apiary */ + () { + return store.query(User) + ..filter('languages IN', ['foo']) + ..order('name') + ..run().then((List models) { + compareModels(fooUsers, models, anyOrder: true); + }); + }, + () { + return store.query(User) + ..filter('languages IN', ['bar']) + ..order('name') + ..run().then((List models) { + compareModels(barUsers, models, anyOrder: true); + }); + }, + // PolyModel queries + () { + return store.query(PolyPerson) + ..run().then((List models) { + // We compare here the query result in [models] to + // *all* persons (i.e. [polyPersons] contains all Person and + // User model objects) + compareModels(polyPersons, models, anyOrder: true); + }); + }, + () { + return store.query(PolyUser) + ..run().then((List models) { + // Here we ensure that [models] contains only Users. + compareModels(polyUsers, models, anyOrder: true); + }); + }, + + // PolyModel indexed/unindexed queries + () { + return store.query(PolyPerson) + ..filter('indexedName =', 'person1') + ..run().then((List models) { + compareModels([polyPersons[0]], models, anyOrder: true); + }); + }, + () { + return store.query(PolyPerson) + ..filter('unIndexedName =', 'person1') + ..run().then((List models) { + compareModels([], models, anyOrder: true); + }); + }, + + // Simple limit/offset test. + () { + return store.query(PolyPerson) + ..order('name') + ..offset(3) + ..limit(10) + ..run().then((List models) { + var expectedModels = sortedPolyPersons.sublist(3, 13); + compareModels(expectedModels, models, anyOrder: true); + }); + }, + + // Expando queries: Filter on normal property. 
+ () { + return store.query(ExpandoPerson) + ..filter('name =', expandoPersons.last.name) + ..run().then((List models) { + compareModels([expandoPersons.last], models); + }); + }, + // Expando queries: Filter on expanded String property + () { + return store.query(ExpandoPerson) + ..filter('foo =', expandoPersons.last.foo) + ..run().then((List models) { + compareModels([expandoPersons.last], models); + }); + }, + // Expando queries: Filter on expanded int property + () { + return store.query(ExpandoPerson) + ..filter('bar =', expandoPersons.last.bar) + ..run().then((List models) { + compareModels([expandoPersons.last], models); + }); + }, + // Expando queries: Filter normal property with different + // propertyName (datastore name is 'NN'). + () { + return store.query(ExpandoPerson) + ..filter('nickname =', expandoPersons.last.nickname) + ..run().then((List models) { + compareModels([expandoPersons.last], models); + }); + }, + + // Delete results + () => store.commit(deletes: allKeys), + + // Wait until the entity deletes are reflected in the indices. + () => sleep(INDEX_UPDATE_DELAY), + + // Make sure queries don't return results + () => store.lookup(allKeys).then((List models) { + expect(models.length, equals(allKeys.length)); + for (var model in models) { + expect(model, isNull); + } + }), + ]; + return Future.forEach(tests, (f) => f()); + }); + }); + }); + }); +} + diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test.dart b/pkgs/gcloud/test/db/e2e/metamodel_test.dart new file mode 100644 index 00000000..77eb6c99 --- /dev/null +++ b/pkgs/gcloud/test/db/e2e/metamodel_test.dart @@ -0,0 +1,97 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library metamodel_test; + +import 'dart:async'; + +import 'package:unittest/unittest.dart'; + +import 'package:gcloud/datastore.dart'; +import 'package:gcloud/datastore.dart' show Key, Query, Partition; +import 'package:gcloud/db.dart' as db; +import 'package:gcloud/db/metamodel.dart'; + +List buildEntitiesWithDifferentNamespaces() { + newKey(String namespace, String kind, int id) { + var partition = new Partition(namespace); + return new Key([new KeyElement(kind, id)], partition: partition); + } + + newEntity(String namespace, String kind, {int id: 1}) { + return new Entity(newKey(namespace, kind, id), {'ping': 'pong'}); + } + + return [ + newEntity(null, 'NullKind', id: 1), + newEntity(null, 'NullKind', id: 2), + newEntity(null, 'NullKind2', id: 1), + newEntity(null, 'NullKind2', id: 2), + + newEntity('FooNamespace', 'FooKind', id: 1), + newEntity('FooNamespace', 'FooKind', id: 2), + newEntity('FooNamespace', 'FooKind2', id: 1), + newEntity('FooNamespace', 'FooKind2', id: 2), + + newEntity('BarNamespace', 'BarKind', id: 1), + newEntity('BarNamespace', 'BarKind', id: 2), + newEntity('BarNamespace', 'BarKind2', id: 1), + newEntity('BarNamespace', 'BarKind2', id: 2), + ]; +} + +Future sleep(Duration duration) { + var completer = new Completer(); + new Timer(duration, completer.complete); + return completer.future; +} + +runTests(datastore, db.DatastoreDB store) { + // Shorten this name, so we don't have to break lines at 80 chars. 
+ final cond = predicate; + + group('e2e_db_metamodel', () { + test('namespaces__insert_lookup_delete', () { + var entities = buildEntitiesWithDifferentNamespaces(); + var keys = entities.map((e) => e.key).toList(); + + return datastore.commit(inserts: entities).then((_) { + return sleep(const Duration(seconds: 10)).then((_) { + var namespaceQuery = store.query(Namespace); + return namespaceQuery.run().then((List namespaces) { + expect(namespaces.length, 3); + expect(namespaces, contains(cond((ns) => ns.name == null))); + expect(namespaces, + contains(cond((ns) => ns.name == 'FooNamespace'))); + expect(namespaces, + contains(cond((ns) => ns.name == 'BarNamespace'))); + + var futures = []; + for (var namespace in namespaces) { + var partition = store.newPartition(namespace.name); + var kindQuery = store.query(Kind, partition: partition); + futures.add(kindQuery.run().then((List kinds) { + expect(kinds.length, greaterThanOrEqualTo(2)); + if (namespace.name == null) { + expect(kinds, contains(cond((k) => k.name == 'NullKind'))); + expect(kinds, contains(cond((k) => k.name == 'NullKind2'))); + } else if (namespace.name == 'FooNamespace') { + expect(kinds, contains(cond((k) => k.name == 'FooKind'))); + expect(kinds, contains(cond((k) => k.name == 'FooKind2'))); + } else if (namespace.name == 'BarNamespace') { + expect(kinds, contains(cond((k) => k.name == 'BarKind'))); + expect(kinds, contains(cond((k) => k.name == 'BarKind2'))); + } + })); + } + return Future.wait(futures).then((_) { + expect(datastore.commit(deletes: keys), completes); + }); + }); + }); + }); + }); + }); +} + From de3585d7f81cb4ba52e707819d792573e4ca00f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 10 Sep 2014 15:58:40 +0200 Subject: [PATCH 008/239] Add a some tests of publish and push events R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//548483002 --- pkgs/gcloud/lib/pubsub.dart | 3 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 17 ++- pkgs/gcloud/test/pubsub_test.dart | 165 +++++++++++++++++++++++++-- 3 files changed, 169 insertions(+), 16 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index fc16db37..ae24da11 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -5,6 +5,7 @@ library gcloud.pubsub; import 'dart:async'; +import 'dart:collection'; import 'dart:convert'; import 'package:crypto/crypto.dart'; import 'package:http/http.dart' as http; @@ -351,7 +352,7 @@ abstract class Message { /// The labels for this message. The values in the Map are either /// Strings or integers. /// - /// TODO: Values can be 64-bit integers. Deal with this for dart2js? + /// Values can be 64-bit integers. Map get labels; } diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 3b42cdfe..2ce7b44e 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -331,6 +331,7 @@ class _PullEventImpl implements PullEvent { /// /// decoded from JSON encoded push HTTP request body. 
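A push subscription delivers each message as an HTTP POST whose body is exactly such a JSON document. A minimal sketch of a handler that feeds the request body into the `PushEvent.fromJson` constructor exercised in the tests; the server setup, port and response handling are illustrative assumptions:

    import 'dart:convert';
    import 'dart:io';

    import 'package:gcloud/pubsub.dart';

    main() {
      HttpServer.bind(InternetAddress.ANY_IP_V4, 8080).then((server) {
        server.listen((HttpRequest request) {
          // Read the JSON encoded push HTTP request body and decode it.
          request.transform(UTF8.decoder).join().then((String body) {
            var event = new PushEvent.fromJson(body);
            print('${event.subscriptionName}: ${event.message.asString}');
            request.response
                ..statusCode = HttpStatus.OK
                ..close();
          });
        });
      });
    }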
class _PushEventImpl implements PushEvent { + static const PREFIX = '/subscriptions/'; final Message _message; final String _subscriptionName; @@ -341,18 +342,22 @@ class _PushEventImpl implements PushEvent { _PushEventImpl(this._message, this._subscriptionName); factory _PushEventImpl.fromJson(String json) { - var body = JSON.decode(json); - var data = body['message']['data']; - var labels = {}; + Map body = JSON.decode(json); + String data = body['message']['data']; + Map labels = new HashMap(); body['message']['labels'].forEach((label) { var key = label['key']; var value = label['strValue']; if (value == null) value = label['numValue']; labels[key] = value; }); - return new _PushEventImpl( - new _PushMessage(data, labels), - '/subscriptions/' + body['subscription']); + String subscription = body['subscription']; + // TODO(#1): Remove this when the push event subscription name is prefixed + // with '/subscriptions/'. + if (!subscription.startsWith(PREFIX)) { + subscription = PREFIX + subscription; + } + return new _PushEventImpl(new _PushMessage(data, labels), subscription); } } diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub_test.dart index cb025e0f..ffef153e 100644 --- a/pkgs/gcloud/test/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub_test.dart @@ -1,6 +1,7 @@ import 'dart:async'; import 'dart:convert'; +import 'package:crypto/crypto.dart' as crypto; import 'package:http/http.dart' as http; import 'package:http/testing.dart' as http_testing; import 'package:unittest/unittest.dart'; @@ -880,6 +881,105 @@ main() { group('topic', () { var name = 'test-topic'; var absoluteName = '/topics/$PROJECT/test-topic'; + var message = 'Hello, world!'; + var messageBytes = UTF8.encode(message); + var messageBase64 = crypto.CryptoUtils.bytesToBase64(messageBytes); + var labels = {'a': 1, 'b': 'text'}; + + registerLookup(mock) { + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + } + + registerPublish(mock, count, fn) { + mock.register('POST', 'topics/publish', expectAsync((request) { + var publishRequest = + new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); + return fn(publishRequest); + }, count: count)); + } + + test('publish', () { + var mock = new MockClient(); + registerLookup(mock); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + mock.clear(); + registerPublish(mock, 4, ((request) { + expect(request.topic, absoluteName); + expect(request.message.data, messageBase64); + expect(request.message.label, isNull); + return mock.respondEmpty(); + })); + + return topic.publishString(message).then(expectAsync((result) { + expect(result, isNull); + return topic.publishBytes(messageBytes).then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withString(message)).then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withBytes( + messageBytes)).then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); + + test('publish-with-labels', () { + var mock = new MockClient(); + registerLookup(mock); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + mock.clear(); + registerPublish(mock, 4, ((request) { + expect(request.topic, absoluteName); + expect(request.message.data, 
messageBase64); + expect(request.message.label, isNotNull); + expect(request.message.label.length, labels.length); + request.message.label.forEach((label) { + expect(labels.containsKey(label.key), isTrue); + if (label.numValue != null) { + expect(label.strValue, isNull); + expect(labels[label.key], int.parse(label.numValue)); + } else { + expect(label.strValue, isNotNull); + expect(labels[label.key], label.strValue); + } + }); + return mock.respondEmpty(); + })); + + return topic.publishString(message, labels: labels) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publishBytes(messageBytes, labels: labels) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withString(message, labels: labels)) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withBytes(messageBytes, labels: labels)) + .then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); test('delete', () { var mock = new MockClient(); @@ -945,14 +1045,61 @@ main() { }); group('push', () { - var requestBody = - '{"message":{"data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==",' - '"labels":[{"key":"messageNo","numValue":30},' - '{"key":"test","strValue":"hello"}]},' - '"subscription":"sgjesse-managed-vm/test-push-subscription"}'; - var event = new PushEvent.fromJson(requestBody); - expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.labels['messageNo'], 30); - expect(event.message.labels['test'], 'hello'); + var relativeSubscriptionName = 'sgjesse-managed-vm/test-push-subscription'; + var absoluteSubscriptionName = '/subscriptions/$relativeSubscriptionName'; + + test('event', () { + var requestBody = +''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":30 + }, + { + "key":"test", + "strValue":"hello" + } + ] + }, + "subscription":"$absoluteSubscriptionName" +} +'''; + var event = new PushEvent.fromJson(requestBody); + expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.labels['messageNo'], 30); + expect(event.message.labels['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); + + test('event-short-subscription-name', () { + var requestBody = + ''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":30 + }, + { + "key":"test", + "strValue":"hello" + } + ] + }, + "subscription":"$relativeSubscriptionName" +} +'''; + var event = new PushEvent.fromJson(requestBody); + expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.labels['messageNo'], 30); + expect(event.message.labels['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); }); } From fab30cf776ec98778ff5152198525f3659c48898 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 10 Sep 2014 16:00:51 +0200 Subject: [PATCH 009/239] Remove pub dependency overrides R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//557323002 --- pkgs/gcloud/pubspec.yaml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 7768df81..b6a292a9 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -9,8 +9,3 @@ dependencies: googleapis_beta: '>=0.1.0 <0.2.0' dev_dependencies: unittest: '>=0.11.0 <0.12.0' -dependency_overrides: - googleapis_auth: - path: ../googleapis_auth - 
googleapis_beta: - path: ../googleapis/generated/googleapis_beta From 1b87a69b210f91adc03fd0d2ba12bff094dc1591 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 17 Sep 2014 13:55:09 +0200 Subject: [PATCH 010/239] Prepare for package waterfall: Add status file, disable dart2js transformer R=ricow@google.com Review URL: https://codereview.chromium.org//580633004 --- pkgs/gcloud/.status | 12 ++++++++++++ pkgs/gcloud/pubspec.yaml | 4 ++++ 2 files changed, 16 insertions(+) create mode 100644 pkgs/gcloud/.status diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status new file mode 100644 index 00000000..7f29452f --- /dev/null +++ b/pkgs/gcloud/.status @@ -0,0 +1,12 @@ +*/packages/*: Skip +*/*/packages/*: Skip +*/*/*/packages/*: Skip + +build/test/datastore/e2e/*: Skip +build/test/db/e2e/*: Skip + +test/datastore/e2e/*: Skip +test/db/e2e/*: Skip + +[ $compiler == dart2js ] +*: Skip diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b6a292a9..6946ee7f 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -9,3 +9,7 @@ dependencies: googleapis_beta: '>=0.1.0 <0.2.0' dev_dependencies: unittest: '>=0.11.0 <0.12.0' + +transformers: +- $dart2js: + $include: [] From 2b0cc287ece9e83793279b5c0870409da6569d56 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 29 Sep 2014 15:09:15 +0200 Subject: [PATCH 011/239] Add documentation to low-level Datastore API R=sgjesse@google.com Review URL: https://codereview.chromium.org//604253002 --- pkgs/gcloud/lib/datastore.dart | 168 +++++++++++++++++- pkgs/gcloud/lib/src/db/annotations.dart | 26 +++ pkgs/gcloud/lib/src/db/model_db.dart | 2 + pkgs/gcloud/lib/src/db/model_description.dart | 4 + pkgs/gcloud/lib/src/db/properties.dart | 18 +- 5 files changed, 213 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index e8e13160..578e7979 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -2,6 +2,11 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +/// This library provides a low-level API for accessing Google's Cloud +/// Datastore. +/// +/// For more information on Cloud Datastore, please refer to the following +/// developers page: https://cloud.google.com/datastore/docs library gcloud.datastore; import 'dart:async'; @@ -35,6 +40,10 @@ class TimeoutError extends DatastoreError { TimeoutError() : super("The operation timed out."); } +/// Thrown when a query would require an index which was not set. +/// +/// An application needs to specify indices in a `index.yaml` file and needs to +/// create indices using the `gcloud preview datastore create-indexes` command. class NeedIndexError extends DatastoreError { NeedIndexError() : super("An index is needed for the query to succeed."); @@ -52,7 +61,23 @@ class QuotaExceededError extends DatastoreError { QuotaExceededError(error) : super("Quota was exceeded ($error)."); } - +/// A datastore Entity +/// +/// An entity is identified by a unique `key` and consists of a number of +/// `properties`. If a property should not be indexed, it needs to be included +/// in the `unIndexedProperties` set. +/// +/// The `properties` field maps names to values. Values can be of a primitive +/// type or of a composed type. +/// +/// The following primitive types are supported: +/// bool, int, double, String, DateTime, BlobValue, Key +/// +/// It is possible to have a `List` of values. The values must be primitive. 
+/// Lists inside lists are not supported. +/// +/// Whether a property is indexed or not applies to all values (this is only +/// relevant if the value is a list of primitive values). class Entity { final Key key; final Map properties; @@ -61,8 +86,28 @@ class Entity { Entity(this.key, this.properties, {this.unIndexedProperties}); } +/// A complete or partial key. +/// +/// A key can uniquely identifiy a datastore `Entity`s. It consists of a +/// partition and path. The path consists of one or more `KeyElement`s. +/// +/// A key may be incomplete. This is usesfull when inserting `Entity`s which IDs +/// should be automatically allocated. +/// +/// Example of a fully populated [Key]: +/// +/// var fullKey = new Key([new KeyElement('Person', 1), +/// new KeyElement('Address', 2)]); +/// +/// Example of a partially populated [Key] / an imcomplete [Key]: +/// +/// var partialKey = new Key([new KeyElement('Person', 1), +/// new KeyElement('Address', null)]); class Key { + /// The partition of this `Key`. final Partition partition; + + /// The path of `KeyElement`s. final List elements; Key(this.elements, {Partition partition}) @@ -103,9 +148,17 @@ class Key { } } +/// A datastore partition. +/// +/// A partition is used for partitioning a dataset into multiple namespaces. +/// The default namespace is `null`. Using empty Strings as namespaces is +/// invalid. +/// +/// TODO(Issue #6): Add dataset-id here. class Partition { static const Partition DEFAULT = const Partition._default(); + /// The namespace of this partition. final String namespace; Partition(this.namespace) { @@ -122,9 +175,17 @@ class Partition { other is Partition && namespace == other.namespace; } +/// An element in a `Key`s path. class KeyElement { + /// The kind of this element. final String kind; - final id; // either int or string + + /// The ID of this element. It must be either an `int` or a `String. + /// + /// This may be `null`, in which case it does not identify an Entity. It is + /// possible to insert [Entity]s with incomplete keys and let Datastore + /// automatically select a unused integer ID. + final id; KeyElement(this.kind, this.id) { if (kind == null) { @@ -145,6 +206,7 @@ class KeyElement { String toString() => "$kind.$id"; } +/// A relation used in query filters. class FilterRelation { static const FilterRelation LessThan = const FilterRelation._('<'); static const FilterRelation LessThanOrEqual = const FilterRelation._('<='); @@ -161,69 +223,167 @@ class FilterRelation { String toString() => name; } +/// A filter used in queries. class Filter { + /// The relation used for comparing `name` with `value`. final FilterRelation relation; + + /// The name of the datastore property used in the comparision. final String name; + + /// The value used for comparing against the property named by `name`. final Object value; Filter(this.relation, this.name, this.value); } +/// The direction of a order. +/// +/// TODO(Issue #6): Make this class Private and add the two statics to the +/// 'Order' class. +/// [i.e. so one can write Order.Ascending, Order.Descending]. class OrderDirection { static const OrderDirection Ascending = const OrderDirection._('Ascending'); static const OrderDirection Decending = const OrderDirection._('Decending'); final String name; + const OrderDirection._(this.name); } +/// A order used in queries. class Order { + /// The direction of the order. final OrderDirection direction; + + /// The name of the property used for the order. 
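As an illustration, a [Filter], an [Order] and the [Query] class defined just below combine roughly like this (the kind and property names are made up):

    var query = new Query(
        kind: 'Person',
        filters: [new Filter(FilterRelation.LessThan, 'age', 42)],
        orders: [new Order(OrderDirection.Ascending, 'age')],
        limit: 10);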
final String propertyName; + /// TODO(Issue #6): Make [direction] the second argument and make it optional. Order(this.direction, this.propertyName); } +/// A datastore query. +/// +/// A query consists of filters (kind, ancestor and property filters), one or +/// more orders and a offset/limit pair. +/// +/// All fields may be optional. +/// +/// Example of building a [Query]: +/// var person = ....; +/// var query = new Query(ancestorKey: personKey, kind: 'Address') class Query { + /// Restrict the result set to entities of this kind. final String kind; + + /// Restrict the result set to entities which have this ancestorKey / parent. final Key ancestorKey; + + /// Restrict the result set by a list of property [Filter]s. final List filters; + + /// Order the matching entities following the given property [Order]s. final List orders; + + /// Skip the first [offset] entities in the result set. final int offset; + + /// Limit the number of entities returned to [limit]. final int limit; Query({this.ancestorKey, this.kind, this.filters, this.orders, this.offset, this.limit}); } +/// The result of a commit. class CommitResult { + /// If the commit included `autoIdInserts`, this list will be the fully + /// populated Keys, including the automatically allocated integer IDs. final List autoIdInsertKeys; CommitResult(this.autoIdInsertKeys); } +/// A blob value which can be used as a property value in `Entity`s. class BlobValue { + /// The binary data of this blob. final List bytes; + BlobValue(this.bytes); } +/// An opaque token returned by the `beginTransaction` method of a [Datastore]. +/// +/// This token can be passed to the `commit` and `lookup` calls if they should +/// operate within this transaction. abstract class Transaction { } +/// Interface used to talk to the Google Cloud Datastore service. +/// +/// It can be used to insert/update/delete [Entity]s, lookup/query [Entity]s +/// and allocate IDs from the auto ID allocation policy. abstract class Datastore { + /// Allocate integer IDs for the partially populated [keys] given as argument. + /// + /// The returned [Key]s will be fully populated with the allocated IDs. Future> allocateIds(List keys); + /// Starts a new transaction and returns an opaque value representing it. + /// + /// If [crossEntityGroup] is `true`, the transaction can work on up to 5 + /// entity groups. Otherwise the transaction will be limited to only operate + /// on a single entity group. Future beginTransaction({bool crossEntityGroup: false}); - // Can throw a [TransactionAbortedError] error. + /// Make modifications to the datastore. + /// + /// - `inserts` are [Entity]s which have a fully populated [Key] and should + /// be either added to the datastore or updated. + /// + /// - `autoIdInserts` are [Entity]s which do not have a fully populated [Key] + /// and should be added to the dataset, automatically assiging integer IDs. + /// The returned [CommitResult] will contain the fuly populated keys. + /// + /// - `deletes` are a list of fully populated [Key]s which uniquely identify + /// the [Entity]s which should be deleted. + /// + /// If a [transaction] is given, all modifications will be done within that + /// transaction. + /// + /// This method might complete with a [TransactionAbortedError] error. + /// Users must take care of retrying transactions. + /// TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert. Future commit({List inserts, List autoIdInserts, List deletes, Transaction transaction}); + + /// Roll a started transaction back. 
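For illustration, a transactional read-modify-write using `beginTransaction`, `lookup`, `commit` and `rollback` might look roughly like this; `datastore` stands for a connected implementation, `key` for a fully populated [Key], and the `count` property is just an example:

    import 'dart:async';

    import 'package:gcloud/datastore.dart';

    Future incrementCounter(Datastore datastore, Key key) {
      return datastore.beginTransaction().then((transaction) {
        return datastore
            .lookup([key], transaction: transaction)
            .then((entities) {
          // lookup returns null entries for missing keys; assume it exists.
          var properties = new Map.from(entities.single.properties);
          var count = properties['count'];
          properties['count'] = count == null ? 1 : count + 1;
          return datastore.commit(
              inserts: [new Entity(key, properties)],
              transaction: transaction);
        }).catchError((error, stack) {
          // Make sure a failed attempt does not leave the transaction open.
          return datastore.rollback(transaction)
              .then((_) => new Future.error(error, stack));
        });
      });
    }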
Future rollback(Transaction transaction); + /// Looks up the fully populated [keys] in the datastore and returns either + /// the [Entity] corresponding to the [Key] or `null`. The order in the + /// returned [Entity]s is the same as in [keys]. + /// + /// If a [transaction] is given, the lookup will be within this transaction. Future> lookup(List keys, {Transaction transaction}); - // TODO: Make this pageable. + /// Runs a query on the dataset and returns matching [Entity]s. + /// + /// - `query` is used to restrict the number of returned [Entity]s and may + /// may specify an order. + /// + /// - `partition` can be used to specify the namespace used for the lookup. + /// + /// If a [transaction] is given, the query will be within this transaction. + /// But note that arbitrary queries within a transaction are not possible. + /// A transaction is limited to a very small number of entity groups. Usually + /// queries with transactions are restricted by providing an ancestor filter. + /// + /// Outside of transactions, the result set might be stale. Queries are by + /// default eventually consistent. + /// TODO(Issue #6): Make this pageable. Future> query( Query query, {Partition partition, Transaction transaction}); } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 3ec409bc..91c20cc7 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -4,7 +4,33 @@ part of gcloud.db; +/// This class should be used to annotate DB Model classes. +/// +/// It will attach a description on how to map dart Objects to Datastore +/// Entities. +/// +/// Note that the model class needs to have an empty default constructor. +/// +/// Here is an example of a Dart Model class and a ModelScription which +/// describes the mapping. +/// +/// @ModelMetadata(const PersonDesc()) +/// class Person extends Model { +/// String name; +/// DateTime dateOfBirth; +/// } +/// +/// class PersonDesc extends ModelDescription { +/// final id = const IntProperty(); +/// +/// final name = const StringProperty(); +/// final dateOfBirth = const DateTimeProperty(); +/// +/// const GreetingDesc() : super('Person'); +/// } +/// class ModelMetadata { final ModelDescription description; + const ModelMetadata(this.description); } diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 362a6168..6b8d51d4 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -11,6 +11,8 @@ part of gcloud.db; /** * Represents an in-memory database of all model classes and it's corresponding * [ModelDescriptions]s. + * + * NOTE: This is an internal class and should not be used by normal users. */ class ModelDB { // Map of properties for a given [ModelDescription] diff --git a/pkgs/gcloud/lib/src/db/model_description.dart b/pkgs/gcloud/lib/src/db/model_description.dart index 24b38cdb..f060fb28 100644 --- a/pkgs/gcloud/lib/src/db/model_description.dart +++ b/pkgs/gcloud/lib/src/db/model_description.dart @@ -4,6 +4,10 @@ part of gcloud.db; +/// Subclasses of [ModelDescription] describe how to map a dart model object +/// to a Datastore Entity. +/// +/// Please see [ModelMetadata] for an example on how to use them. 
abstract class ModelDescription { static String ID_FIELDNAME = 'id'; diff --git a/pkgs/gcloud/lib/src/db/properties.dart b/pkgs/gcloud/lib/src/db/properties.dart index 1e337444..5f55ca9c 100644 --- a/pkgs/gcloud/lib/src/db/properties.dart +++ b/pkgs/gcloud/lib/src/db/properties.dart @@ -4,10 +4,26 @@ part of gcloud.db; +/// Describes a property of an Entity. +/// +/// Please see [ModelMetadata] for an example on how to use them. abstract class Property { - // The name in the ModelClass is used if [propertyName] is null! + /// The name of the property. + /// + /// If it is `null`, the name will be the same as used in the + /// [ModelDescription]. final String propertyName; + + /// Specifies whether this property is required or not. + /// + /// If required is `true`, it will be enforced when saving model objects to + /// the datastore and when retrieving them. final bool required; + + /// Specifies whether this property should be indexed or not. + /// + /// When running queries no this property, it is necessary to set [indexed] to + /// `true`. final bool indexed; const Property({this.propertyName, this.required: false, this.indexed: true}); From 15c3c640d1a5849d1ad78392de04ebc1a1522e36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 24 Oct 2014 11:46:48 +0200 Subject: [PATCH 012/239] Initial implementation of the gcloud Cloud Storage API R=kustermann@google.com, lrn@google.com BUG= Review URL: https://codereview.chromium.org//581373002 --- pkgs/gcloud/lib/src/storage_impl.dart | 538 +++++++++++++++++++++++++ pkgs/gcloud/lib/storage.dart | 384 ++++++++++++++++++ pkgs/gcloud/pubspec.yaml | 6 +- pkgs/gcloud/test/common.dart | 178 +++++++++ pkgs/gcloud/test/pubsub_test.dart | 147 ++----- pkgs/gcloud/test/storage_test.dart | 546 ++++++++++++++++++++++++++ 6 files changed, 1693 insertions(+), 106 deletions(-) create mode 100644 pkgs/gcloud/lib/src/storage_impl.dart create mode 100644 pkgs/gcloud/lib/storage.dart create mode 100644 pkgs/gcloud/test/common.dart create mode 100644 pkgs/gcloud/test/storage_test.dart diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart new file mode 100644 index 00000000..07d0216a --- /dev/null +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -0,0 +1,538 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.storage; + +const String _ABSOLUTE_PREFIX = 'gs://'; +const String _DIRECTORY_DELIMITER = 'gs://'; + +/// Representation of an absolute name consisting of bucket name and object +/// name. +class _AbsoluteName { + String bucketName; + String objectName; + + _AbsoluteName.parse(String absoluteName) { + if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) { + throw new FormatException("Absolute name '$absoluteName' does not start " + "with '$_ABSOLUTE_PREFIX'"); + } + int index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); + if (index == -1 || index == _ABSOLUTE_PREFIX.length) { + throw new FormatException("Absolute name '$absoluteName' does not have " + "a bucket name"); + } + if (index == absoluteName.length - 1) { + throw new FormatException("Absolute name '$absoluteName' does not have " + "an object name"); + } + bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); + objectName = absoluteName.substring(index + 1); + } +} + +/// Storage API implementation providing access to buckets. 
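A usage sketch of the absolute-name scheme parsed above; the project and bucket names are placeholders, and `client` is assumed to be an authenticated HTTP client obtained elsewhere (for example via package:googleapis_auth):

    var storage = new Storage(client, 'my-project');
    var bucket = storage.bucket('my-bucket');

    // Absolute names carry the gs:// prefix and the bucket name, so they can
    // refer to objects in other buckets, e.g. when copying.
    print(bucket.absoluteObjectName('photos/a.jpg'));  // gs://my-bucket/photos/a.jpg
    storage.copyObject(bucket.absoluteObjectName('photos/a.jpg'),
                       'gs://backup-bucket/photos/a.jpg');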
+class _StorageImpl implements Storage { + final String project; + final storage.StorageApi _api; + + _StorageImpl(client, this.project) + : _api = new storage.StorageApi(client); + + Future createBucket(String bucketName, {BucketAcl acl}) { + var bucket = new storage.Bucket()..name = bucketName; + var predefinedAcl; + if (acl != null) { + assert(acl.isPredefined); + predefinedAcl = acl._predefined; + } + return _api.buckets.insert(bucket, + project, + predefinedAcl: predefinedAcl) + .then((bucket) => null); + } + + Future deleteBucket(String bucketName) { + return _api.buckets.delete(bucketName); + } + + Bucket bucket(String bucketName, {ObjectAcl defaultObjectAcl}) { + return new _BucketImpl(this, bucketName, defaultObjectAcl); + } + + Future bucketExists(String bucketName) { + notFoundError(e) => e is common.DetailedApiRequestError && e.status == 404; + return _api.buckets.get(bucketName) + .then((_) => true) + .catchError((e) => false, test: notFoundError); + + } + + Future bucketInfo(String bucketName) { + return _api.buckets.get(bucketName) + .then((bucket) => new _BucketInformationImpl(bucket)); + } + + Stream listBucketNames() { + Future> firstPage(pageSize) { + return _listBuckets(pageSize, null) + .then((response) => new _BucketPageImpl(this, pageSize, response)); + } + return new StreamFromPages(firstPage).stream; + } + + Future> pageBucketNames({int pageSize: 50}) { + return _listBuckets(pageSize, null).then((response) { + return new _BucketPageImpl(this, pageSize, response); + }); + } + + Future copyObject(String src, String dest) { + var srcName = new _AbsoluteName.parse(src); + var destName = new _AbsoluteName.parse(dest); + return _api.objects.copy(null, + srcName.bucketName, srcName.objectName, + destName.bucketName, destName.objectName) + .then((_) => null); + } + + Future _listBuckets(int pageSize, String nextPageToken) { + return _api.buckets.list( + project, + maxResults: pageSize, + pageToken: nextPageToken); + } +} + +class _BucketInformationImpl implements BucketInfo { + storage.Bucket _bucket; + + _BucketInformationImpl(this._bucket); + + String get bucketName => _bucket.name; + + DateTime get created => _bucket.timeCreated; +} + +/// Bucket API implementation providing access to objects. +class _BucketImpl implements Bucket { + final storage.StorageApi _api; + ObjectAcl _defaultObjectAcl; + final String bucketName; + + _BucketImpl(_StorageImpl storage, this.bucketName, this._defaultObjectAcl) : + this._api = storage._api; + + String absoluteObjectName(String objectName) { + return '${_ABSOLUTE_PREFIX}$bucketName/$objectName'; + } + + StreamSink> write( + String objectName, + {int length, ObjectMetadata metadata, String contentType}) { + storage.Object object; + if (metadata == null) { + metadata = new _ObjectMetadata(contentType: contentType); + } else if (contentType != null) { + metadata = metadata.replace(contentType: contentType); + } + object = (metadata as _ObjectMetadata)._object; + + // Fill properties not passed in metadata. 
+ object.name = objectName; + + var sink = new _MediaUploadStreamSink( + _api, bucketName, objectName, object, length); + return sink; + } + + Future writeBytes( + String objectName, List bytes, + {ObjectMetadata metadata, String contentType}) { + var sink = write(objectName, length: bytes.length, + metadata: metadata, contentType: contentType); + sink.add(bytes); + return sink.close(); + } + + Stream read(String objectName, {int offset: 0, int length}) { + var controller = new StreamController(); + _api.objects.get( + bucketName, + objectName, + downloadOptions: common.DownloadOptions.FullMedia).then( + (media) => media.stream.pipe(controller.sink)); + return controller.stream; + } + + Future info(String objectName) { + return _api.objects.get(bucketName, objectName) + .then((object) => new _ObjectStatImpl(object)); + } + + Stream list({String prefix}) { + Future> firstPage(pageSize) { + return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) + .then((response) => new _ObjectPageImpl( + this, prefix, pageSize, response)); + } + return new StreamFromPages(firstPage).stream; + } + + Future> page({String prefix, int pageSize: 50}) { + return _listObjects( + bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) + .then((response) { + return new _ObjectPageImpl(this, prefix, pageSize, response); + }); + } + + Future updateMetadata(String objectName, ObjectMetadata metadata) { + // TODO: support other ObjectMetadata implementations? + _ObjectMetadata md = metadata; + var object = md._object; + if (md._predefined == null && _defaultObjectAcl == null) { + throw new ArgumentError('ACL is required for update'); + } + if (md.contentType == null) { + throw new ArgumentError('Content-Type is required for update'); + } + var acl = md._predefined != null ? md._predefined._predefined + : _defaultObjectAcl._predefined; + return _api.objects.update( + object, bucketName, objectName, predefinedAcl: acl); + } + + Future _listObjects( + String bucketName, String prefix, String delimiter, + int pageSize, String nextPageToken) { + return _api.objects.list( + bucketName, + prefix: prefix, + delimiter: delimiter, + maxResults: pageSize, + pageToken: nextPageToken); + } +} + +class _BucketPageImpl implements Page { + final _StorageImpl _storage; + final int _pageSize; + final String _nextPageToken; + final List items; + + _BucketPageImpl(this._storage, this._pageSize, storage.Buckets response) + : items = new List(response.items != null ? response.items.length : 0), + _nextPageToken = response.nextPageToken { + for (int i = 0; i < items.length; i++) { + items[i] = response.items[i].name; + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (isLast) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _storage._listBuckets(pageSize, _nextPageToken).then((response) { + return new _BucketPageImpl(_storage, pageSize, response); + }); + } +} + +class _ObjectPageImpl implements Page { + final _BucketImpl _bucket; + final String _prefix; + final int _pageSize; + final String _nextPageToken; + final List items; + + _ObjectPageImpl( + this._bucket, this._prefix, this._pageSize, + storage.Objects response) + : items = new List( + (response.items != null ? response.items.length : 0) + + (response.prefixes != null ? 
response.prefixes.length : 0)), + _nextPageToken = response.nextPageToken { + var prefixes = 0; + if (response.prefixes != null) { + for (int i = 0; i < response.prefixes.length; i++) { + items[i] = new BucketEntry._directory(response.prefixes[i]); + } + prefixes = response.prefixes.length; + } + if (response.items != null) { + for (int i = 0; i < response.items.length; i++) { + items[prefixes + i] = new BucketEntry._object(response.items[i].name); + } + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (isLast) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _bucket._listObjects( + _bucket.bucketName, + _prefix, + _DIRECTORY_DELIMITER, + pageSize, + _nextPageToken).then((response) { + return new _ObjectPageImpl( + _bucket, _prefix, pageSize, response); + }); + } +} + +class _ObjectGenerationImpl implements ObjectGeneration { + final String objectGeneration; + final int metaGeneration; + + _ObjectGenerationImpl(this.objectGeneration, this.metaGeneration); +} + +class _ObjectStatImpl implements ObjectInfo { + storage.Object _object; + Uri _downloadLink; + ObjectGeneration _generation; + ObjectMetadata _metadata; + + _ObjectStatImpl(object) : + _object = object, _metadata = new _ObjectMetadata._(object); + + String get name => _object.name; + + int get size => int.parse(_object.size); + + DateTime get updated => _object.updated; + + List get md5Hash => + crypto.CryptoUtils.base64StringToBytes(_object.md5Hash); + + int get crc32CChecksum => int.parse(_object.crc32c); + + Uri get downloadLink { + if (_downloadLink == null) { + _downloadLink = Uri.parse(_object.mediaLink); + } + return _downloadLink; + } + + ObjectGeneration get generation { + if (_generation == null) { + _generation = new _ObjectGenerationImpl( + _object.generation, int.parse(_object.metageneration)); + } + return _generation; + } + + /// Additional metadata. + ObjectMetadata get metadata => _metadata; +} + +class _ObjectMetadata implements ObjectMetadata { + storage.Object _object; + ObjectAcl _predefined; + + _ObjectMetadata({ObjectAcl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}) { + _object = new storage.Object(); + _predefined = acl; // Only canned ACLs supported. 
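From the caller's side, this metadata would typically be built with the public `ObjectMetadata` factory and passed along with a write; a sketch, assuming a `bucket` instance and some `pngBytes` to upload:

    var metadata = new ObjectMetadata(
        acl: ObjectAcl.PUBLIC_READ,
        contentType: 'image/png',
        cacheControl: 'public, max-age=3600',
        custom: {'origin': 'camera-1'});
    bucket.writeBytes('images/logo.png', pngBytes, metadata: metadata);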
+ _object.contentType = contentType; + _object.contentEncoding = contentEncoding; + _object.cacheControl = cacheControl; + _object.contentDisposition = contentDisposition; + _object.contentLanguage = contentLanguage; + if (custom != null) _object.metadata = custom; + } + + _ObjectMetadata._(this._object); + + set acl(ObjectAcl value) => _predefined = value; + + String get contentType => _object.contentType; + set contentType(String value) => _object.contentType = value; + + String get contentEncoding => _object.contentEncoding; + set contentEncoding(String value) => _object.contentEncoding = value; + + String get cacheControl => _object.cacheControl; + set cacheControl(String value) => _object.cacheControl = value; + + String get contentDisposition => _object.contentDisposition; + set contentDisposition(String value) => _object.contentDisposition = value; + + String get contentLanguage => _object.contentLanguage; + set contentLanguage(String value) => _object.contentLanguage = value; + + Map get custom => _object.metadata; + set custom(Map value) => _object.metadata = value; + + ObjectMetadata replace({ObjectAcl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}) { + return new _ObjectMetadata( + acl: acl != null ? acl : _predefined, + contentType: contentType != null ? contentType : this.contentType, + contentEncoding: contentEncoding != null ? contentEncoding + : this.contentEncoding, + cacheControl: cacheControl != null ? cacheControl : this.cacheControl, + contentDisposition: contentDisposition != null ? contentDisposition + : this.contentEncoding, + contentLanguage: contentLanguage != null ? contentLanguage + : this.contentEncoding, + custom: custom != null ? custom : this.custom); + } +} + +/// Implementation of StreamSink which handles Google media upload. +/// It provides a StreamSink and logic which selects whether to use normal +/// media upload (multipart mime) or resumable media upload. +class _MediaUploadStreamSink implements StreamSink> { + static const int _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH = 1024 * 1024; + final storage.StorageApi _api; + final String _bucketName; + final String _objectName; + final storage.Object _object; + final int _length; + final int _maxNormalUploadLength; + int _bufferLength = 0; + final List> buffer = new List>(); + final StreamController _controller = new StreamController(sync: true); + StreamSubscription _subscription; + StreamController _resumableController; + final _doneCompleter = new Completer(); + + static const int _STATE_LENGTH_KNOWN = 0; + static const int _STATE_PROBING_LENGTH = 1; + static const int _STATE_DECIDED_RESUMABLE = 2; + int _state; + + _MediaUploadStreamSink( + this._api, this._bucketName, this._objectName, this._object, this._length, + [this._maxNormalUploadLength = _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH]) { + if (_length != null) { + // If the length is known in advance decide on the upload strategy + // immediately + _state = _STATE_LENGTH_KNOWN; + if (_length <= _maxNormalUploadLength) { + _startNormalUpload(_controller.stream, _length); + } else { + _startResumableUpload(_controller.stream, _length); + } + } else { + _state = _STATE_PROBING_LENGTH; + // If the length is not known in advance decide on the upload strategy + // later. Start buffering until enough data has been read to decide. 
+ _subscription = _controller.stream.listen( + _onData, onDone: _onDone, onError: _onError); + } + } + + void add(List event) { + _controller.add(event); + } + + void addError(errorEvent, [StackTrace stackTrace]) { + _controller.addError(errorEvent, stackTrace); + } + + Future addStream(Stream> stream) { + return _controller.addStream(stream); + } + + Future close() { + _controller.close(); + return _doneCompleter.future; + } + + Future get done => _doneCompleter.future; + + _onData(List data) { + assert(_state != _STATE_LENGTH_KNOWN); + if (_state == _STATE_PROBING_LENGTH) { + buffer.add(data); + _bufferLength += data.length; + if (_bufferLength > _maxNormalUploadLength) { + // Start resumable upload. + // TODO: Avoid using another stream-controller. + _resumableController = new StreamController(sync: true); + buffer.forEach(_resumableController.add); + var media = new common.Media(_resumableController.stream, null); + _startResumableUpload(_resumableController.stream, _length); + _state = _STATE_DECIDED_RESUMABLE; + } + } else { + assert(_state == _STATE_DECIDED_RESUMABLE); + _resumableController.add(data); + } + } + + _onDone() { + if (_state == _STATE_PROBING_LENGTH) { + // As the data is already cached don't bother to wait on somebody + // listening on the stream before adding the data. + var controller = new StreamController(); + buffer.forEach(controller.add); + controller.close(); + _startNormalUpload(controller.stream, _bufferLength); + } else { + _resumableController.close(); + } + } + + _onError(e, s) { + // If still deciding on the strategy complete with error. Otherwise + // forward the error for default processing. + if (_state == _STATE_PROBING_LENGTH) { + _completeError(e, s); + } else { + _resumableController.addError(e, s); + } + } + + _completeError(e, s) { + if (_state != _STATE_LENGTH_KNOWN) { + // Always cancel subscription on error. + _subscription.cancel(); + } + _doneCompleter.completeError(e, s); + } + + void _startNormalUpload(Stream stream, int length) { + var media = new common.Media(stream, length); + _api.objects.insert(_object, + _bucketName, + name: _objectName, + uploadMedia: media, + uploadOptions: common.UploadOptions.Default) + .then((response) { + _doneCompleter.complete(new _ObjectStatImpl(response)); + }, onError: _completeError); + } + + void _startResumableUpload(Stream stream, int length) { + var media = new common.Media(stream, length); + _api.objects.insert(_object, + _bucketName, + name: _objectName, + uploadMedia: media, + uploadOptions: common.UploadOptions.Resumable) + .then((response) { + _doneCompleter.complete(new _ObjectStatImpl(response)); + }, onError: _completeError); + } +} diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart new file mode 100644 index 00000000..1b1e9b44 --- /dev/null +++ b/pkgs/gcloud/lib/storage.dart @@ -0,0 +1,384 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// This library provides access to Google Cloud Storage. +/// +/// Google Cloud Storage is an object store for binary objects. Each +/// object has a set of metadata attached to it. For more information on +/// Google Cloud Sorage see https://developers.google.com/storage/. +/// +/// There are two main concepts in Google Cloud Storage: Buckets and Objects. +/// A bucket is a container for objects and objects are the actual binary +/// objects. 
+/// +/// The API has two main classes for dealing with buckets and objects. +/// +/// The class `Storage` is the main API class providing access to working +/// with buckets. This is the 'bucket service' interface. +/// +/// The class `Bucket` provide access to working with objcts in a specific +/// bucket. This is the 'object service' interface. +/// +/// Both buckets have objects, have names. The bucket namespace is flat and +/// global across all projects. This means that a bucket is always +/// addressable using its name without requiring further context. +/// +/// Within buckets the object namespace is also flat. Object are *not* +/// organized hierachical. However, as object names allow the slash `/` +/// character this is often used to simulate a hierachical structure +/// based on common prefixes. +/// +/// This package uses relative and absolute names to refer to objects. A +/// relative name is just the object name within a bucket, and requires the +/// context of a bucket to be used. A relative name just looks like this: +/// +/// object_name +/// +/// An absolute name includes the bucket name and uses the `gs://` prefix +/// also used by the `gsutil` tool. An absolute name looks like this. +/// +/// gs://bucket_name/object_name +/// +/// In most cases relative names are used. Absolute names are typically +/// only used for operations involving objects in different buckets. +library gcloud.storage; + +import 'dart:async'; + +import 'package:http/http.dart' as http; + +import 'package:crypto/crypto.dart' as crypto; +import 'package:googleapis/storage/v1.dart' as storage; +import 'package:googleapis/common/common.dart' as common; + +import 'common.dart'; +export 'common.dart'; + +part 'src/storage_impl.dart'; + +/// Bucket Access Control List +/// +/// Describe an access control list for a bucket. The access control list +/// defines the level of access for different entities. +/// +/// Currently only supports pre-defined ACLs. +/// +/// TODO: Support for building custom ACLs. +class BucketAcl { + static const AUTHENTICATED_READ = const BucketAcl._('authenticatedRead'); + static const PRIVATE = const BucketAcl._('private'); + static const PROJECT_PRIVATE = const BucketAcl._('projectPrivate'); + static const PUBLIC_READ = const BucketAcl._('publicRead'); + static const PUBLIC_READ_WRITE = const BucketAcl._('publicReadWrite'); + + // Enum value for a predefined bucket ACL. + final String _predefined; + + /// Whether this ACL is one of the predefined ones. + bool get isPredefined => true; + + const BucketAcl._(String this._predefined); +} + +/// Object Access Control List +/// +/// Currently only supports pre-defined ACLs. +/// +/// Describe an access control list for an object. The access control list +/// define the level of access for different entities. +/// +/// TODO: Support for building custom ACLs. +class ObjectAcl { + static const AUTHENTICATED_READ = const ObjectAcl._('authenticatedRead'); + static const BUCKET_OWNER_FULL_CONTROL = + const ObjectAcl._('bucketOwnerFullControl'); + static const BUCKET_OWNER_READ = const ObjectAcl._('bucketOwnerRead'); + static const PRIVATE = const ObjectAcl._('private'); + static const PROJECT_PRIVATE = const ObjectAcl._('projectPrivate'); + static const PUBLIC_READ = const ObjectAcl._('publicRead'); + + // Enum value for a predefined bucket ACL. + final String _predefined; + + /// Whether this ACL is one of the predefined ones. 
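Since only canned ACLs are supported, an ACL is simply picked from these constants and passed to the relevant call; for instance, assuming a `storage` instance and a placeholder bucket name:

    // Create a publicly readable bucket, and attach a default object ACL
    // that will be used for objects created through this Bucket instance.
    storage.createBucket('public-assets', acl: BucketAcl.PUBLIC_READ);
    var bucket = storage.bucket('public-assets',
                                defaultObjectAcl: ObjectAcl.PUBLIC_READ);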
+ bool get isPredefined => true; + + const ObjectAcl._(String this._predefined); +} + +/// Information on a bucket. +abstract class BucketInfo { + /// Name of the bucket. + String get bucketName; + + /// When this bucket was created. + DateTime get created; +} + +/// Access to Cloud Storage +abstract class Storage { + /// List of required OAuth2 scopes for Cloud Storage operation. + static const Scopes = const [ storage.StorageApi.DevstorageFullControlScope ]; + + /// Initializes access to cloud storage. + factory Storage(http.Client client, String project) = _StorageImpl; + + /// Create a cloud storage bucket. + /// + /// Creates a cloud storage bucket named [bucketName]. + /// + /// Returns a [Future] which completes when the bucket has been created. + Future createBucket(String bucketName, {BucketAcl acl}); + + /// Delete a cloud storage bucket. + /// + /// Deletes the cloud storage bucket named [bucketName]. + /// + /// If the bucket is not empty the operation will fail. + /// + /// The returned [Future] completes when the operation is finished. + Future deleteBucket(String bucketName); + + /// Access bucket object operations. + /// + /// Instantiates a `Bucket` object refering to the bucket named [bucketName]. + /// + /// If the [defaultObjectAcl] argument is passed the resulting `Bucket` will + /// attach this ACL to all objects created using this `Bucket` object. + /// + /// Otherwise the default object ACL attached to the bucket will be used. + /// + /// Returns a `Bucket` instance. + Bucket bucket(String bucketName, {ObjectAcl defaultObjectAcl}); + + /// Check whether a cloud storage bucket exists. + /// + /// Checks whether the bucket named [bucketName] exists. + /// + /// Returns a [Future] which completes with `true` if the bucket exists. + Future bucketExists(String bucketName); + + /// Get information on a bucket + /// + /// Provide metadata information for bucket named [bucketName]. + /// + /// Returns a [Future] which completes with a `BuckerInfo` object. + Future bucketInfo(String bucketName); + + /// List names of all buckets. + /// + /// Returns a [Stream] of bucket names. + Stream listBucketNames(); + + /// Start paging through names of all buckets. + /// + /// The maximum number of buckets in each page is specified in [pageSize]. + /// + /// Returns a [Future] which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page of buckets. + Future> pageBucketNames({int pageSize: 50}); + + /// Copy an object. + /// + /// Copy object [src] to object [dest]. + /// + /// The names of [src] and [dest] must be absolute. + Future copyObject(String src, String dest); +} + +/// Information on a specific object. +/// +/// This class provides access to information on an object. This includes +/// both the properties which are provided by Cloud Storage (such as the +/// MD5 hash) and the properties which can be changed (such as content type). +/// +/// The properties provided by Cloud Storage are direct properties on this +/// object. +/// +/// The mutable properties are properties on the `metadata` property. +abstract class ObjectInfo { + /// Name of the object. + String get name; + + /// Size of the data. + int get size; + + /// When this object was updated. + DateTime get updated; + + /// MD5 hash of the object. + List get md5Hash; + + /// CRC32c checksum, as described in RFC 4960. + int get crc32CChecksum; + + /// URL for direct download. + Uri get downloadLink; + + /// Object generation. 
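A quick sketch of reading these read-only properties back for an object; the object name and `bucket` are assumptions:

    bucket.info('images/logo.png').then((ObjectInfo info) {
      print('${info.name}: ${info.size} bytes, updated ${info.updated}');
      print('download: ${info.downloadLink}');
    });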
+ ObjectGeneration get generation; + + /// Additional metadata. + ObjectMetadata get metadata; +} + +/// Generational information on an object. +abstract class ObjectGeneration { + /// Object generation. + String get objectGeneration; + + /// Metadata generation. + int get metaGeneration; +} + +/// Access to object metadata +abstract class ObjectMetadata { + factory ObjectMetadata({ObjectAcl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}) = _ObjectMetadata; + /// ACL + /// + /// Currently it is only possible to set the ACL on one of the predefined + /// values from the class `ObjectAcl`. + void set acl(ObjectAcl value); + + /// `Content-Type` for this object. + String contentType; + + /// `Content-Encoding` for this object. + String contentEncoding; + + /// `Cache-Control` for this object. + String cacheControl; + + /// `Content-Disposition` for this object. + String contentDisposition; + + /// `Content-Language` for this object. + /// + /// The value of this field must confirm to RFC 3282. + String contentLanguage; + + /// Custom metadata. + Map custom; + + /// Create a copy of this object with some values replaces. + /// + /// TODO: This cannot be used to set values to null. + ObjectMetadata replace({ObjectAcl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}); +} + +/// Result from List objects in a bucket. +/// +/// Listing operate like a directory listing, despite the object +/// namespace being flat. +/// +/// See [Bucket.list] for information on how the hierarchical structure +/// is determined. +class BucketEntry { + /// Whether this is information on an object. + final bool isObject; + + /// Name of object or directory. + final String name; + + BucketEntry._object(this.name) : isObject = true; + + BucketEntry._directory(this.name) : isObject = false; + + /// Whether this is a prefix. + bool get isDirectory => !isObject; +} + +/// Access to operations on a specific cloud storage buket. +abstract class Bucket { + /// Name of this bucket. + String get bucketName; + + /// Absolute name of an object in this bucket. This includes the gs:// prefix. + String absoluteObjectName(String objectName); + + /// Create a new object. + /// + /// Create an object named [objectName] in the bucket. + /// + /// If an object named [objectName] already exists this object will be + /// replaced. + /// + /// If the length of the data to write is known in advance this can be passed + /// as [length]. This can help to optimize the upload process. + /// + /// Additional metadata on the object can be passed either through the + /// `metadata` argument or through the specific named arguments + /// (such as `contentType`). Values passed through the specific named + /// arguments takes precedence over the values in `metadata`. + /// + /// If [contentType] is not passed the default value of + /// `application/octet-stream` will be used. + /// + /// Returns a `StreamSink` where the object content can be written. When + /// The object content has been written the `StreamSink` completes with + /// an `ObjectStat` instance with the information on the object created. + StreamSink> write( + String objectName, + {int length, ObjectMetadata metadata, String contentType}); + + /// Create an new object in the bucket with specified content. + /// + /// Writes [bytes] to the created object. 
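For instance, small in-memory payloads fit `writeBytes`, while data of unknown length can be piped into the sink returned by `write`; the file name and `bucket` below are assumptions:

    import 'dart:async';
    import 'dart:convert';
    import 'dart:io';

    import 'package:gcloud/storage.dart';

    Future backup(Bucket bucket) {
      // Small, in-memory content goes through writeBytes.
      return bucket
          .writeBytes('notes/hello.txt', UTF8.encode('Hello, world!\n'),
              contentType: 'text/plain')
          .then((_) {
        // Content of unknown length: pipe a stream into the sink returned by
        // write(); it selects multipart or resumable upload internally.
        return new File('backup.tar')
            .openRead()
            .pipe(bucket.write('backups/backup.tar'));
      });
    }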
+ /// + /// See [write] for more information on the additional arguments. + /// + /// Returns a `Future` which completes when the object is written. + Future writeBytes(String name, List bytes, + {String contentType, ObjectMetadata metadata}); + + /// Read object content. + /// + /// TODO: More documentation + Stream> read(String objectName, {int offset: 0, int length}); + + /// Lookup object metadata. + /// + /// TODO: More documentation + Future info(String name); + + /// Update object metadata. + /// + /// TODO: More documentation + Future updateMetadata(String objectName, ObjectMetadata metadata); + + /// List objects in the bucket. + /// + /// Listing operates like a directory listing, despite the object + /// namespace being flat. The character `/` is being used to separate + /// object names into directory components. + /// + /// Retrieves a list of objects and directory components starting + /// with [prefix]. + /// + /// Returns a [Stream] of [BucketEntry]. Each element of the stream + /// represents either an object or a directory component. + Stream list({String prefix}); + + /// Start paging through objects in the bucket. + /// + /// The maximum number of objects in each page is specified in [pageSize]. + /// + /// See [list] for more information on the other arguments. + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page. + Future> page({String prefix, int pageSize: 50}); +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 6946ee7f..ecbe120c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -6,10 +6,12 @@ environment: dependencies: crypto: '>=0.9.0 <0.10.0' http: '>=0.11.0 <0.12.0' - googleapis_beta: '>=0.1.0 <0.2.0' + googleapis: '>=0.2.0 <0.3.0' + googleapis_beta: '>=0.3.0 <0.4.0' dev_dependencies: unittest: '>=0.11.0 <0.12.0' - + mime: '>=0.9.0+3 <0.10.0' + http_parser: '>=0.0.2+5 <0.1.0' transformers: - $dart2js: $include: [] diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart new file mode 100644 index 00000000..1a4bc48f --- /dev/null +++ b/pkgs/gcloud/test/common.dart @@ -0,0 +1,178 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
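To make the `Storage` and `Bucket` interfaces above concrete, a minimal usage sketch could look as follows. This is an illustration of the declared API only, not code from the library itself; it assumes an already authenticated `http.Client` (for example obtained via `googleapis_auth`) and uses placeholder project, bucket and object names.

    import 'dart:async';
    import 'dart:convert';

    import 'package:gcloud/storage.dart';
    import 'package:http/http.dart' as http;

    // Minimal sketch: `client` is assumed to be authenticated with the scopes
    // listed in Storage.Scopes; 'my-project', 'my-bucket' and 'greeting.txt'
    // are placeholders.
    Future storeAndFetch(http.Client client) {
      var storage = new Storage(client, 'my-project');
      return storage.createBucket('my-bucket').then((_) {
        var bucket = storage.bucket('my-bucket');
        return bucket
            .writeBytes('greeting.txt', UTF8.encode('hello, world'),
                        contentType: 'text/plain')
            // Read the object back and concatenate the byte chunks.
            .then((_) => bucket.read('greeting.txt')
                .fold(<int>[], (p, e) => p..addAll(e)))
            .then((bytes) => print(UTF8.decode(bytes)));
      });
    }

The sketch only exercises operations declared above (`createBucket`, `bucket`, `writeBytes` and `read`); error handling and ACLs are omitted for brevity.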
+ +import 'dart:async'; +import 'dart:convert'; + +import 'package:crypto/crypto.dart' as crypto; +import 'package:http/http.dart' as http; +import 'package:http/testing.dart' as http_testing; +import 'package:http_parser/http_parser.dart' as http_parser; +import 'package:mime/mime.dart' as mime; +import 'package:unittest/unittest.dart'; + +const PROJECT = 'test-project'; +const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; + +const RESPONSE_HEADERS = const { + 'content-type': CONTENT_TYPE_JSON_UTF8 +}; + +class MockClient extends http.BaseClient { + final String rootPath; + final Uri rootUri; + + Map> mocks = {}; + http_testing.MockClient client; + + MockClient(String rootPath) : + rootPath = rootPath, + rootUri = Uri.parse('https://www.googleapis.com${rootPath}') { + client = new http_testing.MockClient(handler); + } + + void register(String method, Pattern path, + http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => new Map()); + if (path is RegExp) { + map[new RegExp('$rootPath${path.pattern}')] = handler; + } else { + map['$rootPath$path'] = handler; + } + } + + void registerUpload(String method, Pattern path, + http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => new Map()); + map['/upload$rootPath$path'] = handler; + } + + void registerResumableUpload(String method, Pattern path, + http_testing.MockClientHandler handler) { + var map = mocks.putIfAbsent(method, () => new Map()); + map['/resumable/upload$rootPath$path'] = handler; + } + + void clear() { + mocks = {}; + } + + Future handler(http.Request request) { + expect(request.url.host, 'www.googleapis.com'); + var path = request.url.path; + if (mocks[request.method] == null) { + throw 'No mock handler for method ${request.method} found. ' + 'Request URL was: ${request.url}'; + } + var mockHandler; + mocks[request.method].forEach((pattern, handler) { + if (pattern.matchAsPrefix(path) != null) { + mockHandler = handler; + } + }); + if (mockHandler == null) { + throw 'No mock handler for method ${request.method} and path ' + '[$path] found. 
Request URL was: ${request.url}'; + } + return mockHandler(request); + } + + Future send(http.BaseRequest request) { + return client.send(request); + } + + Future respond(response) { + return new Future.value( + new http.Response( + JSON.encode(response.toJson()), + 200, + headers: RESPONSE_HEADERS)); + } + + Future respondEmpty() { + return new Future.value( + new http.Response('', 200, headers: RESPONSE_HEADERS)); + } + + Future respondInitiateResumableUpload(project) { + Map headers = new Map.from(RESPONSE_HEADERS); + headers['location'] = + 'https://www.googleapis.com/resumable/upload$rootPath' + 'b/$project/o?uploadType=resumable&alt=json&' + 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' + 'Mu5bEoC9b4teJcJUKpqceCUeqKzuoP_jz2ps_dV0P0nT8OTuZQ'; + return new Future.value( + new http.Response('', 200, headers: headers)); + } + + Future respondContinueResumableUpload() { + return new Future.value( + new http.Response('', 308, headers: RESPONSE_HEADERS)); + } + + Future respondBytes(List bytes) { + return new Future.value( + new http.Response.bytes(bytes, 200, headers: RESPONSE_HEADERS)); + } + + Future respondError(statusCode) { + var error = { + 'error': { + 'code': statusCode, + 'message': 'error' + } + }; + return new Future.value( + new http.Response( + JSON.encode(error), statusCode, headers: RESPONSE_HEADERS)); + } + + Future processNormalMediaUpload(http.Request request) { + var completer = new Completer(); + + var contentType = new http_parser.MediaType.parse( + request.headers['content-type']); + expect(contentType.mimeType, 'multipart/related'); + var boundary = contentType.parameters['boundary']; + + var partCount = 0; + var json; + new Stream.fromIterable([request.bodyBytes, [13, 10]]) + .transform(new mime.MimeMultipartTransformer(boundary)) + .listen( + ((mime.MimeMultipart mimeMultipart) { + var contentType = mimeMultipart.headers['content-type']; + partCount++; + if (partCount == 1) { + // First part in the object JSON. + expect(contentType, 'application/json; charset=utf-8'); + mimeMultipart + .transform(UTF8.decoder) + .fold('', (p, e) => '$p$e') + .then((j) => json = j); + } else if (partCount == 2) { + // Second part is the base64 encoded bytes. + expect(contentType, 'application/octet-stream'); + mimeMultipart + .transform(ASCII.decoder) + .fold('', (p, e) => '$p$e') + .then(crypto.CryptoUtils.base64StringToBytes) + .then((bytes) { + completer.complete(new NormalMediaUpload(json, bytes)); + }); + } else { + // Exactly two parts expected. + throw 'Unexpected part count'; + } + })); + + return completer.future; + } +} + +class NormalMediaUpload { + String json; + List bytes; + NormalMediaUpload(this.json, this.bytes); +} \ No newline at end of file diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub_test.dart index ffef153e..82ee3437 100644 --- a/pkgs/gcloud/test/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub_test.dart @@ -1,84 +1,23 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
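As a rough illustration of how tests are expected to use the shared `MockClient` defined in test/common.dart above, a hypothetical helper might register handlers like this; the root path, bucket name and canned responses are placeholders.

    import 'package:googleapis/storage/v1.dart' as storage;

    import 'common.dart';

    // Illustrative only: builds a MockClient that answers a bucket lookup and
    // fails bucket creation.
    MockClient buildStorageMock() {
      var mock = new MockClient('/storage/v1/');
      mock.register('GET', 'b/test-bucket', (request) {
        // Succeed with a JSON-encoded Bucket resource.
        return mock.respond(new storage.Bucket()..name = 'test-bucket');
      });
      mock.register('POST', 'b', (request) {
        // Simulate a server-side failure for bucket creation.
        return mock.respondError(403);
      });
      return mock;
    }

Requests that do not match any registered (method, path) pattern cause the handler to throw, which surfaces unexpected API calls in the tests.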
+ import 'dart:async'; import 'dart:convert'; import 'package:crypto/crypto.dart' as crypto; import 'package:http/http.dart' as http; -import 'package:http/testing.dart' as http_testing; import 'package:unittest/unittest.dart'; import 'package:gcloud/pubsub.dart'; import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; -const PROJECT = 'test-project'; -const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; -const RESPONSE_HEADERS = const { - 'content-type': CONTENT_TYPE_JSON_UTF8 -}; +import 'common.dart'; const String ROOT_PATH = '/pubsub/v1beta1/'; -final Uri ROOT_URI = Uri.parse('https://www.googleapis.com$ROOT_PATH'); - -class MockClient extends http.BaseClient { - Map> mocks = {}; - http_testing.MockClient client; - - MockClient() { - client = new http_testing.MockClient(handler); - } - - void register(String method, Pattern path, - http_testing.MockClientHandler handler) { - mocks.putIfAbsent(method, () => new Map())[path] = handler; - } - - void clear() { - mocks = {}; - } - - Future handler(http.Request request) { - expect(request.url.host, 'www.googleapis.com'); - expect(request.url.path.startsWith(ROOT_PATH), isTrue); - var path = request.url.path.substring(ROOT_PATH.length); - if (mocks[request.method] == null) { - throw 'No mock handler for method ${request.method} found. ' - 'Request URL was: ${request.url}'; - } - var mockHandler; - mocks[request.method].forEach((pattern, handler) { - if (pattern.matchAsPrefix(path) != null) { - mockHandler = handler; - } - }); - if (mockHandler == null) { - throw 'No mock handler for method ${request.method} and path ' - '[$path] found. Request URL was: ${request.url}'; - } - return mockHandler(request); - } - - Future send(http.BaseRequest request) { - return client.send(request); - } - - Future respond(response) { - return new Future.value( - new http.Response( - JSON.encode(response.toJson()), 200, headers: RESPONSE_HEADERS)); - } - - Future respondEmpty() { - return new Future.value( - new http.Response('', 200, headers: RESPONSE_HEADERS)); - } - - Future respondError(statusCode) { - var error = {'error' : {'code': statusCode, 'message': 'error'}}; - return new Future.value( - new http.Response( - JSON.encode(error), statusCode, headers: RESPONSE_HEADERS)); - } -} + +http.Client mockClient() => new MockClient(ROOT_PATH); main() { group('api', () { @@ -96,7 +35,7 @@ main() { var absoluteName = '/topics/$PROJECT/test-topic'; test('create', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register('POST', 'topics', expectAsync((request) { var requestTopic = new pubsub.Topic.fromJson(JSON.decode(request.body)); @@ -117,7 +56,7 @@ main() { }); test('create-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.createTopic(name), throwsArgumentError); @@ -128,7 +67,7 @@ main() { }); test('delete', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); @@ -146,7 +85,7 @@ main() { }); test('delete-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.deleteTopic(name), throwsArgumentError); @@ -157,7 +96,7 @@ main() { }); test('lookup', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'GET', new RegExp(r'topics/[a-z/-]*$'), 
expectAsync((request) { expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); @@ -178,7 +117,7 @@ main() { }); test('lookup-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.lookupTopic(name), throwsArgumentError); @@ -234,7 +173,7 @@ main() { group('list', () { Future q(count) { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, count, 50); var api = new PubSub(mock, PROJECT); @@ -256,7 +195,7 @@ main() { }); test('immediate-pause-resume', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); @@ -270,7 +209,7 @@ main() { }); test('pause-resume', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); @@ -292,7 +231,7 @@ main() { }); test('immediate-cancel', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50, 1); var api = new PubSub(mock, PROJECT); @@ -303,7 +242,7 @@ main() { }); test('cancel', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 170, 50, 1); var api = new PubSub(mock, PROJECT); @@ -316,7 +255,7 @@ main() { test('error', () { runTest(bool withPause) { // Test error on first GET request. - var mock = new MockClient(); + var mock = mockClient(); mock.register('GET', 'topics', expectAsync((request) { return mock.respondError(500); })); @@ -340,7 +279,7 @@ main() { test('error-2', () { // Test error on second GET request. void runTest(bool withPause) { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 51, 50, 1); var api = new PubSub(mock, PROJECT); @@ -374,7 +313,7 @@ main() { group('page', () { test('empty', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 0, 50); var api = new PubSub(mock, PROJECT); @@ -394,7 +333,7 @@ main() { }); test('single', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 10, 50); var api = new PubSub(mock, PROJECT); @@ -419,7 +358,7 @@ main() { var pageCount = 0; var completer = new Completer(); - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, n, pageSize); handlePage(page) { @@ -466,7 +405,7 @@ main() { var absoluteTopicName = '/topics/$PROJECT/test-topic'; test('create', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register('POST', 'subscriptions', expectAsync((request) { var requestSubscription = new pubsub.Subscription.fromJson(JSON.decode(request.body)); @@ -489,7 +428,7 @@ main() { }); test('create-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.createSubscription(name, 'test-topic'), @@ -502,7 +441,7 @@ main() { }); test('delete', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'DELETE', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { @@ -521,7 +460,7 @@ main() { }); test('delete-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.deleteSubscription(name), throwsArgumentError); @@ -532,7 +471,7 @@ main() { }); test('lookup', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'GET', new 
RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { @@ -555,7 +494,7 @@ main() { }); test('lookup-error', () { - var mock = new MockClient(); + var mock = mockClient(); var api = new PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.lookupSubscription(name), throwsArgumentError); @@ -618,7 +557,7 @@ main() { group('list', () { Future q(topic, count) { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, count, 50, topic: topic); var api = new PubSub(mock, PROJECT); @@ -650,7 +589,7 @@ main() { }); test('immediate-pause-resume', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); @@ -664,7 +603,7 @@ main() { }); test('pause-resume', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); @@ -686,7 +625,7 @@ main() { }); test('immediate-cancel', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 70, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); @@ -697,7 +636,7 @@ main() { }); test('cancel', () { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 170, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); @@ -710,7 +649,7 @@ main() { test('error', () { runTest(bool withPause) { // Test error on first GET request. - var mock = new MockClient(); + var mock = mockClient(); mock.register('GET', 'subscriptions', expectAsync((request) { return mock.respondError(500); })); @@ -734,7 +673,7 @@ main() { test('error-2', () { runTest(bool withPause) { // Test error on second GET request. - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 51, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); @@ -769,7 +708,7 @@ main() { group('page', () { emptyTest(String topic) { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 0, 50, topic: topic); var api = new PubSub(mock, PROJECT); @@ -795,7 +734,7 @@ main() { }); singleTest(String topic) { - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, 10, 50, topic: topic); var api = new PubSub(mock, PROJECT); @@ -825,7 +764,7 @@ main() { var pageCount = 0; var completer = new Completer(); - var mock = new MockClient(); + var mock = mockClient(); registerQueryMock(mock, n, pageSize, topic: topic); handlingPage(page) { @@ -904,7 +843,7 @@ main() { } test('publish', () { - var mock = new MockClient(); + var mock = mockClient(); registerLookup(mock); var api = new PubSub(mock, PROJECT); @@ -936,7 +875,7 @@ main() { }); test('publish-with-labels', () { - var mock = new MockClient(); + var mock = mockClient(); registerLookup(mock); var api = new PubSub(mock, PROJECT); @@ -982,7 +921,7 @@ main() { }); test('delete', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); @@ -1016,7 +955,7 @@ main() { var absoluteTopicName = '/topics/$PROJECT/test-topic'; test('delete', () { - var mock = new MockClient(); + var mock = mockClient(); mock.register( 'GET', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); diff --git a/pkgs/gcloud/test/storage_test.dart b/pkgs/gcloud/test/storage_test.dart new file mode 100644 index 00000000..40b19c69 --- /dev/null +++ 
b/pkgs/gcloud/test/storage_test.dart @@ -0,0 +1,546 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.storage; + +import 'dart:async'; +import 'dart:convert'; + +import 'package:http/http.dart' as http; +import 'package:unittest/unittest.dart'; + +import 'package:gcloud/storage.dart'; + +import 'package:googleapis/storage/v1.dart' as storage; +import 'package:googleapis/common/common.dart' as common; + +import 'common.dart'; + + +const String ROOT_PATH = '/storage/v1/'; + + +http.Client mockClient() => new MockClient(ROOT_PATH); + +withMockClient(function) { + var mock = mockClient(); + function(mock, new Storage(mock, PROJECT)); +} + +main() { + group('bucket', () { + var bucketName = 'test-bucket'; + var absoluteName = 'gs://test-bucket'; + + test('create', () { + withMockClient((mock, api) { + mock.register('POST', 'b', expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + expect(requestBucket.name, bucketName); + return mock.respond(new storage.Bucket()..name = bucketName); + })); + + expect(api.createBucket(bucketName), completion(isNull)); + }); + }); + + test('create-with-acl', () { + var predefined = + [[BucketAcl.AUTHENTICATED_READ, 'authenticatedRead'], + [BucketAcl.PRIVATE, 'private'], + [BucketAcl.PROJECT_PRIVATE, 'projectPrivate'], + [BucketAcl.PUBLIC_READ, 'publicRead'], + [BucketAcl.PUBLIC_READ_WRITE, 'publicReadWrite']]; + + withMockClient((mock, api) { + int count = 0; + + mock.register('POST', 'b', expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], + predefined[count++][1]); + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: predefined.length)); + + var futures = []; + for (int i = 0; i < predefined.length; i++) { + futures.add(api.createBucket(bucketName, acl: predefined[i][0])); + } + return Future.wait(futures); + }); + }); + + test('delete', () { + withMockClient((mock, api) { + mock.register( + 'DELETE', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.body.length, 0); + return mock.respond(new storage.Bucket()..name = bucketName);; + })); + + expect(api.deleteBucket(bucketName), completion(isNull)); + }); + }); + + test('exists', () { + var exists = true; + + withMockClient((mock, api) { + mock.register( + 'GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.body.length, 0); + if (exists) { + return mock.respond(new storage.Bucket()..name = bucketName); + } else { + return mock.respondError(404); + } + }, count: 2)); + + return api.bucketExists(bucketName).then(expectAsync((result) { + expect(result, isTrue); + exists = false; + expect(api.bucketExists(bucketName), completion(isFalse)); + })); + }); + }); + + test('stat', () { + withMockClient((mock, api) { + mock.register( + 'GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.body.length, 0); + return mock.respond(new storage.Bucket() + ..name = bucketName + ..timeCreated = new DateTime(2014)); + })); + + return api.bucketInfo(bucketName).then(expectAsync((result) { + 
expect(result.bucketName, bucketName); + expect(result.created, new DateTime(2014)); + })); + }); + }); + + group('list', () { + test('empty', () { + withMockClient((mock, api) { + mock.register('GET', 'b', expectAsync((request) { + expect(request.body.length, 0); + return mock.respond(new storage.Buckets()); + })); + + api.listBucketNames().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null)); + }); + }); + + test('immediate-cancel', () { + withMockClient((mock, api) { + api.listBucketNames().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); + }); + }); + + test('list', () { + // TODO: Test list. + }); + + test('page', () { + // TODO: Test page. + }); + }); + + test('copy', () { + withMockClient((mock, api) { + mock.register( + 'POST', + 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', + expectAsync((request) { + return mock.respond(new storage.Object()..name = 'destObject'); + })); + expect(api.copyObject('gs://srcBucket/srcObject', + 'gs://destBucket/destObject'), + completion(isNull)); + }); + }); + + test('copy-invalid-args', () { + withMockClient((mock, api) { + expect(() => api.copyObject('a', 'b'), throwsA(isFormatException)); + expect(() => api.copyObject('a/b', 'c/d'), throwsA(isFormatException)); + expect(() => api.copyObject('gs://a/b', 'gs://c/'), + throwsA(isFormatException)); + expect(() => api.copyObject('gs://a/b', 'gs:///c'), + throwsA(isFormatException)); + }); + }); + }); + + group('object', () { + var bucketName = 'test-bucket'; + var objectName = 'test-object'; + + var bytesNormalUpload = [1, 2, 3]; + + // Generate a list just above the limit when changing to resumable upload. + const int MB = 1024 * 1024; + const int maxNormalUpload = 1 * MB; + const int minResumableUpload = maxNormalUpload + 1; + var bytesResumableUpload = + new List.generate(minResumableUpload, (e) => e & 255); + + bool testArgumentError(e) => e is ArgumentError; + bool testApiError(e) => e is common.ApiRequestError; + bool testDetailedApiError(e) => e is common.DetailedApiRequestError; + Function expectStatus(status) => (e) => expect(e.status, status); + Function expectNotNull(status) => (o) => expect(o, isNotNull); + + expectNormalUpload(mock, data, objectName) { + var bytes = data.fold([], (p, e) => p..addAll(e)); + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + return mock.respond(new storage.Object()..name = objectName); + })); + })); + } + + expectResumableUpload(mock, data, objectName) { + var bytes = data.fold([], (p, e) => p..addAll(e)); + expect(bytes.length, bytesResumableUpload.length); + int count = 0; + mock.registerResumableUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + var requestObject = + new storage.Object.fromJson(JSON.decode(request.body)); + expect(requestObject.name, objectName); + return mock.respondInitiateResumableUpload(PROJECT); + })); + mock.registerResumableUpload( + 'PUT', 'b/$PROJECT/o', expectAsync((request) { + count++; + if (count == 1) { + expect(request.bodyBytes.length, MB); + return mock.respondContinueResumableUpload(); + } else { + expect(request.bodyBytes.length, 1); + return mock.respond(new storage.Object()..name = objectName); + } + }, count: 2)); + } + + checkResult(result) { + expect(result.name, objectName); + } + 
+ Future pipeToSink(sink, List> data) { + sink.done.then(expectAsync(checkResult)); + sink.done.catchError((e) => throw 'Unexpected $e'); + return new Stream.fromIterable(data) + .pipe(sink) + .then(expectAsync(checkResult)) + .catchError((e) => throw 'Unexpected $e'); + } + + Future addStreamToSink(sink, List> data) { + sink.done.then(expectAsync(checkResult)); + sink.done.catchError((e) => throw 'Unexpected $e'); + return sink.addStream(new Stream.fromIterable(data)) + .then((_) => sink.close()) + .then(expectAsync(checkResult)) + .catchError((e) => throw 'Unexpected $e'); + } + + Future addToSink(sink, List> data) { + sink.done.then(expectAsync(checkResult)); + sink.done.catchError((e) => throw 'Unexpected $e'); + data.forEach((bytes) => sink.add(bytes)); + return sink.close() + .then(expectAsync(checkResult)) + .catchError((e) => throw 'Unexpected $e'); + } + + Future runTest(mock, api, data, length) { + var bucket = api.bucket(bucketName); + + Future upload(fn, sendLength) { + mock.clear(); + if (length <= maxNormalUpload) { + expectNormalUpload(mock, data, objectName); + } else { + expectResumableUpload(mock, data, objectName); + } + var sink; + if (sendLength) { + sink = bucket.write(objectName, length: length); + } else { + sink = bucket.write(objectName); + } + return fn(sink, data); + } + + return upload(pipeToSink, true) + .then(expectAsync((_) => upload(pipeToSink, false))) + .then(expectAsync((_) => upload(addStreamToSink, true))) + .then(expectAsync((_) => upload(addStreamToSink, false))) + .then(expectAsync((_) => upload(addToSink, true))) + .then(expectAsync((_) => upload(addToSink, false))); + }; + + test('write-short-1', () { + withMockClient((mock, api) { + runTest(mock, api, [bytesNormalUpload], bytesNormalUpload.length); + }); + }); + + test('write-short-2', () { + withMockClient((mock, api) { + runTest(mock, + api, + [bytesNormalUpload, bytesNormalUpload], + bytesNormalUpload.length * 2); + }); + }); + + test('write-long', () { + withMockClient((mock, api) { + runTest(mock, api, [bytesResumableUpload], bytesResumableUpload.length); + }); + }); + + test('write-short-error', () { + withMockClient((mock, api) { + + Future test(length) { + mock.clear(); + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.respondError(500); + })); + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName, length: length); + sink.done + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), + test: testDetailedApiError); + sink.done + .catchError(expectAsync(expectNotNull), + test: testDetailedApiError); + return new Stream.fromIterable([bytesNormalUpload]) + .pipe(sink) + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), + test: testDetailedApiError); + } + + test(null) // Unknown length. + .then(expectAsync((_) => test(1))) + .then(expectAsync((_) => test(10))) + .then(expectAsync((_) => test(maxNormalUpload))); + }); + }); + + // TODO: Mock the resumable upload timeout. + test('write-long-error', () { + withMockClient((mock, api) { + + Future test(length) { + mock.clear(); + mock.registerResumableUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.respondInitiateResumableUpload(PROJECT); + })); + mock.registerResumableUpload( + 'PUT', 'b/$PROJECT/o', expectAsync((request) { + return mock.respondError(502); + }, count: 3)); // Default 3 retries in googleapis library. 
+ + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName); + sink.done + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), + test: testDetailedApiError); + return new Stream.fromIterable([bytesResumableUpload]) + .pipe(sink) + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), + test: testDetailedApiError); + } + + test(null) // Unknown length. + .then(expectAsync((_) => test(minResumableUpload))); + }); + }); + + test('write-long-wrong-length', () { + withMockClient((mock, api) { + + Future test(data, length) { + mock.clear(); + mock.registerResumableUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.respondInitiateResumableUpload(PROJECT); + })); + mock.registerResumableUpload( + 'PUT', 'b/$PROJECT/o', expectAsync((request) { + return mock.respondContinueResumableUpload(); + })); // Default 3 retries in googleapis library. + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName, length: length); + sink.done + .then((_) => throw 'Unexpected') + .catchError( + expectAsync(expectNotNull), + test: (e) => e is String || e is common.ApiRequestError); + return new Stream.fromIterable(data) + .pipe(sink) + .then((_) => throw 'Unexpected') + .catchError( + expectAsync(expectNotNull), + test: (e) => e is String || e is common.ApiRequestError); + } + + test([bytesResumableUpload], bytesResumableUpload.length + 1) + .then(expectAsync((_) => test([bytesResumableUpload, [1, 2]], + bytesResumableUpload.length + 1))); + }); + }); + + test('write-add-error', () { + withMockClient((mock, api) { + var bucket = api.bucket(bucketName); + var controller = new StreamController(sync: true); + var sink = bucket.write(bucketName); + sink.done + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), test: testArgumentError); + var stream = new Stream.fromIterable([[1, 2, 3]]); + sink.addStream(stream).then((_) { + sink.addError(new ArgumentError()); + sink.close() + .catchError(expectAsync(expectNotNull), test: testArgumentError); + }); + }); + }); + + test('write-long-add-error', () { + int count = 0; + withMockClient((mock, api) { + mock.registerResumableUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.respondInitiateResumableUpload(PROJECT); + })); + // The resumable upload will buffer until either close or a full chunk, + // so when we add an error the last byte is never sent. Therefore this + // PUT is only called once. 
+ mock.registerResumableUpload( + 'PUT', 'b/$PROJECT/o', expectAsync((request) { + expect(request.bodyBytes.length, 1024 * 1024); + return mock.respondContinueResumableUpload(); + })); + + var bucket = api.bucket(bucketName); + var sink = bucket.write(bucketName); + sink.done + .then((_) => throw 'Unexpected') + .catchError(expectAsync(expectNotNull), test: testArgumentError); + var stream = new Stream.fromIterable([bytesResumableUpload]); + sink.addStream(stream).then((_) { + sink.addError(new ArgumentError()); + sink.close() + .catchError(expectAsync(expectNotNull), test: testArgumentError); + }); + }); + }); + + test('read', () { + var bytes = [1, 2, 3]; + withMockClient((mock, api) { + mock.register( + 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + expect(request.url.queryParameters['alt'], 'media'); + return mock.respondBytes(bytes); + })); + + var bucket = api.bucket(bucketName); + var data = []; + bucket.read(objectName).listen(data.addAll).asFuture() + .then(expectAsync((_) => expect(data, bytes))); + }); + }); + + test('stat', () { + withMockClient((mock, api) { + mock.register( + 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + expect(request.url.queryParameters['alt'], 'json'); + return mock.respond(new storage.Object() + ..name = objectName + ..updated = new DateTime(2014) + ..contentType = 'mime/type'); + })); + + var api = new Storage(mock, PROJECT); + var bucket = api.bucket(bucketName); + bucket.info(objectName).then(expectAsync((stat) { + expect(stat.name, objectName); + expect(stat.updated, new DateTime(2014)); + expect(stat.metadata.contentType, 'mime/type'); + })); + }); + }); + + group('list', () { + test('empty', () { + withMockClient((mock, api) { + mock.register('GET', 'b/$bucketName/o', expectAsync((request) { + expect(request.body.length, 0); + return mock.respond(new storage.Objects()); + })); + + var bucket = api.bucket(bucketName); + bucket.list().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null)); + }); + }); + + test('immediate-cancel', () { + withMockClient((mock, api) { + var bucket = api.bucket(bucketName); + bucket.list().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); + }); + }); + + test('list', () { + // TODO: Test list. + }); + + test('page', () { + // TODO: Test page. + }); + }); + }); +} From 42a03518c73cd12e5875f81d470ced807b7c90fd Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 27 Oct 2014 17:05:51 +0100 Subject: [PATCH 013/239] Remove support for PolyModels This will eventually be added in a different form (without using a hidden class property which contains the list of class names in the hierachy) by using multi-kind queries. But multi-kind queries are not yet supported in the datastore V3. 
R=sgjesse@google.com Review URL: https://codereview.chromium.org//649703004 --- pkgs/gcloud/lib/src/db/model_description.dart | 131 -------------- pkgs/gcloud/lib/src/db/models.dart | 8 - pkgs/gcloud/test/db/e2e/db_test.dart | 163 +----------------- 3 files changed, 5 insertions(+), 297 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/model_description.dart b/pkgs/gcloud/lib/src/db/model_description.dart index f060fb28..6ae8fdad 100644 --- a/pkgs/gcloud/lib/src/db/model_description.dart +++ b/pkgs/gcloud/lib/src/db/model_description.dart @@ -157,137 +157,6 @@ abstract class ModelDescription { } } -abstract class PolyModelDescription extends ModelDescription { - static const int STATE_POLYCLASSES = ModelDescription.STATE_LAST + 1; - static const int STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP = - ModelDescription.STATE_LAST + 2; - static const int STATE_LAST = STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP; - - final __classField = const StringListProperty(propertyName: 'class'); - const PolyModelDescription() : super('PolyModel'); - - initialize(ModelDB db) { - List stateVector = super.initialize(db); - - // Generate the class list, which begins with the root of the polymorphic - // class hierachy (first direct subclass of PolyModelDescription) and goes - // down to the concrete class. - List getPolyClasses(ModelDescription modelDescription) { - List classes = []; - var currentModelDesc = modelDescription.runtimeType; - while (currentModelDesc != PolyModelDescription) { - var classMirror = mirrors.reflectClass(currentModelDesc); - String polyModelName = classMirror.getField(#PolyModelName).reflectee; - classes.add(polyModelName); - currentModelDesc = classMirror.superclass.reflectedType; - } - return classes.reversed.toList(growable: false); - } - - // NOTE: This is a redundant computation, but we do this only *once* when - // initializing. [O(N^2) where N is number of poly model classes.] - // The reason is every model class should generate it's own state vector - // but part of that state is shared across a complete poly class hierachy. - // Could be optimized if necessary. - Map getPolyClassList2ModelDescriptionMap() { - var map = new HashMap(); - for (var md in db.modelDescriptions) { - if (md is PolyModelDescription) { - map[_getPolyClassString(getPolyClasses(md))] = md; - } - } - return map; - } - - return new List.from([] - ..addAll(stateVector) - ..add(getPolyClasses(this)) - ..add(getPolyClassList2ModelDescriptionMap()), - growable: false); - } - - // We register only the root PolyModelDescription class with the kindName(). 
- bool registerKind(ModelDB db) => _getPolyClasses(db, this).length == 1; - - String kindName(ModelDB db) => _getPolyClasses(db, this).first; - - datastore.Entity encodeModel(ModelDB db, Model model) { - List stateVector = db.modelDescriptionState(this); - var key = db.toDatastoreKey(model.key); - var properties = {}; - var unIndexedProperties = - stateVector[ModelDescription.STATE_UNINDEXED_PROPERTIES]; - var mirror = mirrors.reflect(model); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - _encodeProperty(db, model, mirror, properties, fieldName, prop); - }); - properties[__classField.propertyName] = - __classField.encodeValue(db, _getPolyClasses(db, this)); - - return new datastore.Entity( - key, properties, unIndexedProperties: unIndexedProperties); - } - - Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) { - List stateVector = db.modelDescriptionState(this); - ModelDescription getModelDescriptionByClassList(List classes) { - var polyClassString = _getPolyClassString(classes); - var bottomPolyModelDescription = stateVector - [STATE_POLYCLASSLIST_TO_MODEL_DESCRIPTION_MAP][polyClassString]; - if (bottomPolyModelDescription == null) { - throw new StateError( - 'Could not get ModelDescription for ${classes.join(' ')}'); - } - return bottomPolyModelDescription; - } - - if (entity == null) return null; - - // NOTE: this assumes a default constructor for the model classes! - List classes = __classField.decodePrimitiveValue( - db, entity.properties[__classField.propertyName]); - var bottomPolyModelDescription = getModelDescriptionByClassList(classes); - var classMirror = db.modelClass(bottomPolyModelDescription); - var mirror = classMirror.newInstance(const Symbol(''), []); - - // Set the id and the parent key - mirror.reflectee.id = key.id; - mirror.reflectee.parentKey = key.parent; - - db.propertiesForModel(bottomPolyModelDescription).forEach( - (String fieldName, Property prop) { - if (fieldName != __classField.propertyName) { - bottomPolyModelDescription._decodeProperty( - db, entity, mirror, fieldName, prop); - } - }); - return mirror.reflectee; - } - - Query finishQuery(ModelDB db, Query q) { - q.filter('class IN', [_getPolyClasses(db, this).last]); - return q; - } - - List _getPolyClasses(ModelDB db, - ModelDescription bottomModelDescription) { - List stateVector = db.modelDescriptionState(this); - return stateVector[STATE_POLYCLASSES]; - } - - String _getPolyClassString(List classes) { - // NOTE: We assume here that a classnames do not contain '\n' in it. - return classes.join('\n'); - } - - String fieldNameToPropertyName(ModelDB db, String fieldName) { - var propertyName = super.propertyNameToFieldName(db, fieldName); - if (propertyName == null && fieldName == 'class') return 'class'; - return propertyName; - } -} - // NOTE/TODO: // Currently expanded properties are only // * decoded if there are no clashes in [usedNames] diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index ffadf07e..eaa3d242 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -115,14 +115,6 @@ abstract class Model { Key get key => parentKey.append(this.runtimeType, id: id); } -/** - * Superclass for all polymorphic model classes. - * - * The direct subclass of this model must have a [ModelMetadata] annotation - * containing a [PolyModelDescription]. - */ -abstract class PolyModel extends Model { } - /** * Superclass for all expanded model classes. 
* diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart index 715a9cc4..fd926139 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -40,30 +40,6 @@ library db_test; /// - name: name /// direction: asc /// -/// - kind: PolyPerson -/// ancestor: no -/// properties: -/// - name: class -/// direction: asc -/// - name: unIndexedName -/// direction: asc -/// -/// - kind: PolyPerson -/// ancestor: no -/// properties: -/// - name: class -/// direction: asc -/// - name: indexedName -/// direction: asc -/// -/// - kind: PolyPerson -/// ancestor: no -/// properties: -/// - name: class -/// direction: asc -/// - name: name -/// direction: asc -/// /// $ gcloud preview datastore create-indexes . /// 02:19 PM Host: appengine.google.com /// 02:19 PM Uploading index definitions. @@ -136,54 +112,6 @@ class UserDesc extends PersonDesc { } -@db.ModelMetadata(const PolyPersonDesc()) -class PolyPerson extends db.PolyModel { - String name; - - // NOTE: There is no need to store these values, we make these two an alias - // for [name]. They are only used for querying. - String get indexedName => name; - String get unIndexedName => name; - set indexedName(String newName) => name = newName; - set unIndexedName(String newName) => name = newName; - - operator==(Object other) => isSame(other); - - isSame(Object other) { - return - other is PolyPerson && - id == other.id && - name == other.name; - } -} - -@db.ModelMetadata(const PolyUserDesc()) -class PolyUser extends PolyPerson { - String nickname; - - isSame(Object other) => - super.isSame(other) && other is PolyUser && nickname == other.nickname; -} - -class PolyPersonDesc extends db.PolyModelDescription { - static String PolyModelName = 'PolyPerson'; - - final id = const db.IntProperty(); - final name = const db.StringProperty(); - final indexedName = const db.StringProperty(indexed: true); - final unIndexedName = const db.StringProperty(indexed: false); - - const PolyPersonDesc() : super(); -} - -class PolyUserDesc extends PolyPersonDesc { - static String PolyModelName = 'PolyUser'; - - final nickname = const db.StringProperty(); - const PolyUserDesc(); -} - - @db.ModelMetadata(const ExpandoPersonDesc()) class ExpandoPerson extends db.ExpandoModel { String name; @@ -317,24 +245,6 @@ runTests(db.DatastoreDB store) { } return testInsertLookupDelete(users); }); - test('poly_insert', () { - var root = store.emptyKey; - var persons = []; - for (var i = 1; i <= 10; i++) { - persons.add(new PolyPerson() - ..id = i - ..parentKey = root - ..name = 'user$i'); - } - for (var i = 11; i <= 20; i++) { - persons.add(new PolyUser() - ..id = i - ..parentKey = root - ..name = 'user$i' - ..nickname = 'nickname${i%3}'); - } - return testInsertLookupDelete(persons); - }); test('expando_insert', () { var root = store.emptyKey; var expandoPersons = []; @@ -366,15 +276,6 @@ runTests(db.DatastoreDB store) { ..age = 2 ..name = 'user2' ..nickname = 'nickname2'); - models.add(new PolyPerson() - ..id = 3 - ..parentKey = root - ..name = 'user3'); - models.add(new PolyUser() - ..id = 4 - ..parentKey = root - ..name = 'user4' - ..nickname = 'nickname4'); var expandoPerson = new ExpandoPerson() ..parentKey = root ..id = 3 @@ -512,26 +413,6 @@ runTests(db.DatastoreDB store) { ..languages = languages); } - var polyPersons = []; - var polyUsers = []; - for (var i = 1; i <= 10; i++) { - polyPersons.add(new PolyPerson() - ..id = i - ..parentKey = root - ..name = 'person$i'); - } - for (var i = 11; i <= 20; i++) { - 
polyPersons.add(new PolyUser() - ..id = i - ..parentKey = root - ..name = 'user$i' - ..nickname = 'nickname${i%3}'); - polyUsers.add(polyPersons.last); - } - var sortedPolyPersons = [] - ..addAll(polyPersons) - ..sort((a, b) => a.name.compareTo(b.name)); - var expandoPersons = []; for (var i = 1; i <= 3; i++) { var expandoPerson = new ExpandoPerson() @@ -587,7 +468,6 @@ runTests(db.DatastoreDB store) { var allInserts = [] ..addAll(users) - ..addAll(polyPersons) ..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { @@ -668,48 +548,16 @@ runTests(db.DatastoreDB store) { compareModels(barUsers, models, anyOrder: true); }); }, - // PolyModel queries - () { - return store.query(PolyPerson) - ..run().then((List models) { - // We compare here the query result in [models] to - // *all* persons (i.e. [polyPersons] contains all Person and - // User model objects) - compareModels(polyPersons, models, anyOrder: true); - }); - }, - () { - return store.query(PolyUser) - ..run().then((List models) { - // Here we ensure that [models] contains only Users. - compareModels(polyUsers, models, anyOrder: true); - }); - }, - - // PolyModel indexed/unindexed queries - () { - return store.query(PolyPerson) - ..filter('indexedName =', 'person1') - ..run().then((List models) { - compareModels([polyPersons[0]], models, anyOrder: true); - }); - }, - () { - return store.query(PolyPerson) - ..filter('unIndexedName =', 'person1') - ..run().then((List models) { - compareModels([], models, anyOrder: true); - }); - }, // Simple limit/offset test. () { - return store.query(PolyPerson) - ..order('name') + return store.query(User) + ..order('nickname') ..offset(3) - ..limit(10) + ..limit(5) ..run().then((List models) { - var expectedModels = sortedPolyPersons.sublist(3, 13); + var expectedModels = + usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); compareModels(expectedModels, models, anyOrder: true); }); }, @@ -768,4 +616,3 @@ runTests(db.DatastoreDB store) { }); }); } - From 74ccd33d90e8e42dad665b41615ab7f64b2b67d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Tue, 28 Oct 2014 15:54:53 +0100 Subject: [PATCH 014/239] Add first set of end-to-end tests Simple tests which just creates buckets and deletes them again. R=kustermann@google.com, ricow@google.com BUG= Review URL: https://codereview.chromium.org//681293002 --- pkgs/gcloud/README.md | 13 +++ pkgs/gcloud/pubspec.yaml | 1 + pkgs/gcloud/test/storage/e2e_test.dart | 113 +++++++++++++++++++++++++ 3 files changed, 127 insertions(+) create mode 100644 pkgs/gcloud/test/storage/e2e_test.dart diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 140e8d0c..698340fe 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -1,3 +1,16 @@ ## Google Cloud Platform High level interface for Google Cloud Platform APIs + +### Running tests + +If you want to run the end-to-end tests, a Google Cloud project is required. +When running these tests the following envrionment variables needs to be set: + + GCLOUD_E2E_TEST_PROJECT + GCLOUD_E2E_TEST_KEY + +The vaule of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name +of the Google Cloud project to use. The value of the environment variable +`GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting wiht `gs://`) +to a JSON key file for a service account providing access to the Cloud Project. 
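For example, assuming the package dependencies have been fetched with `pub get` and using placeholder values for the project and key location, the storage end-to-end test could be invoked roughly like this:

    GCLOUD_E2E_TEST_PROJECT=my-project \
    GCLOUD_E2E_TEST_KEY=gs://my-bucket/service-account.json \
    dart test/storage/e2e_test.dart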
diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ecbe120c..0ad3555e 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -12,6 +12,7 @@ dev_dependencies: unittest: '>=0.11.0 <0.12.0' mime: '>=0.9.0+3 <0.10.0' http_parser: '>=0.0.2+5 <0.1.0' + googleapis_auth: any transformers: - $dart2js: $include: [] diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart new file mode 100644 index 00000000..0254f75a --- /dev/null +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -0,0 +1,113 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.storage; + +import 'dart:async'; +import 'dart:io'; + +import 'package:gcloud/storage.dart'; +import 'package:googleapis/common/common.dart' as common; +import 'package:googleapis_auth/auth_io.dart' as auth; +import 'package:unittest/unittest.dart'; + + +// Enviroment variables for specifying the cloud project to use and the +// location of the service account key for that project. +const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; +const String SERVICE_KEY_LOCATION_ENV = 'GCLOUD_E2E_TEST_KEY'; + +// Default project and service key location used when running on the package +// bot. +const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; +const String DEFAULT_KEY_LOCATION = + 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; + +bool onBot() { + // When running on the package-bot the current user is chrome-bot. + var envName; + if (Platform.isWindows) { + envName = 'USERNAME'; + } else { + envName = 'USER'; + } + return Platform.environment[envName] == 'chrome-bot'; +} + +// Get the service key from the specified location. +Future serviceKeyJson(String serviceKeyLocation) { + if (!serviceKeyLocation.startsWith('gs://')) { + throw new Exception('Service key location must start with gs://'); + } + var future; + if (onBot()) { + future = Process.run( + 'python', ['third_party/gsutil/gsutil', 'cat', serviceKeyLocation], + runInShell: true); + } else { + var gsutil = Platform.isWindows ? 'gsutil.cmd' : 'gsutil'; + future = Process.run(gsutil, ['cat', serviceKeyLocation]); + } + return future.then((result) { + if (result.exitCode != 0) { + throw new Exception('Failed to run gsutil, ${result.stderr}'); + } + return result.stdout; + }); +} + +Future connect() { + String project = Platform.environment[PROJECT_ENV]; + String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; + + if (!onBot() && (project == null || serviceKeyLocation == null)) { + throw new StateError( + 'Envoronment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' + 'required when not running on the package bot'); + } + + project = project != null ? project : DEFAULT_PROJECT; + serviceKeyLocation = + serviceKeyLocation != null ? 
serviceKeyLocation : DEFAULT_KEY_LOCATION; + + return serviceKeyJson(serviceKeyLocation).then((keyJson) { + var creds = new auth.ServiceAccountCredentials.fromJson(keyJson); + return auth.clientViaServiceAccount(creds, Storage.Scopes) + .then((client) => new Storage(client, project)); + }); +} + +String generateBucketName() { + var id = new DateTime.now().millisecondsSinceEpoch; + return 'dart-e2e-test-$id'; +} + +bool testDetailedApiError(e) => e is common.DetailedApiRequestError; + +runTests(Storage storage) { + group('bucket', () { + + test('create-delete', () { + var bucketName = generateBucketName(); + + storage.createBucket(bucketName).then(expectAsync((result) { + expect(result, isNull); + expect(storage.deleteBucket(bucketName), completion(isNull)); + })); + }); + + test('create-error', () { + var bucketName = generateBucketName(); + + storage.createBucket('goog-reserved').catchError(expectAsync((e) { + expect(e, isNotNull); + }), test: testDetailedApiError); + }); + }); +} + +main() { + // Share the same storage connection for all tests. + connect().then(runTests); +} From 6afc6cd3f7cb3c9a236ac0a566c03f25f1dff30f Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 29 Oct 2014 11:06:10 +0100 Subject: [PATCH 015/239] Decouple annotations from encoding/decoding (which is handled by ModelDB) This makes annotations declarative only and the ModelDB instance is responsible for processing annotations and converting between model instances and datastore entities. The CL contains also a fix in the db e2e test and more strict assertions by asserting the correct order is returned. R=sgjesse@google.com Review URL: https://codereview.chromium.org//682963003 --- pkgs/gcloud/lib/db.dart | 1 + pkgs/gcloud/lib/src/db/db.dart | 27 +- pkgs/gcloud/lib/src/db/model_db.dart | 303 +--------- pkgs/gcloud/lib/src/db/model_db_impl.dart | 526 ++++++++++++++++++ pkgs/gcloud/lib/src/db/model_description.dart | 246 +------- pkgs/gcloud/test/db/e2e/db_test.dart | 28 +- pkgs/gcloud/test/db/model_db_test.dart | 14 +- pkgs/gcloud/test/db/properties_test.dart | 2 + 8 files changed, 589 insertions(+), 558 deletions(-) create mode 100644 pkgs/gcloud/lib/src/db/model_db_impl.dart diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 9ef333f2..478a8a7f 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -13,5 +13,6 @@ part 'src/db/annotations.dart'; part 'src/db/db.dart'; part 'src/db/models.dart'; part 'src/db/model_db.dart'; +part 'src/db/model_db_impl.dart'; part 'src/db/model_description.dart'; part 'src/db/properties.dart'; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 7a73addf..13c4a90b 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -47,13 +47,11 @@ class Transaction { */ Query query(Type kind, Key ancestorKey, {Partition partition}) { _checkSealed(); - var modelDescription = db.modelDB.modelDescriptionForType(kind); - var query = new Query(db, - modelDescription, + return new Query(db, + kind, partition: partition, ancestorKey: ancestorKey, datastoreTransaction: _datastoreTransaction); - return modelDescription.finishQuery(db.modelDB, query); } /** @@ -102,7 +100,6 @@ class Query { final DatastoreDB _db; final datastore.Transaction _transaction; final String _kind; - final ModelDescription _modelDescription; final Partition _partition; final Key _ancestorKey; @@ -112,11 +109,12 @@ class Query { int _offset; int _limit; - Query(DatastoreDB dbImpl, ModelDescription modelDescription, + Query(DatastoreDB 
dbImpl, Type kind, {Partition partition, Key ancestorKey, datastore.Transaction datastoreTransaction}) - : _db = dbImpl, _kind = modelDescription.kindName(dbImpl.modelDB), - _modelDescription = modelDescription, _partition = partition, + : _db = dbImpl, + _kind = dbImpl.modelDB.kindName(kind), + _partition = partition, _ancestorKey = ancestorKey, _transaction = datastoreTransaction; /** @@ -209,10 +207,10 @@ class Query { String _convertToDatastoreName(String name) { var propertyName = - _modelDescription.fieldNameToPropertyName(_db.modelDB, name); + _db.modelDB.fieldNameToPropertyName(_kind, name); if (propertyName == null) { throw new ArgumentError( - "Field $name is not available for kind $_modelDescription"); + "Field $name is not available for kind $_kind"); } return propertyName; } @@ -224,7 +222,7 @@ class DatastoreDB { Partition _defaultPartition; DatastoreDB(this.datastore, {ModelDB modelDB}) - : _modelDB = modelDB != null ? modelDB : new ModelDB() { + : _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { _defaultPartition = new Partition(null); } @@ -275,13 +273,10 @@ class DatastoreDB { * Build a query for [kind] models. */ Query query(Type kind, {Partition partition, Key ancestorKey}) { - var modelDescription = modelDB.modelDescriptionForType(kind); - - var q = new Query(this, - modelDescription, + return new Query(this, + kind, partition: partition, ancestorKey: ancestorKey); - return modelDescription.finishQuery(modelDB, q); } /** diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 6b8d51d4..0655ac3a 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -4,311 +4,40 @@ part of gcloud.db; - -// TODO: We might move some of the complexity of this class to -// [ModelDescription]! - /** - * Represents an in-memory database of all model classes and it's corresponding - * [ModelDescriptions]s. + * A database of all registered models. * - * NOTE: This is an internal class and should not be used by normal users. + * Responsible for converting between dart model objects and datastore entities. */ -class ModelDB { - // Map of properties for a given [ModelDescription] - final Map> _modelProperties = {}; - - // Arbitrary state a model description might want to have - final Map _modelDescriptionStates = {}; - - // Needed when getting data from datastore to instantiate model objects. - final Map _modelDescriptionByKind = {}; - final Map _modelClasses = {}; - final Map _typeByModelDescription = {}; - - // Needed when application gives us model objects. - final Map _modelDescriptionByType = {}; - - - /** - * Initializes a new [ModelDB] from all libraries. - * - * This will scan all libraries for [Model] classes and their - * [ModelDescription] annotations. It will also scan all [Property] instances - * on all [ModelDescription] objects. - * - * Once all libraries have been scanned it will call each [ModelDescription]s - * 'initialize' method and stores the returned state object (this can be - * queried later with [modelDescriptionState]. - * - * Afterwards every [ModelDescription] will be asked whether it wants to - * register a kind name and if so, that kind name will be associated with it. - * - * In case an error is encountered (e.g. two [ModelDescription] classes with - * the same kind name) a [StateError] will be thrown. - */ - ModelDB() { - // WARNING: This is O(n) of the source code, which is very bad! 
- // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)` - _initialize(mirrors.currentMirrorSystem().libraries.values); - } - - /** - * Initializes a new [ModelDB] only using the library [librarySymbol]. - * - * See also the default [ModelDB] constructor. - */ - ModelDB.fromLibrary(Symbol librarySymbol) { - _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); - } - - +abstract class ModelDB { /** * Converts a [datastore.Key] to a [Key]. */ - Key fromDatastoreKey(datastore.Key datastoreKey) { - var namespace = new Partition(datastoreKey.partition.namespace); - Key key = namespace.emptyKey; - for (var element in datastoreKey.elements) { - var type = _typeByModelDescription[_modelDescriptionByKind[element.kind]]; - assert (type != null); - key = key.append(type, id: element.id); - } - return key; - } + Key fromDatastoreKey(datastore.Key datastoreKey); /** * Converts a [Key] to a [datastore.Key]. */ - datastore.Key toDatastoreKey(Key dbKey) { - List elements = []; - var currentKey = dbKey; - while (!currentKey.isEmpty) { - var id = currentKey.id; - - var modelDescription = modelDescriptionForType(currentKey.type); - var idProperty = - propertiesForModel(modelDescription)[ModelDescription.ID_FIELDNAME]; - var kind = modelDescription.kindName(this); - - if (idProperty is IntProperty && (id != null && id is! int)) { - throw new ArgumentError('Expected an integer id property but ' - 'id was of type ${id.runtimeType}'); - } - if (idProperty is StringProperty && (id != null && id is! String)) { - throw new ArgumentError('Expected a string id property but ' - 'id was of type ${id.runtimeType}'); - } - - elements.add(new datastore.KeyElement(kind, id)); - currentKey = currentKey.parent; - } - Partition partition = currentKey._parent; - return new datastore.Key( - elements.reversed.toList(), - partition: new datastore.Partition(partition.namespace)); - } + datastore.Key toDatastoreKey(Key dbKey); /** * Converts a [Model] instance to a [datastore.Entity]. */ - datastore.Entity toDatastoreEntity(Model model) { - try { - var modelDescription = modelDescriptionForType(model.runtimeType); - return modelDescription.encodeModel(this, model); - } catch (error, stack) { - throw - new ArgumentError('Error while encoding entity ($error, $stack).'); - } - } + datastore.Entity toDatastoreEntity(Model model) ; /** * Converts a [datastore.Entity] to a [Model] instance. 
*/ - Model fromDatastoreEntity(datastore.Entity entity) { - if (entity == null) return null; - - Key key = fromDatastoreKey(entity.key); - var kind = entity.key.elements.last.kind; - var modelDescription = _modelDescriptionByKind[kind]; - if (modelDescription == null) { - throw new StateError('Trying to deserialize entity of kind ' - '$kind, but no Model class available for it.'); - } - - try { - return modelDescription.decodeEntity(this, key, entity); - } catch (error, stack) { - throw new StateError('Error while decoding entity ($error, $stack).'); - } - } - - - Iterable get modelDescriptions { - return _modelDescriptionByType.values; - } - - Map propertiesForModel( - ModelDescription modelDescription) { - return _modelProperties[modelDescription]; - } + Model fromDatastoreEntity(datastore.Entity entity); - ModelDescription modelDescriptionForType(Type type) { - return _modelDescriptionByType[type]; - } - - mirrors.ClassMirror modelClass(ModelDescription md) { - return _modelClasses[md]; - } - - modelDescriptionState(ModelDescription modelDescription) { - return _modelDescriptionStates[modelDescription]; - } - - - void _initialize(Iterable libraries) { - libraries.forEach((mirrors.LibraryMirror lm) { - lm.declarations.values - .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) - .forEach((mirrors.ClassMirror declaration) { - var modelDescription = _descriptionFromModelClass(declaration); - if (modelDescription != null) { - _newModelDescription(declaration, modelDescription); - } - }); - }); - - // Ask every [ModelDescription] to compute whatever global state it wants - // to have. - for (var modelDescription in modelDescriptions) { - _modelDescriptionStates[modelDescription] = - modelDescription.initialize(this); - } - - - // Ask every [ModelDescription] whether we should register it with a given - // kind name. - for (var modelDescription in modelDescriptions) { - if (modelDescription.registerKind(this)) { - var kindName = modelDescription.kindName(this); - if (_modelDescriptionByKind.containsKey(kindName)) { - throw new StateError( - 'Cannot have two ModelDescriptions ' - 'with the same kind ($kindName)'); - } - _modelDescriptionByKind[kindName] = modelDescription; - } - } - } - - void _newModelDescription(mirrors.ClassMirror modelClass, - ModelDescription modelDesc) { - assert (!_modelDescriptionByType.containsKey(modelClass.reflectedType)); - - // Map the [modelClass.runtimeType] to this [modelDesc] and vice versa. - _modelDescriptionByType[modelClass.reflectedType] = modelDesc; - _typeByModelDescription[modelDesc] = modelClass.reflectedType; - // Map this [modelDesc] to the [modelClass] mirror for easy instantiation. - _modelClasses[modelDesc] = modelClass; - - // TODO: Move this out to the model description classes. - - // Get all properties, validate that the 'id' property is valid. - var properties = _propertiesFromModelDescription(modelDesc); - var idProperty = properties[ModelDescription.ID_FIELDNAME]; - if (idProperty == null || - (idProperty is! IntProperty && idProperty is! StringProperty)) { - throw new StateError( - 'You need to have an id property and it has to be either an ' - '[IntProperty] or a [StringProperty].'); - } - if (idProperty.propertyName != null) { - throw new StateError( - 'You can not have a new name for the id property.'); - } - _modelProperties[modelDesc] = properties; - - // Ensure we have an empty constructor. 
- bool defaultConstructorFound = false; - for (var declaration in modelClass.declarations.values) { - if (declaration is mirrors.MethodMirror) { - if (declaration.isConstructor && - declaration.constructorName == const Symbol('') && - declaration.parameters.length == 0) { - defaultConstructorFound = true; - break; - } - } - } - if (!defaultConstructorFound) { - throw new StateError( - 'Class ${modelClass.simpleName} does not have a default ' - 'constructor.'); - } - } - - // TODO: Move this out to the model description classes. - Map _propertiesFromModelDescription( - ModelDescription modelDescription) { - var modelMirror = mirrors.reflect(modelDescription); - var modelClassMirror = mirrors.reflectClass(modelDescription.runtimeType); - - var properties = new Map(); - var propertyNames = new Set(); - - // Loop over all classes in the inheritence path up to the Object class. - while (modelClassMirror.superclass != null) { - var memberMap = modelClassMirror.instanceMembers; - // Loop over all declarations (which includes fields) - modelClassMirror.declarations.forEach((Symbol s, _) { - // Look if we do have a method for [s] - if (memberMap.containsKey(s) && memberMap[s].isGetter) { - // Get a String representation of the field and the value. - var fieldName = mirrors.MirrorSystem.getName(s); - var fieldValue = modelMirror.getField(s).reflectee; - // If the field value is a Property instance we add it to the list - // of properties. - // Fields with '__' are reserved and will not be used. - if (!fieldName.startsWith('__') && - fieldValue != null && - fieldValue is Property) { - var propertyName = fieldValue.propertyName; - if (propertyName == null) propertyName = fieldName; - - if (properties.containsKey(fieldName)) { - throw new StateError( - 'Cannot have two Property objects describing the same Model ' - 'property name in a ModelDescription class hierarchy.'); - } - - if (propertyNames.contains(propertyName)) { - throw new StateError( - 'Cannot have two Property objects mapping to the same ' - 'datastore property name ($propertyName).'); - } - properties[fieldName] = fieldValue; - propertyNames.add(propertyName); - } - } - }); - modelClassMirror = modelClassMirror.superclass; - } - - return properties; - } + /** + * Returns the kind name for instances of [type]. + */ + String kindName(Type type); - ModelDescription _descriptionFromModelClass(mirrors.ClassMirror classMirror) { - var result; - for (mirrors.InstanceMirror instance in classMirror.metadata) { - if (instance.reflectee.runtimeType == ModelMetadata) { - if (result != null) { - throw new StateError( - 'Cannot have more than one ModelMetadata() annotation ' - 'on a Model class'); - } - result = instance.getField(#description).reflectee; - } - } - return result; - } + /** + * Returns the property name used for [fieldName] + */ + // TODO: Get rid of this eventually. + String fieldNameToPropertyName(String kind, String fieldName); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart new file mode 100644 index 00000000..6b28aaf8 --- /dev/null +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -0,0 +1,526 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +part of gcloud.db; + + +class ModelDBImpl implements ModelDB { + // Map of properties for a given [ModelDescription] + final Map> _modelProperties = {}; + + // Arbitrary state a model description might want to have + final Map _modelDescriptionStates = {}; + + // Needed when getting data from datastore to instantiate model objects. + final Map _modelDescriptionByKind = {}; + final Map _modelClasses = {}; + final Map _typeByModelDescription = {}; + + // Needed when application gives us model objects. + final Map _modelDescriptionByType = {}; + + + /** + * Initializes a new [ModelDB] from all libraries. + * + * This will scan all libraries for [Model] classes and their + * [ModelDescription] annotations. It will also scan all [Property] instances + * on all [ModelDescription] objects. + * + * Once all libraries have been scanned it will call each [ModelDescription]s + * 'initialize' method and stores the returned state object (this can be + * queried later with [modelDescriptionState]. + * + * Afterwards every [ModelDescription] will be asked whether it wants to + * register a kind name and if so, that kind name will be associated with it. + * + * In case an error is encountered (e.g. two [ModelDescription] classes with + * the same kind name) a [StateError] will be thrown. + */ + ModelDBImpl() { + // WARNING: This is O(n) of the source code, which is very bad! + // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)` + _initialize(mirrors.currentMirrorSystem().libraries.values); + } + + /** + * Initializes a new [ModelDB] only using the library [librarySymbol]. + * + * See also the default [ModelDB] constructor. + */ + ModelDBImpl.fromLibrary(Symbol librarySymbol) { + _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); + } + + + /** + * Converts a [datastore.Key] to a [Key]. + */ + Key fromDatastoreKey(datastore.Key datastoreKey) { + var namespace = new Partition(datastoreKey.partition.namespace); + Key key = namespace.emptyKey; + for (var element in datastoreKey.elements) { + var type = _typeByModelDescription[_modelDescriptionByKind[element.kind]]; + assert (type != null); + key = key.append(type, id: element.id); + } + return key; + } + + /** + * Converts a [Key] to a [datastore.Key]. + */ + datastore.Key toDatastoreKey(Key dbKey) { + List elements = []; + var currentKey = dbKey; + while (!currentKey.isEmpty) { + var id = currentKey.id; + + var modelDescription = modelDescriptionForType(currentKey.type); + var idProperty = _modelProperties[modelDescription]['id']; + var kind = modelDescription.kindName(this); + + if (idProperty is IntProperty && (id != null && id is! int)) { + throw new ArgumentError('Expected an integer id property but ' + 'id was of type ${id.runtimeType}'); + } + if (idProperty is StringProperty && (id != null && id is! String)) { + throw new ArgumentError('Expected a string id property but ' + 'id was of type ${id.runtimeType}'); + } + + elements.add(new datastore.KeyElement(kind, id)); + currentKey = currentKey.parent; + } + Partition partition = currentKey._parent; + return new datastore.Key( + elements.reversed.toList(), + partition: new datastore.Partition(partition.namespace)); + } + + /** + * Converts a [Model] instance to a [datastore.Entity]. 
+ */ + datastore.Entity toDatastoreEntity(Model model) { + try { + var modelDescription = modelDescriptionForType(model.runtimeType); + return modelDescription.encodeModel(this, model); + } catch (error, stack) { + throw + new ArgumentError('Error while encoding entity ($error, $stack).'); + } + } + + /** + * Converts a [datastore.Entity] to a [Model] instance. + */ + Model fromDatastoreEntity(datastore.Entity entity) { + if (entity == null) return null; + + Key key = fromDatastoreKey(entity.key); + var kind = entity.key.elements.last.kind; + var modelDescription = _modelDescriptionByKind[kind]; + if (modelDescription == null) { + throw new StateError('Trying to deserialize entity of kind ' + '$kind, but no Model class available for it.'); + } + + try { + return modelDescription.decodeEntity(this, key, entity); + } catch (error, stack) { + throw new StateError('Error while decoding entity ($error, $stack).'); + } + } + + String kindName(Type type) { + return _modelDescriptionByType[type]._kind; + } + + String fieldNameToPropertyName(String kind, String fieldName) { + return _modelDescriptionByKind[kind].fieldNameToPropertyName(fieldName); + } + + Iterable get modelDescriptions { + return _modelDescriptionByType.values; + } + + Map propertiesForModel( + ModelDescriptionImpl modelDescription) { + return _modelProperties[modelDescription]; + } + + ModelDescriptionImpl modelDescriptionForType(Type type) { + return _modelDescriptionByType[type]; + } + + mirrors.ClassMirror modelClass(ModelDescriptionImpl md) { + return _modelClasses[md]; + } + + modelDescriptionState(ModelDescriptionImpl modelDescription) { + return _modelDescriptionStates[modelDescription]; + } + + + void _initialize(Iterable libraries) { + libraries.forEach((mirrors.LibraryMirror lm) { + lm.declarations.values + .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) + .forEach((mirrors.ClassMirror declaration) { + var modelDescription = _descriptionFromModelClass(declaration); + if (modelDescription != null) { + _newModelDescription(declaration, modelDescription); + } + }); + }); + + // Ask every [ModelDescription] to compute whatever global state it wants + // to have. + for (var modelDescription in modelDescriptions) { + _modelDescriptionStates[modelDescription] = + modelDescription.initialize(this); + } + + + // Ask every [ModelDescription] whether we should register it with a given + // kind name. + for (var modelDescription in modelDescriptions) { + if (modelDescription.registerKind(this)) { + var kindName = modelDescription.kindName(this); + if (_modelDescriptionByKind.containsKey(kindName)) { + throw new StateError( + 'Cannot have two ModelDescriptions ' + 'with the same kind ($kindName)'); + } + _modelDescriptionByKind[kindName] = modelDescription; + } + } + } + + void _newModelDescription(mirrors.ClassMirror modelClass, + ModelDescription modelDesc) { + assert (!_modelDescriptionByType.containsKey(modelClass.reflectedType)); + + var modelDescImpl; + if (modelDesc is ExpandoModelDescription) { + modelDescImpl = new ExpandoModelDescriptionImpl(modelDesc.kind); + } else { + modelDescImpl = new ModelDescriptionImpl(modelDesc.kind); + } + + // Map the [modelClass.runtimeType] to this [modelDesc] and vice versa. + _modelDescriptionByType[modelClass.reflectedType] = modelDescImpl; + _typeByModelDescription[modelDescImpl] = modelClass.reflectedType; + // Map this [modelDesc] to the [modelClass] mirror for easy instantiation. 
+ _modelClasses[modelDescImpl] = modelClass; + + // TODO: Move this out to the model description classes. + + // Get all properties, validate that the 'id' property is valid. + var properties = _propertiesFromModelDescription(modelDesc); + var idProperty = properties[ModelDescriptionImpl.ID_FIELDNAME]; + if (idProperty == null || + (idProperty is! IntProperty && idProperty is! StringProperty)) { + throw new StateError( + 'You need to have an id property and it has to be either an ' + '[IntProperty] or a [StringProperty].'); + } + if (idProperty.propertyName != null) { + throw new StateError( + 'You can not have a new name for the id property.'); + } + _modelProperties[modelDescImpl] = properties; + + // Ensure we have an empty constructor. + bool defaultConstructorFound = false; + for (var declaration in modelClass.declarations.values) { + if (declaration is mirrors.MethodMirror) { + if (declaration.isConstructor && + declaration.constructorName == const Symbol('') && + declaration.parameters.length == 0) { + defaultConstructorFound = true; + break; + } + } + } + if (!defaultConstructorFound) { + throw new StateError( + 'Class ${modelClass.simpleName} does not have a default ' + 'constructor.'); + } + } + + Map _propertiesFromModelDescription( + ModelDescription modelDescription) { + var modelMirror = mirrors.reflect(modelDescription); + var modelClassMirror = mirrors.reflectClass(modelDescription.runtimeType); + + var properties = new Map(); + var propertyNames = new Set(); + + // Loop over all classes in the inheritance path up to the Object class. + while (modelClassMirror.superclass != null) { + var memberMap = modelClassMirror.instanceMembers; + // Loop over all declarations (which includes fields) + modelClassMirror.declarations.forEach((Symbol s, _) { + // Look if we do have a method for [s] + if (memberMap.containsKey(s) && memberMap[s].isGetter) { + // Get a String representation of the field and the value. + var fieldName = mirrors.MirrorSystem.getName(s); + var fieldValue = modelMirror.getField(s).reflectee; + // If the field value is a Property instance we add it to the list + // of properties. + // Fields with '__' are reserved and will not be used.
+ if (!fieldName.startsWith('__') && + fieldValue != null && + fieldValue is Property) { + var propertyName = fieldValue.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (properties.containsKey(fieldName)) { + throw new StateError( + 'Cannot have two Property objects describing the same Model ' + 'property name in a ModelDescription class hierarchy.'); + } + + if (propertyNames.contains(propertyName)) { + throw new StateError( + 'Cannot have two Property objects mapping to the same ' + 'datastore property name ($propertyName).'); + } + properties[fieldName] = fieldValue; + propertyNames.add(propertyName); + } + } + }); + modelClassMirror = modelClassMirror.superclass; + } + + return properties; + } + + ModelDescription _descriptionFromModelClass(mirrors.ClassMirror classMirror) { + var result; + for (mirrors.InstanceMirror instance in classMirror.metadata) { + if (instance.reflectee.runtimeType == ModelMetadata) { + if (result != null) { + throw new StateError( + 'Cannot have more than one ModelMetadata() annotation ' + 'on a Model class'); + } + result = instance.getField(#description).reflectee; + } + } + return result; + } +} + +class ModelDescriptionImpl { + static String ID_FIELDNAME = 'id'; + + HashMap property2FieldName; + HashMap field2PropertyName; + Set indexedProperties; + Set unIndexedProperties; + + final String _kind; + + ModelDescriptionImpl(this._kind); + + initialize(ModelDBImpl db) { + // Compute propertyName -> fieldName mapping. + property2FieldName = new HashMap(); + field2PropertyName = new HashMap(); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + // The default of a datastore property name is the fieldName. + // It can be overridden with [Property.propertyName]. + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (fieldName != ID_FIELDNAME) { + property2FieldName[propertyName] = fieldName; + field2PropertyName[fieldName] = propertyName; + } + }); + + // Compute properties & unindexed properties + indexedProperties = new Set(); + unIndexedProperties = new Set(); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + if (fieldName != ID_FIELDNAME) { + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (prop.indexed) { + indexedProperties.add(propertyName); + } else { + unIndexedProperties.add(propertyName); + } + } + }); + } + + bool registerKind(ModelDBImpl db) => true; + + String kindName(ModelDBImpl db) => _kind; + + datastore.Entity encodeModel(ModelDBImpl db, Model model) { + var key = db.toDatastoreKey(model.key); + + var properties = {}; + var mirror = mirrors.reflect(model); + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + _encodeProperty(db, model, mirror, properties, fieldName, prop); + }); + + return new datastore.Entity( + key, properties, unIndexedProperties: unIndexedProperties); + } + + _encodeProperty(ModelDBImpl db, Model model, mirrors.InstanceMirror mirror, + Map properties, String fieldName, Property prop) { + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (fieldName != ID_FIELDNAME) { + var value = mirror.getField( + mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed for ' + 'property $fieldName while trying to serialize entity of kind ' + '${model.runtimeType}. 
'); + } + properties[propertyName] = prop.encodeValue(db, value); + } + } + + Model decodeEntity(ModelDBImpl db, Key key, datastore.Entity entity) { + if (entity == null) return null; + + // NOTE: this assumes a default constructor for the model classes! + var classMirror = db.modelClass(this); + var mirror = classMirror.newInstance(const Symbol(''), []); + + // Set the id and the parent key + mirror.reflectee.id = key.id; + mirror.reflectee.parentKey = key.parent; + + db.propertiesForModel(this).forEach((String fieldName, Property prop) { + _decodeProperty(db, entity, mirror, fieldName, prop); + }); + return mirror.reflectee; + } + + _decodeProperty(ModelDBImpl db, datastore.Entity entity, + mirrors.InstanceMirror mirror, String fieldName, + Property prop) { + String propertyName = fieldNameToPropertyName(fieldName); + + if (fieldName != ID_FIELDNAME) { + var rawValue = entity.properties[propertyName]; + var value = prop.decodePrimitiveValue(db, rawValue); + + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed while ' + 'trying to deserialize entity of kind ' + '${entity.key.elements.last.kind} (property name: $prop)'); + } + + mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + } + } + + String fieldNameToPropertyName(String fieldName) { + return field2PropertyName[fieldName]; + } + + String propertyNameToFieldName(ModelDBImpl db, String propertySearchName) { + return property2FieldName[propertySearchName]; + } + + Object encodeField(ModelDBImpl db, String fieldName, Object value) { + Property property = db.propertiesForModel(this)[fieldName]; + if (property != null) return property.encodeValue(db, value); + return null; + } +} + +// NOTE/TODO: +// Currently expanded properties are only +// * decoded if there are no clashes in [usedNames] +// * encoded if there are no clashes in [usedNames] +// We might want to throw an error if there are clashes, because otherwise +// - we may end up removing properties after a read-write cycle +// - we may end up dropping added properties in a write +// ([usedNames] := [realFieldNames] + [realPropertyNames]) +class ExpandoModelDescriptionImpl extends ModelDescriptionImpl { + Set realFieldNames; + Set realPropertyNames; + Set usedNames; + + ExpandoModelDescriptionImpl(String kind) : super(kind); + + initialize(ModelDBImpl db) { + super.initialize(db); + + realFieldNames = new Set.from(field2PropertyName.keys); + realPropertyNames = new Set.from(property2FieldName.keys); + usedNames = new Set()..addAll(realFieldNames)..addAll(realPropertyNames); + } + + datastore.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { + var entity = super.encodeModel(db, model); + var properties = entity.properties; + model.additionalProperties.forEach((String key, Object value) { + // NOTE: All expanded properties will be indexed. 
+ if (!usedNames.contains(key)) { + properties[key] = value; + } + }); + return entity; + } + + Model decodeEntity(ModelDBImpl db, Key key, datastore.Entity entity) { + if (entity == null) return null; + + ExpandoModel model = super.decodeEntity(db, key, entity); + var properties = entity.properties; + properties.forEach((String key, Object value) { + if (!usedNames.contains(key)) { + model.additionalProperties[key] = value; + } + }); + return model; + } + + String fieldNameToPropertyName(String fieldName) { + String propertyName = super.fieldNameToPropertyName(fieldName); + // If the ModelDescription doesn't know about [fieldName], it's an + // expanded property, where propertyName == fieldName. + if (propertyName == null) propertyName = fieldName; + return propertyName; + } + + String propertyNameToFieldName(ModelDBImpl db, String propertyName) { + String fieldName = super.propertyNameToFieldName(db, propertyName); + // If the ModelDescription doesn't know about [propertyName], it's an + // expanded property, where propertyName == fieldName. + if (fieldName == null) fieldName = propertyName; + return fieldName; + } + + Object encodeField(ModelDBImpl db, String fieldName, Object value) { + Object primitiveValue = super.encodeField(db, fieldName, value); + // If superclass can't encode field, we return value here (and assume + // it's primitive) + // NOTE: Implicit assumption: + // If value != null then superclass will return != null. + // TODO: Ensure [value] is primitive in this case. + if (primitiveValue == null) primitiveValue = value; + return primitiveValue; + } +} diff --git a/pkgs/gcloud/lib/src/db/model_description.dart b/pkgs/gcloud/lib/src/db/model_description.dart index 6ae8fdad..1cd03b4a 100644 --- a/pkgs/gcloud/lib/src/db/model_description.dart +++ b/pkgs/gcloud/lib/src/db/model_description.dart @@ -8,246 +8,16 @@ part of gcloud.db; /// to a Datastore Entity. /// /// Please see [ModelMetadata] for an example on how to use them. -abstract class ModelDescription { - static String ID_FIELDNAME = 'id'; +class ModelDescription { + final String kind; - // NOTE: These integer constants are array indices into the state vector. - // Subclasses may need to take this into account. - static const int STATE_PROPERTYNAME_TO_FIELDNAME_MAP = 0; - static const int STATE_FIELDNAME_TO_PROPERTYNAME_MAP = 1; - static const int STATE_INDEXED_PROPERTIES = 2; - static const int STATE_UNINDEXED_PROPERTIES = 3; - static const int STATE_LAST = STATE_UNINDEXED_PROPERTIES; - - final String _kind; - const ModelDescription(this._kind); - - initialize(ModelDB db) { - // Compute propertyName -> fieldName mapping. - var property2FieldName = new HashMap(); - var field2PropertyName = new HashMap(); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - // The default of a datastore property name is the fieldName. - // It can be overridden with [Property.propertyName]. 
- String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; - - if (fieldName != ModelDescription.ID_FIELDNAME) { - property2FieldName[propertyName] = fieldName; - field2PropertyName[fieldName] = propertyName; - } - }); - - // Compute properties & unindexed properties - var indexedProperties = new Set(); - var unIndexedProperties = new Set(); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - if (fieldName != ModelDescription.ID_FIELDNAME) { - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; - - if (prop.indexed) { - indexedProperties.add(propertyName); - } else { - unIndexedProperties.add(propertyName); - } - } - }); - - // NOTE: This state vector is indexed by the STATE_* integer constants! - return new List.from([ - property2FieldName, - field2PropertyName, - indexedProperties, - unIndexedProperties, - ], growable: false); - } - - bool registerKind(ModelDB db) => true; - - String kindName(ModelDB db) => _kind; - - datastore.Entity encodeModel(ModelDB db, Model model) { - List stateVector = db.modelDescriptionState(this); - var key = db.toDatastoreKey(model.key); - - var properties = {}; - var unIndexedProperties = stateVector[STATE_UNINDEXED_PROPERTIES]; - var mirror = mirrors.reflect(model); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - _encodeProperty(db, model, mirror, properties, fieldName, prop); - }); - - return new datastore.Entity( - key, properties, unIndexedProperties: unIndexedProperties); - } - - _encodeProperty(ModelDB db, Model model, mirrors.InstanceMirror mirror, - Map properties, String fieldName, Property prop) { - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; - - if (fieldName != ModelDescription.ID_FIELDNAME) { - var value = mirror.getField( - mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; - if (!prop.validate(db, value)) { - throw new StateError('Property validation failed for ' - 'property $fieldName while trying to serialize entity of kind ' - '${model.runtimeType}. '); - } - properties[propertyName] = prop.encodeValue(db, value); - } - } - - Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) { - if (entity == null) return null; - - // NOTE: this assumes a default constructor for the model classes! 
- var classMirror = db.modelClass(this); - var mirror = classMirror.newInstance(const Symbol(''), []); - - // Set the id and the parent key - mirror.reflectee.id = key.id; - mirror.reflectee.parentKey = key.parent; - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - _decodeProperty(db, entity, mirror, fieldName, prop); - }); - return mirror.reflectee; - } - - _decodeProperty(ModelDB db, datastore.Entity entity, - mirrors.InstanceMirror mirror, String fieldName, - Property prop) { - String propertyName = fieldNameToPropertyName(db, fieldName); - - if (fieldName != ModelDescription.ID_FIELDNAME) { - var rawValue = entity.properties[propertyName]; - var value = prop.decodePrimitiveValue(db, rawValue); - - if (!prop.validate(db, value)) { - throw new StateError('Property validation failed while ' - 'trying to deserialize entity of kind ' - '${entity.key.elements.last.kind} (property name: $prop)'); - } - - mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); - } - } - - Query finishQuery(ModelDB db, Query q) => q; - - String fieldNameToPropertyName(ModelDB db, String fieldName) { - List stateVector = db.modelDescriptionState(this); - return stateVector[STATE_FIELDNAME_TO_PROPERTYNAME_MAP][fieldName]; - } - - String propertyNameToFieldName(ModelDB db, String propertySearchName) { - List stateVector = db.modelDescriptionState(this); - return stateVector[STATE_PROPERTYNAME_TO_FIELDNAME_MAP][propertySearchName]; - } - - Object encodeField(ModelDB db, String fieldName, Object value) { - Property property = db.propertiesForModel(this)[fieldName]; - if (property != null) return property.encodeValue(db, value); - return null; - } + const ModelDescription(this.kind); } -// NOTE/TODO: -// Currently expanded properties are only -// * decoded if there are no clashes in [usedNames] -// * encoded if there are no clashes in [usedNames] -// We might want to throw an error if there are clashes, because otherwise -// - we may end up removing properties after a read-write cycle -// - we may end up dropping added properties in a write -// ([usedNames] := [realFieldNames] + [realPropertyNames]) -abstract class ExpandoModelDescription extends ModelDescription { - static const int STATE_FIELD_SET = ModelDescription.STATE_LAST + 1; - static const int STATE_PROPERTY_SET = ModelDescription.STATE_LAST + 2; - static const int STATE_USED_NAMES = ModelDescription.STATE_LAST + 3; - static const int STATE_LAST = STATE_USED_NAMES; - +/// Subclasses of [ExpandoModelDescription] describe how to map a dart expando +/// model object to a Datastore Entity. +/// +/// Please see [ModelMetadata] for an example on how to use them. +class ExpandoModelDescription extends ModelDescription { const ExpandoModelDescription(String kind) : super(kind); - - initialize(ModelDB db) { - var stateVector = super.initialize(db); - - var realFieldNames = new Set.from( - stateVector[ModelDescription.STATE_FIELDNAME_TO_PROPERTYNAME_MAP].keys); - var realPropertyNames = new Set.from( - stateVector[ModelDescription.STATE_PROPERTYNAME_TO_FIELDNAME_MAP].keys); - var usedNames = - new Set()..addAll(realFieldNames)..addAll(realPropertyNames); - - // NOPTE: [realFieldNames] and [realPropertyNames] are not used right now - // but we might use them to detect name clashes in the future. 
- return new List.from([] - ..addAll(stateVector) - ..add(realFieldNames) - ..add(realPropertyNames) - ..add(usedNames), - growable: false); - } - - datastore.Entity encodeModel(ModelDB db, ExpandoModel model) { - List stateVector = db.modelDescriptionState(this); - Set usedNames = stateVector[STATE_USED_NAMES]; - - var entity = super.encodeModel(db, model); - var properties = entity.properties; - model.additionalProperties.forEach((String key, Object value) { - // NOTE: All expanded properties will be indexed. - if (!usedNames.contains(key)) { - properties[key] = value; - } - }); - return entity; - } - - Model decodeEntity(ModelDB db, Key key, datastore.Entity entity) { - if (entity == null) return null; - - List stateVector = db.modelDescriptionState(this); - Set usedNames = stateVector[STATE_USED_NAMES]; - - ExpandoModel model = super.decodeEntity(db, key, entity); - var properties = entity.properties; - properties.forEach((String key, Object value) { - if (!usedNames.contains(key)) { - model.additionalProperties[key] = value; - } - }); - return model; - } - - String fieldNameToPropertyName(ModelDB db, String fieldName) { - String propertyName = super.fieldNameToPropertyName(db, fieldName); - // If the ModelDescription doesn't know about [fieldName], it's an - // expanded property, where propertyName == fieldName. - if (propertyName == null) propertyName = fieldName; - return propertyName; - } - - String propertyNameToFieldName(ModelDB db, String propertyName) { - String fieldName = super.propertyNameToFieldName(db, propertyName); - // If the ModelDescription doesn't know about [propertyName], it's an - // expanded property, where propertyName == fieldName. - if (fieldName == null) fieldName = propertyName; - return fieldName; - } - - Object encodeField(ModelDB db, String fieldName, Object value) { - Object primitiveValue = super.encodeField(db, fieldName, value); - // If superclass can't encode field, we return value here (and assume - // it's primitive) - // NOTE: Implicit assumption: - // If value != null then superclass will return != null. - // TODO: Ensure [value] is primitive in this case. 
- if (primitiveValue == null) primitiveValue = value; - return primitiveValue; - } } diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart index fd926139..6912235b 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -66,6 +66,8 @@ class Person extends db.Model { age == other.age && wife == other.wife; } + + String toString() => 'Person(id: $id, name: $name, age: $age)'; } @db.ModelMetadata(const UserDesc()) @@ -93,6 +95,9 @@ class User extends Person { } return true; } + + String toString() => + 'User(${super.toString()}, nickname: $nickname, languages: $languages'; } class PersonDesc extends db.ModelDescription { @@ -399,7 +404,7 @@ runTests(db.DatastoreDB store) { var users = []; for (var i = 1; i <= 10; i++) { var languages = []; - if (i == 10) { + if (i == 9) { languages = ['foo']; } else if (i == 10) { languages = ['foo', 'bar']; @@ -431,14 +436,14 @@ runTests(db.DatastoreDB store) { var usersSortedNameDescNicknameAsc = new List.from(users); usersSortedNameDescNicknameAsc.sort((User a, User b) { - var result = b.name.compareTo(b.name); + var result = b.name.compareTo(a.name); if (result == 0) return a.nickname.compareTo(b.nickname); return result; }); var usersSortedNameDescNicknameDesc = new List.from(users); usersSortedNameDescNicknameDesc.sort((User a, User b) { - var result = b.name.compareTo(b.name); + var result = b.name.compareTo(a.name); if (result == 0) return b.nickname.compareTo(a.nickname); return result; }); @@ -495,7 +500,7 @@ runTests(db.DatastoreDB store) { ..order('nickname') ..run().then((List models) { compareModels( - usersSortedNameDescNicknameAsc, models, anyOrder: true); + usersSortedNameDescNicknameAsc, models); }); }, () { @@ -504,7 +509,7 @@ runTests(db.DatastoreDB store) { ..order('-nickname') ..run().then((List models) { compareModels( - usersSortedNameDescNicknameDesc, models, anyOrder: true); + usersSortedNameDescNicknameDesc, models); }); }, @@ -513,20 +518,20 @@ runTests(db.DatastoreDB store) { return store.query(User) ..filter('name >=', LOWER_BOUND) ..order('-name') - ..order('-nickname') + ..order('nickname') ..run().then((List models) { compareModels(usersSortedAndFilteredNameDescNicknameAsc, - models, anyOrder: true); + models); }); }, () { return store.query(User) ..filter('name >=', LOWER_BOUND) - ..order('name') + ..order('-name') ..order('-nickname') ..run().then((List models) { compareModels(usersSortedAndFilteredNameDescNicknameDesc, - models, anyOrder: true); + models); }); }, @@ -552,13 +557,14 @@ runTests(db.DatastoreDB store) { // Simple limit/offset test. 
() { return store.query(User) + ..order('-name') ..order('nickname') ..offset(3) - ..limit(5) + ..limit(4) ..run().then((List models) { var expectedModels = usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); - compareModels(expectedModels, models, anyOrder: true); + compareModels(expectedModels, models); }); }, diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index dde0d6a2..8a0560fc 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -20,36 +20,38 @@ import 'model_dbs/duplicate_fieldname.dart' as test5; import 'model_dbs/no_default_constructor.dart' as test6; main() { + newModelDB(Symbol symbol)=> new ModelDBImpl.fromLibrary(symbol); + group('model_db', () { group('from_library', () { test('duplicate_kind', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_kind); + newModelDB(#gcloud.db.model_test.duplicate_kind); }), throwsA(isStateError)); }); test('duplicate_property', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_property); + newModelDB(#gcloud.db.model_test.duplicate_property); }), throwsA(isStateError)); }); test('multiple_annotations', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.multiple_annotations); + newModelDB(#gcloud.db.model_test.multiple_annotations); }), throwsA(isStateError)); }); test('invalid_id', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.invalid_id); + newModelDB(#gcloud.db.model_test.invalid_id); }), throwsA(isStateError)); }); test('duplicate_fieldname', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.duplicate_fieldname); + newModelDB(#gcloud.db.model_test.duplicate_fieldname); }), throwsA(isStateError)); }); test('no_default_constructor', () { expect(new Future.sync(() { - new ModelDB.fromLibrary(#gcloud.db.model_test.no_default_constructor); + newModelDB(#gcloud.db.model_test.no_default_constructor); }), throwsA(isStateError)); }); }); diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index e68dd609..d0938a42 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -174,4 +174,6 @@ class ModelDBMock implements ModelDB { Model fromDatastoreEntity(datastore.Entity entity) => null; ModelDescription modelDescriptionForType(Type type) => null; datastore.Entity toDatastoreEntity(Model model) => null; + String fieldNameToPropertyName(String kind, String fieldName) => null; + String kindName(Type type) => null; } From 764bba3a91723efc7cd1c5a80916d769ef3822d8 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 29 Oct 2014 11:08:05 +0100 Subject: [PATCH 016/239] Instead of DB.startTransaction() introduce a DB.withTransaction(callback) This also makes "cross entity group" transactions the default. R=sgjesse@google.com Review URL: https://codereview.chromium.org//683093002 --- pkgs/gcloud/lib/src/db/db.dart | 34 +++++++++++++++++++--------- pkgs/gcloud/test/db/e2e/db_test.dart | 22 ++++++++---------- 2 files changed, 33 insertions(+), 23 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 13c4a90b..8a6e4e70 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -4,6 +4,21 @@ part of gcloud.db; +/** + * A function definition for transactional functions.
* + * The function will be given a [Transaction] object which can be used to make + * lookups/queries and queue modifications (inserts/updates/deletes). + */ +typedef Future TransactionHandler(Transaction transaction); + +/** + * A datastore transaction. + * + * It can be used for making lookups/queries and queue modifications + * (inserts/updates/deletes). Finally the transaction can be either committed + * or rolled back. + */ class Transaction { static const int _TRANSACTION_STARTED = 0; static const int _TRANSACTION_ROLLED_BACK = 1; @@ -254,18 +269,15 @@ class DatastoreDB { /** * Begins a new transaction. * - * A normal transaction can only touch entities inside one entity group. By - * setting [crossEntityGroup] to `true` it is possible to touch up to - * five entity groups. - * - * Cross entity group transactions come with a cost, due to the fact that - * a two-phase commit protocol will be used. So it will result in higher - * latency. + * A transaction can touch only a limited number of entity groups. This limit + * is currently 5. */ - Future beginTransaction({bool crossEntityGroup: false}) { - return datastore.beginTransaction(crossEntityGroup: crossEntityGroup) - .then((transaction) { - return new Transaction(this, transaction); + // TODO: Add retries and/or auto commit/rollback. + Future withTransaction(TransactionHandler transactionHandler) { + return datastore.beginTransaction(crossEntityGroup: true) + .then((datastoreTransaction) { + var transaction = new Transaction(this, datastoreTransaction); + return transactionHandler(transaction); }); } diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart index 6912235b..addc129c 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -178,21 +178,19 @@ runTests(db.DatastoreDB store) { } Future testInsertLookupDelete( - List objects, {bool transactional: false, bool xg: false}) { + List objects, {bool transactional: false}) { var keys = objects.map((db.Model obj) => obj.key).toList(); if (transactional) { - return store.beginTransaction(crossEntityGroup: xg) - .then((db.Transaction commitTransaction) { + return store.withTransaction((db.Transaction commitTransaction) { commitTransaction.queueMutations(inserts: objects); - return commitTransaction.commit().then((_) { - return store.beginTransaction(crossEntityGroup: xg) - .then((db.Transaction deleteTransaction) { - return deleteTransaction.lookup(keys).then((List models) { - compareModels(objects, models); - deleteTransaction.queueMutations(deletes: keys); - return deleteTransaction.commit(); - }); + return commitTransaction.commit(); + }).then((_) { + return store.withTransaction((db.Transaction deleteTransaction) { + return deleteTransaction.lookup(keys).then((List models) { + compareModels(objects, models); + deleteTransaction.queueMutations(deletes: keys); + return deleteTransaction.commit(); }); }); }); @@ -288,7 +286,7 @@ runTests(db.DatastoreDB store) { expandoPerson.foo = 'foo1'; expandoPerson.bar = 2; - return testInsertLookupDelete(models, transactional: true, xg: true); + return testInsertLookupDelete(models, transactional: true); }); test('parent_key', () { From 4cb7829ebc6432e56f86214ec0a36668b0f77c19 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 29 Oct 2014 11:17:48 +0100 Subject: [PATCH 017/239] Add DoubleProperty to high level datastore API R=sgjesse@google.com Review URL: https://codereview.chromium.org//683803004 --- pkgs/gcloud/lib/src/db/properties.dart | 48 
+++++++++++++++++++++++- pkgs/gcloud/test/db/properties_test.dart | 13 +++++++ 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/properties.dart b/pkgs/gcloud/lib/src/db/properties.dart index 5f55ca9c..5bf5e669 100644 --- a/pkgs/gcloud/lib/src/db/properties.dart +++ b/pkgs/gcloud/lib/src/db/properties.dart @@ -38,7 +38,8 @@ abstract class Property { Object decodePrimitiveValue(ModelDB db, Object value); } - +/// An abstract base class for primitive properties which can e.g. be used +/// within a composed `ListProperty`. abstract class PrimitiveProperty extends Property { const PrimitiveProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -49,7 +50,10 @@ abstract class PrimitiveProperty extends Property { Object decodePrimitiveValue(ModelDB db, Object value) => value; } - +/// A boolean [Property]. +/// +/// It will validate that values are booleans before writing them to the +/// datastore and when reading them back. class BoolProperty extends PrimitiveProperty { const BoolProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -59,6 +63,10 @@ class BoolProperty extends PrimitiveProperty { => super.validate(db, value) && (value == null || value is bool); } +/// A integer [Property]. +/// +/// It will validate that values are integers before writing them to the +/// datastore and when reading them back. class IntProperty extends PrimitiveProperty { const IntProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -68,6 +76,23 @@ class IntProperty extends PrimitiveProperty { => super.validate(db, value) && (value == null || value is int); } +/// A double [Property]. +/// +/// It will validate that values are doubles before writing them to the +/// datastore and when reading them back. +class DoubleProperty extends PrimitiveProperty { + const DoubleProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is double); +} + +/// A string [Property]. +/// +/// It will validate that values are strings before writing them to the +/// datastore and when reading them back. class StringProperty extends PrimitiveProperty { const StringProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -77,6 +102,10 @@ class StringProperty extends PrimitiveProperty { => super.validate(db, value) && (value == null || value is String); } +/// A key [Property]. +/// +/// It will validate that values are keys before writing them to the +/// datastore and when reading them back. class ModelKeyProperty extends PrimitiveProperty { const ModelKeyProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -96,6 +125,11 @@ class ModelKeyProperty extends PrimitiveProperty { } } +/// A binary blob [Property]. +/// +/// It will validate that values are blobs before writing them to the +/// datastore and when reading them back. Blob values will be represented by +/// List. class BlobProperty extends PrimitiveProperty { const BlobProperty({String propertyName, bool required: false}) : super(propertyName: propertyName, required: required, indexed: false); @@ -120,6 +154,10 @@ class BlobProperty extends PrimitiveProperty { } } +/// A datetime [Property]. +/// +/// It will validate that values are DateTime objects before writing them to the +/// datastore and when reading them back. 
class DateTimeProperty extends PrimitiveProperty { const DateTimeProperty( {String propertyName, bool required: false, bool indexed: true}) @@ -138,6 +176,11 @@ class DateTimeProperty extends PrimitiveProperty { } +/// A composed list [Property], with a `subProperty` for the list elements. +/// +/// It will validate that values are List objects before writing them to the +/// datastore and when reading them back. It will also validate the elements +/// of the list itself. class ListProperty extends Property { final PrimitiveProperty subProperty; @@ -174,6 +217,7 @@ class ListProperty extends Property { } } +/// A convenience [Property] for list of strings. class StringListProperty extends ListProperty { const StringListProperty({String propertyName, bool indexed: true}) : super(const StringProperty(), diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index d0938a42..edf26655 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -41,6 +41,19 @@ main() { expect(prop.decodePrimitiveValue(null, 99), equals(99)); }); + test('double_property', () { + var prop = const DoubleProperty(required: true); + expect(prop.validate(null, null), isFalse); + + prop = const DoubleProperty(required: false); + expect(prop.validate(null, null), isTrue); + expect(prop.validate(null, 33.0), isTrue); + expect(prop.encodeValue(null, null), equals(null)); + expect(prop.encodeValue(null, 42.3), equals(42.3)); + expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(null, 99.1), equals(99.1)); + }); + test('string_property', () { var prop = const StringProperty(required: true); expect(prop.validate(null, null), isFalse); From 4d9276138838012e4074fd9442d59e9107b5c8aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 29 Oct 2014 12:33:24 +0100 Subject: [PATCH 018/239] Proposal for full ACL support R=kustermann@google.com, lrn@google.com BUG= Review URL: https://codereview.chromium.org//643083002 --- pkgs/gcloud/lib/src/storage_impl.dart | 93 +++++--- pkgs/gcloud/lib/storage.dart | 331 ++++++++++++++++++++------ pkgs/gcloud/test/storage_test.dart | 314 +++++++++++++++++++++++- 3 files changed, 634 insertions(+), 104 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 07d0216a..e3ec721b 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -40,16 +40,16 @@ class _StorageImpl implements Storage { _StorageImpl(client, this.project) : _api = new storage.StorageApi(client); - Future createBucket(String bucketName, {BucketAcl acl}) { + Future createBucket(String bucketName, + {PredefinedAcl predefinedAcl, Acl acl}) { var bucket = new storage.Bucket()..name = bucketName; - var predefinedAcl; + var predefinedName = predefinedAcl != null ? 
predefinedAcl._name : null; if (acl != null) { - assert(acl.isPredefined); - predefinedAcl = acl._predefined; + bucket.acl = acl._toBucketAccessControlList(); } return _api.buckets.insert(bucket, project, - predefinedAcl: predefinedAcl) + predefinedAcl: predefinedName) .then((bucket) => null); } @@ -57,8 +57,11 @@ class _StorageImpl implements Storage { return _api.buckets.delete(bucketName); } - Bucket bucket(String bucketName, {ObjectAcl defaultObjectAcl}) { - return new _BucketImpl(this, bucketName, defaultObjectAcl); + Bucket bucket(String bucketName, + {PredefinedAcl defaultPredefinedObjectAcl, + Acl defaultObjectAcl}) { + return new _BucketImpl( + this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); } Future bucketExists(String bucketName) { @@ -118,10 +121,14 @@ class _BucketInformationImpl implements BucketInfo { /// Bucket API implementation providing access to objects. class _BucketImpl implements Bucket { final storage.StorageApi _api; - ObjectAcl _defaultObjectAcl; + PredefinedAcl _defaultPredefinedObjectAcl; + Acl _defaultObjectAcl; final String bucketName; - _BucketImpl(_StorageImpl storage, this.bucketName, this._defaultObjectAcl) : + _BucketImpl(_StorageImpl storage, + this.bucketName, + this._defaultPredefinedObjectAcl, + this._defaultObjectAcl) : this._api = storage._api; String absoluteObjectName(String objectName) { @@ -130,28 +137,50 @@ class _BucketImpl implements Bucket { StreamSink> write( String objectName, - {int length, ObjectMetadata metadata, String contentType}) { + {int length, ObjectMetadata metadata, + Acl acl, PredefinedAcl predefinedAcl, String contentType}) { storage.Object object; if (metadata == null) { - metadata = new _ObjectMetadata(contentType: contentType); - } else if (contentType != null) { - metadata = metadata.replace(contentType: contentType); + metadata = new _ObjectMetadata(acl: acl, contentType: contentType); + } else { + if (acl != null) { + metadata = metadata.replace(acl: acl); + } + if (contentType != null) { + metadata = metadata.replace(contentType: contentType); + } + } + _ObjectMetadata objectMetadata = metadata; + object = objectMetadata._object; + + // If no predefined ACL is passed use the default (if any). + var predefinedName; + if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) { + var predefined = + predefinedAcl != null ? predefinedAcl : _defaultPredefinedObjectAcl; + predefinedName = predefined._name; + } + + // If no ACL is passed use the default (if any). + if (object.acl == null && _defaultObjectAcl != null) { + object.acl = _defaultObjectAcl._toObjectAccessControlList(); } - object = (metadata as _ObjectMetadata)._object; // Fill properties not passed in metadata. object.name = objectName; var sink = new _MediaUploadStreamSink( - _api, bucketName, objectName, object, length); + _api, bucketName, objectName, object, predefinedName, length); return sink; } Future writeBytes( String objectName, List bytes, - {ObjectMetadata metadata, String contentType}) { + {ObjectMetadata metadata, + Acl acl, PredefinedAcl predefinedAcl, String contentType}) { var sink = write(objectName, length: bytes.length, - metadata: metadata, contentType: contentType); + metadata: metadata, acl: acl, predefinedAcl: predefinedAcl, + contentType: contentType); sink.add(bytes); return sink.close(); } @@ -192,16 +221,21 @@ class _BucketImpl implements Bucket { // TODO: support other ObjectMetadata implementations? 
_ObjectMetadata md = metadata; var object = md._object; - if (md._predefined == null && _defaultObjectAcl == null) { + if (md._acl == null && _defaultObjectAcl == null) { throw new ArgumentError('ACL is required for update'); } if (md.contentType == null) { throw new ArgumentError('Content-Type is required for update'); } - var acl = md._predefined != null ? md._predefined._predefined - : _defaultObjectAcl._predefined; + var acl = md._acl != null ? md._acl : _defaultObjectAcl; + + var predefinedAcl; + if (acl != null) { + object.acl = acl._toObjectAccessControlList(); + } + return _api.objects.update( - object, bucketName, objectName, predefinedAcl: acl); + object, bucketName, objectName, predefinedAcl: predefinedAcl); } Future _listObjects( @@ -336,9 +370,8 @@ class _ObjectStatImpl implements ObjectInfo { class _ObjectMetadata implements ObjectMetadata { storage.Object _object; - ObjectAcl _predefined; - _ObjectMetadata({ObjectAcl acl, + _ObjectMetadata({Acl acl, String contentType, String contentEncoding, String cacheControl, @@ -346,7 +379,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentLanguage, Map custom}) { _object = new storage.Object(); - _predefined = acl; // Only canned ACLs supported. + _object.acl = acl != null ? acl._toObjectAccessControlList() : null; _object.contentType = contentType; _object.contentEncoding = contentEncoding; _object.cacheControl = cacheControl; @@ -357,7 +390,7 @@ class _ObjectMetadata implements ObjectMetadata { _ObjectMetadata._(this._object); - set acl(ObjectAcl value) => _predefined = value; + set acl(Acl value) => _object.acl = value._toObjectAccessControlList(); String get contentType => _object.contentType; set contentType(String value) => _object.contentType = value; @@ -377,7 +410,7 @@ class _ObjectMetadata implements ObjectMetadata { Map get custom => _object.metadata; set custom(Map value) => _object.metadata = value; - ObjectMetadata replace({ObjectAcl acl, + ObjectMetadata replace({Acl acl, String contentType, String contentEncoding, String cacheControl, @@ -385,7 +418,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentLanguage, Map custom}) { return new _ObjectMetadata( - acl: acl != null ? acl : _predefined, + acl: acl != null ? acl : _acl, contentType: contentType != null ? contentType : this.contentType, contentEncoding: contentEncoding != null ? 
contentEncoding : this.contentEncoding, @@ -407,6 +440,7 @@ class _MediaUploadStreamSink implements StreamSink> { final String _bucketName; final String _objectName; final storage.Object _object; + final String _predefinedAcl; final int _length; final int _maxNormalUploadLength; int _bufferLength = 0; @@ -422,7 +456,8 @@ class _MediaUploadStreamSink implements StreamSink> { int _state; _MediaUploadStreamSink( - this._api, this._bucketName, this._objectName, this._object, this._length, + this._api, this._bucketName, this._objectName, this._object, + this._predefinedAcl, this._length, [this._maxNormalUploadLength = _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH]) { if (_length != null) { // If the length is known in advance decide on the upload strategy @@ -517,6 +552,7 @@ class _MediaUploadStreamSink implements StreamSink> { _api.objects.insert(_object, _bucketName, name: _objectName, + predefinedAcl: _predefinedAcl, uploadMedia: media, uploadOptions: common.UploadOptions.Default) .then((response) { @@ -529,6 +565,7 @@ class _MediaUploadStreamSink implements StreamSink> { _api.objects.insert(_object, _bucketName, name: _objectName, + predefinedAcl: _predefinedAcl, uploadMedia: media, uploadOptions: common.UploadOptions.Resumable) .then((response) { diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 1b1e9b44..10307461 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -42,9 +42,14 @@ /// /// In most cases relative names are used. Absolute names are typically /// only used for operations involving objects in different buckets. +/// +/// For most of the APIs in ths library which take instances of other classes +/// from this library it is the assumption that the actual implementations +/// provided here are used. library gcloud.storage; import 'dart:async'; +import 'dart:collection' show UnmodifiableListView; import 'package:http/http.dart' as http; @@ -57,54 +62,233 @@ export 'common.dart'; part 'src/storage_impl.dart'; -/// Bucket Access Control List -/// -/// Describe an access control list for a bucket. The access control list -/// defines the level of access for different entities. -/// -/// Currently only supports pre-defined ACLs. + +/// An ACL (Access Control List) describes access rights to buckets and +/// objects. /// -/// TODO: Support for building custom ACLs. -class BucketAcl { - static const AUTHENTICATED_READ = const BucketAcl._('authenticatedRead'); - static const PRIVATE = const BucketAcl._('private'); - static const PROJECT_PRIVATE = const BucketAcl._('projectPrivate'); - static const PUBLIC_READ = const BucketAcl._('publicRead'); - static const PUBLIC_READ_WRITE = const BucketAcl._('publicReadWrite'); +/// An ACL is a prioritized sequence of access control specifications, +/// which individually prevent or grant access. +/// The access controls are described by [AclEntry] objects. +class Acl { + final _entries; + + /// The entries in the ACL. + List get entries => new UnmodifiableListView(_entries); - // Enum value for a predefined bucket ACL. - final String _predefined; + /// Create a new ACL with a list of ACL entries. + Acl(Iterable entries) : _entries = new List.from(entries); - /// Whether this ACL is one of the predefined ones. 
- bool get isPredefined => true; + List _toBucketAccessControlList() { + return _entries.map((entry) => entry._toBucketAccessControl()).toList(); + } - const BucketAcl._(String this._predefined); + List _toObjectAccessControlList() { + return _entries.map((entry) => entry._toObjectAccessControl()).toList(); + } } -/// Object Access Control List +/// An ACL entry specifies that an entity has a specific access permission. /// -/// Currently only supports pre-defined ACLs. +/// A permission grants a specific permission to the entity. +class AclEntry { + final AclScope scope; + final AclPermission permission; + + AclEntry(this.scope, this.permission); + + storage.BucketAccessControl _toBucketAccessControl() { + var acl = new storage.BucketAccessControl(); + acl.entity = scope._storageEntity; + acl.role = permission._storageBucketRole; + return acl; + } + + storage.ObjectAccessControl _toObjectAccessControl() { + var acl = new storage.ObjectAccessControl(); + acl.entity = scope._storageEntity; + acl.role = permission._storageObjectRole; + return acl; + } +} + +/// An ACL scope specifies an entity for which a permission applies. +/// +/// A scope can be one of: /// -/// Describe an access control list for an object. The access control list -/// define the level of access for different entities. +/// * Google Storage ID +/// * Google account email address +/// * Google group email address +/// * Google Apps domain +/// * Special identifier for all Google account holders +/// * Special identifier for all users +/// +/// See https://cloud.google.com/storage/docs/accesscontrol for more details. +abstract class AclScope { + /// ACL type for scope representing a Google Storage id. + static const int _TYPE_STORAGE_ID = 0; + + /// ACL type for scope representing an account holder. + static const int _TYPE_ACCOUNT = 1; + + /// ACL type for scope representing a group. + static const int _TYPE_GROUP = 2; + + /// ACL type for scope representing a domain. + static const int _TYPE_DOMAIN = 3; + + /// ACL type for scope representing all authenticated users. + static const int _TYPE_ALL_AUTHENTICATED = 4; + + /// ACL type for scope representing all users. + static const int _TYPE_ALL_USERS = 5; + + /// The id of the actual entity this ACL scope represents. The actual values + /// are set in the different subclasses. + final String _id; + + /// The type of this acope this ACL scope represents. + final int _type; + + /// ACL scope for all authenticated users. + static const allAuthenticated = const AllAuthenticatedScope(); + + /// ACL scope for all users. + static const allUsers = const AllUsersScope(); + + const AclScope._(this._type, this._id); + + String get _storageEntity { + switch (_type) { + case _TYPE_STORAGE_ID: + return 'user-$_id'; + case _TYPE_ACCOUNT: + return 'user-$_id'; + case _TYPE_GROUP: + return 'group-$_id'; + case _TYPE_DOMAIN: + return 'domain-$_id'; + case _TYPE_ALL_AUTHENTICATED: + return 'allAuthenticatedUsers'; + case _TYPE_ALL_USERS: + return 'allUsers'; + default: + throw new UnsupportedError('Unexpected ACL scope'); + } + } +} + +/// An ACL scope for an entity identified by a 'Google Storage ID'. +/// +/// The [storageId] is a string of 64 hexadecimal digits that identifies a +/// specific Google account holder or a specific Google group. +class StorageIdScope extends AclScope { + StorageIdScope(String storageId) + : super._(AclScope._TYPE_STORAGE_ID, storageId); + + /// Google Storage ID. 
+  String get storageId => _id;
+}
+
+/// An ACL scope for an entity identified by an individual email address.
+class AccountScope extends AclScope {
+  AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email);
+
+  /// Email address.
+  String get email => _id;
+}
+
+/// An ACL scope for an entity identified by a Google Groups email.
+class GroupScope extends AclScope {
+  GroupScope(String group): super._(AclScope._TYPE_GROUP, group);
+
+  /// Group name.
+  String get group => _id;
+}
+
+/// An ACL scope for an entity identified by a domain name.
+class DomainScope extends AclScope {
+  DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain);
+
+  /// Domain name.
+  String get domain => _id;
+}
+
+/// ACL scope for all authenticated users.
+class AllAuthenticatedScope extends AclScope {
+  const AllAuthenticatedScope()
+      : super._(AclScope._TYPE_ALL_AUTHENTICATED, null);
+}
+
+/// ACL scope for all users.
+class AllUsersScope extends AclScope {
+  const AllUsersScope(): super._(AclScope._TYPE_ALL_USERS, null);
+}
+
+/// Permissions for individual scopes in an ACL.
+class AclPermission {
+  /// Provide read access.
+  static const READ = const AclPermission._('READER');
+
+  /// Provide write access.
+  ///
+  /// For objects this permission is the same as [FULL_CONTROL].
+  static const WRITE = const AclPermission._('WRITER');
+
+  /// Provide full control.
+  ///
+  /// For objects this permission is the same as [WRITE].
+  static const FULL_CONTROL = const AclPermission._('OWNER');
+
+  final String _id;
+
+  const AclPermission._(this._id);
+
+  String get _storageBucketRole => _id;
+
+  String get _storageObjectRole => this == WRITE ? FULL_CONTROL._id : _id;
+}
+
+/// Definition of predefined ACLs.
 ///
-/// TODO: Support for building custom ACLs.
-class ObjectAcl {
-  static const AUTHENTICATED_READ = const ObjectAcl._('authenticatedRead');
-  static const BUCKET_OWNER_FULL_CONTROL =
-      const ObjectAcl._('bucketOwnerFullControl');
-  static const BUCKET_OWNER_READ = const ObjectAcl._('bucketOwnerRead');
-  static const PRIVATE = const ObjectAcl._('private');
-  static const PROJECT_PRIVATE = const ObjectAcl._('projectPrivate');
-  static const PUBLIC_READ = const ObjectAcl._('publicRead');
-
-  // Enum value for a predefined bucket ACL.
-  final String _predefined;
-
-  /// Whether this ACL is one of the predefined ones.
-  bool get isPredefined => true;
-
-  const ObjectAcl._(String this._predefined);
+/// There is a convenient way of referring to a number of _predefined_ ACLs.
+/// These predefined ACLs have explicit names, and can _only_ be used to set an
+/// ACL when either creating or updating a bucket or object. This set of
+/// predefined ACLs is expanded on the server to the actual list of [AclEntry]
+/// objects. When information is retrieved on a bucket or object, this expanded
+/// list will be present. For a description of these predefined ACLs see:
+/// https://cloud.google.com/storage/docs/accesscontrol#extension.
+class PredefinedAcl {
+  String _name;
+  PredefinedAcl._(this._name);
+
+  /// Predefined ACL for the 'authenticated-read' ACL. Applies to both buckets
+  /// and objects.
+  static PredefinedAcl authenticatedRead =
+      new PredefinedAcl._('authenticatedRead');
+
+  /// Predefined ACL for the 'private' ACL. Applies to both buckets
+  /// and objects.
+  static PredefinedAcl private = new PredefinedAcl._('private');
+
+  /// Predefined ACL for the 'project-private' ACL. Applies to both buckets
+  /// and objects.
+  static PredefinedAcl projectPrivate = new PredefinedAcl._('projectPrivate');
+
+  /// Predefined ACL for the 'public-read' ACL. Applies to both buckets
+  /// and objects.
+  static PredefinedAcl publicRead = new PredefinedAcl._('publicRead');
+
+  /// Predefined ACL for the 'public-read-write' ACL. Applies only to buckets.
+  static PredefinedAcl publicReadWrite = new PredefinedAcl._('publicReadWrite');
+
+  /// Predefined ACL for the 'bucket-owner-full-control' ACL. Applies only to
+  /// objects.
+  static PredefinedAcl bucketOwnerFullControl =
+      new PredefinedAcl._('bucketOwnerFullControl');
+
+  /// Predefined ACL for the 'bucket-owner-read' ACL. Applies only to
+  /// objects.
+  static PredefinedAcl bucketOwnerRead = new PredefinedAcl._('bucketOwnerRead');
 }

 /// Information on a bucket.
@@ -119,7 +303,7 @@ abstract class BucketInfo {
 /// Access to Cloud Storage
 abstract class Storage {
   /// List of required OAuth2 scopes for Cloud Storage operation.
-  static const Scopes = const [ storage.StorageApi.DevstorageFullControlScope ];
+  static const Scopes = const [storage.StorageApi.DevstorageFullControlScope];

   /// Initializes access to cloud storage.
   factory Storage(http.Client client, String project) = _StorageImpl;
@@ -128,8 +312,13 @@ abstract class Storage {
   ///
   /// Creates a cloud storage bucket named [bucketName].
   ///
+  /// The bucket ACL can be set by passing [predefinedAcl] or [acl]. If both
+  /// are passed the entries from [acl] will be followed by the expansion of
+  /// [predefinedAcl].
+  ///
   /// Returns a [Future] which completes when the bucket has been created.
-  Future createBucket(String bucketName, {BucketAcl acl});
+  Future createBucket(String bucketName,
+                      {PredefinedAcl predefinedAcl, Acl acl});

   /// Delete a cloud storage bucket.
   ///
@@ -144,13 +333,21 @@ abstract class Storage {
   ///
   /// Instantiates a `Bucket` object referring to the bucket named [bucketName].
   ///
-  /// If the [defaultObjectAcl] argument is passed the resulting `Bucket` will
-  /// attach this ACL to all objects created using this `Bucket` object.
+  /// When an object is created using the resulting `Bucket` an ACL will always
+  /// be set. If the object creation does not pass any explicit ACL information
+  /// a default ACL will be used.
+  ///
+  /// If the arguments [defaultPredefinedObjectAcl] or [defaultObjectAcl] are
+  /// passed they define the default ACL. If both are passed the entries from
+  /// [defaultObjectAcl] will be followed by the expansion of
+  /// [defaultPredefinedObjectAcl] when an object is created.
   ///
   /// Otherwise the default object ACL attached to the bucket will be used.
   ///
   /// Returns a `Bucket` instance.
-  Bucket bucket(String bucketName, {ObjectAcl defaultObjectAcl});
+  Bucket bucket(String bucketName,
+                {PredefinedAcl defaultPredefinedObjectAcl,
+                 Acl defaultObjectAcl});

   /// Check whether a cloud storage bucket exists.
   ///
@@ -234,18 +431,11 @@ abstract class ObjectGeneration {
 /// Access to object metadata
 abstract class ObjectMetadata {
-  factory ObjectMetadata({ObjectAcl acl,
-                          String contentType,
-                          String contentEncoding,
-                          String cacheControl,
-                          String contentDisposition,
-                          String contentLanguage,
-                          Map custom}) = _ObjectMetadata;
+  factory ObjectMetadata({Acl acl, String contentType, String contentEncoding,
+      String cacheControl, String contentDisposition, String contentLanguage,
+      Map custom}) = _ObjectMetadata;

   /// ACL
-  ///
-  /// Currently it is only possible to set the ACL on one of the predefined
-  /// values from the class `ObjectAcl`.
-  void set acl(ObjectAcl value);
+  void set acl(Acl value);

   /// `Content-Type` for this object.
   String contentType;
@@ -270,13 +460,9 @@ abstract class ObjectMetadata {
   /// Create a copy of this object with some values replaced.
   ///
   /// TODO: This cannot be used to set values to null.
-  ObjectMetadata replace({ObjectAcl acl,
-                          String contentType,
-                          String contentEncoding,
-                          String cacheControl,
-                          String contentDisposition,
-                          String contentLanguage,
-                          Map custom});
+  ObjectMetadata replace({Acl acl, String contentType, String contentEncoding,
+      String cacheControl, String contentDisposition, String contentLanguage,
+      Map custom});
 }

 /// Result from List objects in a bucket.
@@ -327,12 +513,17 @@ abstract class Bucket {
   /// If [contentType] is not passed the default value of
   /// `application/octet-stream` will be used.
   ///
+  /// It is possible to set one of the predefined ACLs on the created object
+  /// using the [predefinedAcl] argument. If the [metadata] argument contains
+  /// an ACL as well, this ACL will be followed by the expansion of
+  /// [predefinedAcl].
+  ///
   /// Returns a `StreamSink` where the object content can be written. When
   /// the object content has been written the `StreamSink` completes with
-  /// an `ObjectStat` instance with the information on the object created.
-  StreamSink<List<int>> write(
-      String objectName,
-      {int length, ObjectMetadata metadata, String contentType});
+  /// an `ObjectInfo` instance with the information on the object created.
+  StreamSink<List<int>> write(String objectName,
+      {int length, ObjectMetadata metadata,
+       Acl acl, PredefinedAcl predefinedAcl, String contentType});

   /// Create a new object in the bucket with specified content.
   ///
   /// See [write] for more information on the additional arguments.
   ///
-  /// Returns a `Future` which completes when the object is written.
-  Future writeBytes(String name, List bytes,
-                    {String contentType, ObjectMetadata metadata});
+  /// Returns a `Future` which completes with an `ObjectInfo` instance when
+  /// the object is written.
+  Future writeBytes(String name, List bytes,
+      {ObjectMetadata metadata,
+       Acl acl, PredefinedAcl predefinedAcl, String contentType});

   /// Read object content.
/// diff --git a/pkgs/gcloud/test/storage_test.dart b/pkgs/gcloud/test/storage_test.dart index 40b19c69..be7038b8 100644 --- a/pkgs/gcloud/test/storage_test.dart +++ b/pkgs/gcloud/test/storage_test.dart @@ -46,13 +46,13 @@ main() { }); }); - test('create-with-acl', () { + test('create-with-predefined-acl', () { var predefined = - [[BucketAcl.AUTHENTICATED_READ, 'authenticatedRead'], - [BucketAcl.PRIVATE, 'private'], - [BucketAcl.PROJECT_PRIVATE, 'projectPrivate'], - [BucketAcl.PUBLIC_READ, 'publicRead'], - [BucketAcl.PUBLIC_READ_WRITE, 'publicReadWrite']]; + [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite']]; withMockClient((mock, api) { int count = 0; @@ -61,6 +61,7 @@ main() { var requestBucket = new storage.Bucket.fromJson(JSON.decode(request.body)); expect(requestBucket.name, bucketName); + expect(requestBucket.acl, isNull); expect(request.url.queryParameters['predefinedAcl'], predefined[count++][1]); return mock.respond(new storage.Bucket()..name = bucketName); @@ -68,7 +69,129 @@ main() { var futures = []; for (int i = 0; i < predefined.length; i++) { - futures.add(api.createBucket(bucketName, acl: predefined[i][0])); + futures.add(api.createBucket(bucketName, + predefinedAcl: predefined[i][0])); + } + return Future.wait(futures); + }); + }); + + test('create-with-acl', () { + var acl1 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + ]); + var acl2 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + ]); + var acl3 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), + AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + int count = 0; + + mock.register('POST', 'b', expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], isNull); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl.length, count + 1); + expect(requestBucket.acl[0].entity, 'user-user@example.com'); + expect(requestBucket.acl[0].role, 'OWNER'); + if (count > 0) { + expect(requestBucket.acl[1].entity, 'group-group@example.com'); + expect(requestBucket.acl[1].role, 'WRITER'); + } + if (count > 2) { + expect(requestBucket.acl[2].entity, 'domain-example.com'); + expect(requestBucket.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: acls.length)); + + var futures = []; + for (int i = 0; i < acls.length; i++) { + futures.add(api.createBucket(bucketName, acl: acls[i])); + } + return Future.wait(futures); + }); + }); + + test('create-with-acl-and-predefined-acl', () { + var predefined = + [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite']]; + + var acl1 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + ]); + 
var acl2 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + ]); + var acl3 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), + AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + int count = 0; + + mock.register('POST', 'b', expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + int predefinedIndex = count ~/ acls.length; + int aclIndex = count % acls.length; + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], + predefined[predefinedIndex][1]); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl.length, aclIndex + 1); + expect(requestBucket.acl[0].entity, 'user-user@example.com'); + expect(requestBucket.acl[0].role, 'OWNER'); + if (aclIndex > 0) { + expect(requestBucket.acl[1].entity, 'group-group@example.com'); + expect(requestBucket.acl[1].role, 'WRITER'); + } + if (aclIndex > 2) { + expect(requestBucket.acl[2].entity, 'domain-example.com'); + expect(requestBucket.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: predefined.length * acls.length)); + + var futures = []; + for (int i = 0; i < predefined.length; i++) { + for (int j = 0; j < acls.length; j++) { + futures.add(api.createBucket( + bucketName, predefinedAcl: predefined[i][0], acl: acls[j])); + } } return Future.wait(futures); }); @@ -472,6 +595,183 @@ main() { }); }); + test('write-with-predefined-acl', () { + var predefined = + [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead']]; + + withMockClient((mock, api) { + int count = 0; + var bytes = [1,2,3]; + + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(request.url.queryParameters['predefinedAcl'], + predefined[count++][1]); + expect(object.acl, isNull); + return mock.respond(new storage.Object()..name = objectName); + })); + }, count: predefined.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (int i = 0; i < predefined.length; i++) { + futures.add(bucket.writeBytes(objectName, bytes, + predefinedAcl: predefined[i][0])); + } + return Future.wait(futures); + }); + }); + + test('write-with-acl', () { + var acl1 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + ]); + var acl2 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + ]); + var acl3 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), + 
AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + int count = 0; + var bytes = [1,2,3]; + + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(request.url.queryParameters['predefinedAcl'], isNull); + expect(object.acl, isNotNull); + expect(object.acl.length, count + 1); + expect(object.acl[0].entity, 'user-user@example.com'); + expect(object.acl[0].role, 'OWNER'); + if (count > 0) { + expect(object.acl[1].entity, 'group-group@example.com'); + expect(object.acl[1].role, 'OWNER'); + } + if (count > 2) { + expect(object.acl[2].entity, 'domain-example.com'); + expect(object.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Object()..name = objectName); + })); + }, count: acls.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (int i = 0; i < acls.length; i++) { + futures.add(bucket.writeBytes(objectName, bytes, acl: acls[i])); + } + return Future.wait(futures); + }); + }); + + test('write-with-acl-and-predefined-acl', () { + var predefined = + [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead']]; + + var acl1 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + ]); + var acl2 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + ]); + var acl3 = new Acl([ + new AclEntry(new AccountScope('user@example.com'), + AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), + AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), + AclPermission.READ), + ]); + + var acls = [acl1, acl2, acl3]; + + withMockClient((mock, api) { + int count = 0; + var bytes = [1,2,3]; + + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { + int predefinedIndex = count ~/ acls.length; + int aclIndex = count % acls.length; + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(request.url.queryParameters['predefinedAcl'], + predefined[predefinedIndex][1]); + expect(object.acl, isNotNull); + expect(object.acl.length, aclIndex + 1); + expect(object.acl[0].entity, 'user-user@example.com'); + expect(object.acl[0].role, 'OWNER'); + if (aclIndex > 0) { + expect(object.acl[1].entity, 'group-group@example.com'); + expect(object.acl[1].role, 'OWNER'); + } + if (aclIndex > 2) { + expect(object.acl[2].entity, 'domain-example.com'); + expect(object.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Object()..name = objectName); + })); + }, count: predefined.length * acls.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (int i = 0; i < predefined.length; i++) { + for (int j = 0; j < acls.length; j++) { + futures.add(bucket.writeBytes( + objectName, bytes, + acl: acls[j], 
predefinedAcl: predefined[i][0])); + } + } + return Future.wait(futures); + }); + }); + + + test('read', () { var bytes = [1, 2, 3]; withMockClient((mock, api) { From 23afc3f9560fda0ba9eea5364a14ff803f617095 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 29 Oct 2014 13:03:24 +0100 Subject: [PATCH 019/239] Move test-files into subdirectories R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//686043003 --- pkgs/gcloud/test/{ => pubsub}/pubsub_test.dart | 2 +- pkgs/gcloud/test/{ => storage}/storage_test.dart | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename pkgs/gcloud/test/{ => pubsub}/pubsub_test.dart (99%) rename pkgs/gcloud/test/{ => storage}/storage_test.dart (99%) diff --git a/pkgs/gcloud/test/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart similarity index 99% rename from pkgs/gcloud/test/pubsub_test.dart rename to pkgs/gcloud/test/pubsub/pubsub_test.dart index 82ee3437..95c53d64 100644 --- a/pkgs/gcloud/test/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -13,7 +13,7 @@ import 'package:gcloud/pubsub.dart'; import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; -import 'common.dart'; +import '../common.dart'; const String ROOT_PATH = '/pubsub/v1beta1/'; diff --git a/pkgs/gcloud/test/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart similarity index 99% rename from pkgs/gcloud/test/storage_test.dart rename to pkgs/gcloud/test/storage/storage_test.dart index be7038b8..7d59f28b 100644 --- a/pkgs/gcloud/test/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -15,7 +15,7 @@ import 'package:gcloud/storage.dart'; import 'package:googleapis/storage/v1.dart' as storage; import 'package:googleapis/common/common.dart' as common; -import 'common.dart'; +import '../common.dart'; const String ROOT_PATH = '/storage/v1/'; From 12fda7a68f5435e59bcc08e259dd2963ac2358d4 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 30 Oct 2014 10:56:04 +0100 Subject: [PATCH 020/239] Skip storage e2e tests on browser R=ricow@google.com Review URL: https://codereview.chromium.org//691733002 --- pkgs/gcloud/.status | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index 7f29452f..fd22c860 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -8,5 +8,9 @@ build/test/db/e2e/*: Skip test/datastore/e2e/*: Skip test/db/e2e/*: Skip +[ $browser ] +build/test/storage/e2e_test: Skip +test/storage/e2e_test: Skip + [ $compiler == dart2js ] *: Skip From 9784b047b9f39ee8ba78ac3611b895de225eb33f Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 30 Oct 2014 12:36:07 +0100 Subject: [PATCH 021/239] Make low level datastore query API use Page<>, make highlevel db API use Stream for queries R=sgjesse@google.com Review URL: https://codereview.chromium.org//671643004 --- pkgs/gcloud/lib/datastore.dart | 13 +- pkgs/gcloud/lib/db.dart | 2 + pkgs/gcloud/lib/src/datastore_impl.dart | 171 ++++++++++++++---- pkgs/gcloud/lib/src/db/db.dart | 16 +- .../test/datastore/e2e/datastore_test.dart | 40 ++-- pkgs/gcloud/test/db/e2e/db_test.dart | 28 +-- 6 files changed, 202 insertions(+), 68 deletions(-) diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 578e7979..3dbf4bf8 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -11,6 +11,8 @@ library gcloud.datastore; import 'dart:async'; +import 'common.dart' show Page; + class ApplicationError implements Exception { final String message; 
ApplicationError(this.message); @@ -369,7 +371,13 @@ abstract class Datastore { /// If a [transaction] is given, the lookup will be within this transaction. Future> lookup(List keys, {Transaction transaction}); - /// Runs a query on the dataset and returns matching [Entity]s. + /// Runs a query on the dataset and returns a [Page] of matching [Entity]s. + /// + /// The [Page] instance returned might not contain all matching [Entity]s - + /// in which case `isLast` is set to `false`. The page's `next` method can + /// be used to page through the whole result set. + /// The maximum number of [Entity]s returned within a single page is + /// implementation specific. /// /// - `query` is used to restrict the number of returned [Entity]s and may /// may specify an order. @@ -383,7 +391,6 @@ abstract class Datastore { /// /// Outside of transactions, the result set might be stale. Queries are by /// default eventually consistent. - /// TODO(Issue #6): Make this pageable. - Future> query( + Future> query( Query query, {Partition partition, Transaction transaction}); } diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 478a8a7f..b401896a 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -9,6 +9,8 @@ import 'dart:collection'; import 'dart:mirrors' as mirrors; import 'datastore.dart' as datastore; +import 'common.dart' show Page, StreamFromPages; + part 'src/db/annotations.dart'; part 'src/db/db.dart'; part 'src/db/models.dart'; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 8e2e3e5d..1ff4a23d 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -5,10 +5,13 @@ library gcloud.datastore_impl; import 'dart:async'; +import 'dart:convert'; +import 'dart:math'; import 'package:http/http.dart' as http; import '../datastore.dart' as datastore; +import '../common.dart' show Page; import 'package:googleapis_beta/datastore/v1beta2.dart' as api; class TransactionImpl implements datastore.Transaction { @@ -44,7 +47,7 @@ class DatastoreImpl implements datastore.Datastore { return apiKey; } - datastore.Key _convertApi2DatastoreKey(api.Key key) { + static datastore.Key _convertApi2DatastoreKey(api.Key key) { var elements = key.path.map((api.KeyPathElement element) { if (element.id != null) { return new datastore.KeyElement(element.kind, int.parse(element.id)); @@ -84,7 +87,7 @@ class DatastoreImpl implements datastore.Datastore { return true; } - _convertApi2DatastorePropertyValue(api.Value value) { + static _convertApi2DatastorePropertyValue(api.Value value) { if (value.booleanValue != null) return value.booleanValue; else if (value.integerValue != null) @@ -153,7 +156,7 @@ class DatastoreImpl implements datastore.Datastore { } } - _convertApi2DatastoreProperty(api.Property property) { + static _convertApi2DatastoreProperty(api.Property property) { if (property.booleanValue != null) return property.booleanValue; else if (property.integerValue != null) @@ -218,7 +221,7 @@ class DatastoreImpl implements datastore.Datastore { } } - datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { + static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { var unindexedProperties = new Set(); var properties = {}; @@ -354,7 +357,7 @@ class DatastoreImpl implements datastore.Datastore { return orders.map(_convertDatastore2ApiOrder).toList(); } - Future _handleError(error, stack) { + static Future _handleError(error, stack) { if (error is api.DetailedApiRequestError) { 
if (error.status == 400) { return new Future.error( @@ -504,14 +507,15 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } - Future> query(datastore.Query query, - {datastore.Partition partition, - datastore.Transaction transaction}) { + Future> query( + datastore.Query query, {datastore.Partition partition, + datastore.Transaction transaction}) { + // NOTE: We explicitly do not set 'limit' here, since this is handled by + // QueryPageImpl.runQuery. var apiQuery = new api.Query() ..filter = _convertDatastore2ApiFilters(query.filters, query.ancestorKey) ..order = _convertDatastore2ApiOrders(query.orders) - ..limit = query.limit ..offset = query.offset; if (query.kind != null) { @@ -530,30 +534,8 @@ class DatastoreImpl implements datastore.Datastore { ..namespace = partition.namespace; } - var results = []; - Future next({String lastEndCursor}) { - apiQuery.startCursor = lastEndCursor; - return _api.datasets.runQuery(request, _project).then((result) { - var batch = result.batch; - if (batch.entityResults != null) { - for (var result in batch.entityResults) { - results.add(_convertApi2DatastoreEntity(result.entity)); - } - } - if (result.batch.moreResults == 'NOT_FINISHED') { - if (result.batch.endCursor == null) { - throw new datastore.DatastoreError( - 'Server did not supply an end cursor, even though the query ' - 'is not done.'); - } - return next(lastEndCursor: result.batch.endCursor); - } else { - return results; - } - }); - } - - return next().catchError(_handleError); + return QueryPageImpl.runQuery(_api, _project, request, query.limit) + .catchError(_handleError); } Future rollback(datastore.Transaction transaction) { @@ -563,3 +545,126 @@ class DatastoreImpl implements datastore.Datastore { return _api.datasets.rollback(request, _project).catchError(_handleError); } } + +class QueryPageImpl implements Page { + static const int MAX_ENTITIES_PER_RESPONSE = 2000; + + final api.DatastoreApi _api; + final String _project; + final api.RunQueryRequest _nextRequest; + final List _entities; + final bool _isLast; + + // This might be `null` in which case we request as many as we can get. + final int _remainingNumberOfEntities; + + QueryPageImpl(this._api, this._project, + this._nextRequest, this._entities, + this._isLast, this._remainingNumberOfEntities); + + static Future runQuery(api.DatastoreApi api, + String project, + api.RunQueryRequest request, + int limit, + {int batchSize}) { + int batchLimit = batchSize; + if (batchLimit == null) { + batchLimit = MAX_ENTITIES_PER_RESPONSE; + } + if (limit != null && limit < batchLimit) { + batchLimit = limit; + } + + request.query.limit = batchLimit; + + return api.datasets.runQuery(request, project).then((response) { + var returnedEntities = const []; + + var batch = response.batch; + if (batch.entityResults != null) { + returnedEntities = batch.entityResults + .map((result) => result.entity) + .map(DatastoreImpl._convertApi2DatastoreEntity) + .toList(); + } + + // This check is only necessary for the first request/response pair + // (if offset was supplied). 
+ if (request.query.offset != null && + request.query.offset > 0 && + request.query.offset != response.batch.skippedResults) { + throw new datastore.DatastoreError( + 'Server did not skip over the specified ${request.query.offset} ' + 'entities.'); + } + + if (limit != null && returnedEntities.length > limit) { + throw new datastore.DatastoreError( + 'Server returned more entities then the limit for the request' + '(${request.query.limit}) was.'); + } + + if (limit != null && + returnedEntities.length < batchLimit && + response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT') { + throw new datastore.DatastoreError( + 'Server returned response with less entities then the limit was, ' + 'but signals there are more results after the limit.'); + } + + // In case a limit was specified, we need to subtraction the number of + // entities we already got. + // (the checks above guarantee that this subraction is >= 0). + int remainingEntities; + if (limit != null) { + remainingEntities = limit - returnedEntities.length; + } + + bool isLast = ((limit != null && remainingEntities == 0) || + response.batch.moreResults == 'NO_MORE_RESULTS'); + + if (!isLast && response.batch.endCursor == null) { + throw new datastore.DatastoreError( + 'Server did not supply an end cursor, even though the query ' + 'is not done.'); + } + + if (isLast) { + return new QueryPageImpl( + api, project, request, returnedEntities, true, null); + } else { + // NOTE: We reuse the old RunQueryRequest object here . + + // The offset will be 0 from now on, since the first request will have + // skipped over the first `offset` results. + request.query.offset = 0; + + // Furthermore we set the startCursor to the endCursor of the previous + // result batch, so we can continue where we left off. + request.query.startCursor = batch.endCursor; + + return new QueryPageImpl( + api, project, request, returnedEntities, false, remainingEntities); + } + }); + } + + bool get isLast => _isLast; + + List get items => _entities; + + Future> next({int pageSize}) { + // NOTE: We do not respect [pageSize] here, the only mechanism we can + // really use is `query.limit`, but this is user-specified when making + // the query. + if (isLast) { + return new Future.sync(() { + throw new ArgumentError('Cannot call next() on last page.'); + }); + } + + return QueryPageImpl.runQuery( + _api, _project, _nextRequest, _remainingNumberOfEntities) + .catchError(DatastoreImpl._handleError); + } +} diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 8a6e4e70..fababe09 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -198,7 +198,7 @@ class Query { * return the newest updates performed on the datastore since updates * will be reflected in the indices in an eventual consistent way. 
*/ - Future> run() { + Stream run() { var ancestorKey; if (_ancestorKey != null) { ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); @@ -213,13 +213,17 @@ class Query { partition = new datastore.Partition(_partition.namespace); } - return _db.datastore.query( - query, transaction: _transaction, partition: partition) - .then((List entities) { - return entities.map(_db.modelDB.fromDatastoreEntity).toList(); - }); + return new StreamFromPages((int pageSize) { + return _db.datastore.query( + query, transaction: _transaction, partition: partition); + }).stream.map(_db.modelDB.fromDatastoreEntity); } + // TODO: + // - add runPaged() returning Page + // - add run*() method once we have EntityResult{Entity,Cursor} in low-level + // API. + String _convertToDatastoreName(String name) { var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name); diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test.dart index 9b1bb91f..410582b4 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test.dart @@ -30,6 +30,7 @@ library datastore_test; import 'dart:async'; import 'package:gcloud/datastore.dart'; +import 'package:gcloud/common.dart'; import 'package:unittest/unittest.dart'; import '../error_matchers.dart'; @@ -41,6 +42,10 @@ Future sleep(Duration duration) { return completer.future; } +Future> consumePages(FirstPageProvider provider) { + return new StreamFromPages(provider).stream.toList(); +} + runTests(Datastore datastore) { Future withTransaction(Function f, {bool xg: false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); @@ -592,7 +597,8 @@ runTests(Datastore datastore) { var query = new Query( kind: kind, filters: filters, orders: orders, offset: offset, limit: limit); - return datastore.query(query).then((List entities) { + return consumePages((_) => datastore.query(query)) + .then((List entities) { if (transaction != null) { return datastore.commit(transaction: transaction) .then((_) => entities); @@ -900,7 +906,8 @@ runTests(Datastore datastore) { () { var ancestorQuery = new Query(ancestorKey: rootKey, orders: orders); - return datastore.query(ancestorQuery).then((results) { + return consumePages((_) => datastore.query(ancestorQuery)) + .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); expect(compareEntity(entity2, results[1]), isTrue); @@ -910,7 +917,8 @@ runTests(Datastore datastore) { () { var ancestorQuery = new Query(ancestorKey: subKey, orders: orders); - return datastore.query(ancestorQuery).then((results) { + return consumePages((_) => datastore.query(ancestorQuery)) + .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); expect(compareEntity(entity2, results[1]), isTrue); @@ -919,7 +927,8 @@ runTests(Datastore datastore) { // - by [subSubKey] () { var ancestorQuery = new Query(ancestorKey: subSubKey); - return datastore.query(ancestorQuery).then((results) { + return consumePages((_) => datastore.query(ancestorQuery)) + .then((results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); }); @@ -927,7 +936,8 @@ runTests(Datastore datastore) { // - by [subSubKey2] () { var ancestorQuery = new Query(ancestorKey: subSubKey2); - return datastore.query(ancestorQuery).then((results) { + return consumePages((_) => datastore.query(ancestorQuery)) + .then((results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), 
isTrue); }); @@ -937,7 +947,8 @@ runTests(Datastore datastore) { // - by [rootKey] + 'SubSubKind' () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); }); @@ -945,7 +956,8 @@ runTests(Datastore datastore) { // - by [rootKey] + 'SubSubKind2' () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind2'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); }); @@ -953,7 +965,8 @@ runTests(Datastore datastore) { // - by [subSubKey] + 'SubSubKind' () { var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); }); @@ -962,7 +975,8 @@ runTests(Datastore datastore) { () { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); }); @@ -971,7 +985,8 @@ runTests(Datastore datastore) { () { var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 0); }); }, @@ -979,7 +994,8 @@ runTests(Datastore datastore) { () { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); - return datastore.query(query).then((List results) { + return consumePages((_) => datastore.query(query)) + .then((List results) { expect(results.length, 0); }); }, @@ -992,8 +1008,6 @@ runTests(Datastore datastore) { return Future.forEach(futures, (f) => f()).then(expectAsync((_) {})); }); }); - }); - }); } diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart index addc129c..ceee4067 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -479,14 +479,16 @@ runTests(db.DatastoreDB store) { // Queries for [Person] return no results, we only have [User] // objects. 
() { - return store.query(Person).run().then((List models) { + return store.query(Person).run().toList() + .then((List models) { compareModels([], models); }); }, // All users query () { - return store.query(User).run().then((List models) { + return store.query(User).run().toList() + .then((List models) { compareModels(users, models, anyOrder: true); }); }, @@ -496,7 +498,7 @@ runTests(db.DatastoreDB store) { return store.query(User) ..order('-name') ..order('nickname') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels( usersSortedNameDescNicknameAsc, models); }); @@ -505,7 +507,7 @@ runTests(db.DatastoreDB store) { return store.query(User) ..order('-name') ..order('-nickname') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels( usersSortedNameDescNicknameDesc, models); }); @@ -517,7 +519,7 @@ runTests(db.DatastoreDB store) { ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('nickname') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels(usersSortedAndFilteredNameDescNicknameAsc, models); }); @@ -527,7 +529,7 @@ runTests(db.DatastoreDB store) { ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('-nickname') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels(usersSortedAndFilteredNameDescNicknameDesc, models); }); @@ -539,7 +541,7 @@ runTests(db.DatastoreDB store) { return store.query(User) ..filter('languages IN', ['foo']) ..order('name') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels(fooUsers, models, anyOrder: true); }); }, @@ -547,7 +549,7 @@ runTests(db.DatastoreDB store) { return store.query(User) ..filter('languages IN', ['bar']) ..order('name') - ..run().then((List models) { + ..run().toList().then((List models) { compareModels(barUsers, models, anyOrder: true); }); }, @@ -559,7 +561,7 @@ runTests(db.DatastoreDB store) { ..order('nickname') ..offset(3) ..limit(4) - ..run().then((List models) { + ..run().toList().then((List models) { var expectedModels = usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); compareModels(expectedModels, models); @@ -570,7 +572,7 @@ runTests(db.DatastoreDB store) { () { return store.query(ExpandoPerson) ..filter('name =', expandoPersons.last.name) - ..run().then((List models) { + ..run().toList().then((List models) { compareModels([expandoPersons.last], models); }); }, @@ -578,7 +580,7 @@ runTests(db.DatastoreDB store) { () { return store.query(ExpandoPerson) ..filter('foo =', expandoPersons.last.foo) - ..run().then((List models) { + ..run().toList().then((List models) { compareModels([expandoPersons.last], models); }); }, @@ -586,7 +588,7 @@ runTests(db.DatastoreDB store) { () { return store.query(ExpandoPerson) ..filter('bar =', expandoPersons.last.bar) - ..run().then((List models) { + ..run().toList().then((List models) { compareModels([expandoPersons.last], models); }); }, @@ -595,7 +597,7 @@ runTests(db.DatastoreDB store) { () { return store.query(ExpandoPerson) ..filter('nickname =', expandoPersons.last.nickname) - ..run().then((List models) { + ..run().toList().then((List models) { compareModels([expandoPersons.last], models); }); }, From 660ac5541cb287bb0358e8d0d5a8320d250b3dde Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 30 Oct 2014 09:07:18 -0700 Subject: [PATCH 022/239] Make sure TODOs don't show up in doc comments R=kustermann@google.com Review URL: https://codereview.chromium.org//687263003 --- pkgs/gcloud/lib/datastore.dart | 8 ++++---- 
pkgs/gcloud/lib/storage.dart | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 3dbf4bf8..6303ec21 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -156,7 +156,7 @@ class Key { /// The default namespace is `null`. Using empty Strings as namespaces is /// invalid. /// -/// TODO(Issue #6): Add dataset-id here. +// TODO(Issue #6): Add dataset-id here. class Partition { static const Partition DEFAULT = const Partition._default(); @@ -241,7 +241,7 @@ class Filter { /// The direction of a order. /// -/// TODO(Issue #6): Make this class Private and add the two statics to the +// TODO(Issue #6): Make this class Private and add the two statics to the /// 'Order' class. /// [i.e. so one can write Order.Ascending, Order.Descending]. class OrderDirection { @@ -261,7 +261,7 @@ class Order { /// The name of the property used for the order. final String propertyName; - /// TODO(Issue #6): Make [direction] the second argument and make it optional. + // TODO(Issue #6): Make [direction] the second argument and make it optional. Order(this.direction, this.propertyName); } @@ -355,7 +355,7 @@ abstract class Datastore { /// /// This method might complete with a [TransactionAbortedError] error. /// Users must take care of retrying transactions. - /// TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert. + // TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert. Future commit({List inserts, List autoIdInserts, List deletes, diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 10307461..68368e7d 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -459,7 +459,7 @@ abstract class ObjectMetadata { /// Create a copy of this object with some values replaces. /// - /// TODO: This cannot be used to set values to null. + // TODO: This cannot be used to set values to null. ObjectMetadata replace({Acl acl, String contentType, String contentEncoding, String cacheControl, String contentDisposition, String contentLanguage, Map custom}); @@ -539,17 +539,17 @@ abstract class Bucket { /// Read object content. /// - /// TODO: More documentation + // TODO: More documentation Stream> read(String objectName, {int offset: 0, int length}); /// Lookup object metadata. /// - /// TODO: More documentation + // TODO: More documentation Future info(String name); /// Update object metadata. /// - /// TODO: More documentation + // TODO: More documentation Future updateMetadata(String objectName, ObjectMetadata metadata); /// List objects in the bucket. 
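The storage patches up to this point replace the predefined-only BucketAcl/ObjectAcl
classes with the Acl, AclEntry, AclScope and PredefinedAcl surface shown above. A
minimal usage sketch of that surface follows; it only uses calls that appear in the
patches, but the authenticated http.Client, the project id and the bucket/object
names are placeholders and not taken from the patches themselves:

    import 'dart:async';

    import 'package:gcloud/storage.dart';
    import 'package:http/http.dart' as http;

    // Sketch only: `client` must already be authenticated with the scopes in
    // `Storage.Scopes`; 'my-project', 'my-bucket' and 'hello.txt' are
    // placeholder names.
    Future uploadWithAcls(http.Client client) {
      var storage = new Storage(client, 'my-project');
      var bucketAcl = new Acl([
        new AclEntry(new AccountScope('user@example.com'),
                     AclPermission.FULL_CONTROL)
      ]);
      return storage.createBucket('my-bucket', acl: bucketAcl).then((_) {
        // Objects created through this Bucket get 'projectPrivate' by default.
        var bucket = storage.bucket('my-bucket',
            defaultPredefinedObjectAcl: PredefinedAcl.projectPrivate);
        // A per-call predefined ACL overrides that default.
        return bucket.writeBytes('hello.txt', 'hello'.codeUnits,
                                 contentType: 'text/plain',
                                 predefinedAcl: PredefinedAcl.publicRead);
      });
    }
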
From 116da04f14e8c34a5fd316a62cf2a9aa5081dcb3 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 30 Oct 2014 09:10:04 -0700 Subject: [PATCH 023/239] gcloud: Analysis fixes R=sgjesse@google.com Review URL: https://codereview.chromium.org//655063005 --- pkgs/gcloud/lib/src/datastore_impl.dart | 1 + pkgs/gcloud/lib/src/storage_impl.dart | 1 + pkgs/gcloud/test/datastore/error_matchers.dart | 1 - pkgs/gcloud/test/storage/storage_test.dart | 2 +- 4 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 1ff4a23d..1638d341 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -186,6 +186,7 @@ class DatastoreImpl implements datastore.Datastore { var apiProperty = new api.Property() ..indexed = indexed; if (value == null) { + return null; } else if (value is bool) { return apiProperty ..booleanValue = value; diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index e3ec721b..552342b3 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -390,6 +390,7 @@ class _ObjectMetadata implements ObjectMetadata { _ObjectMetadata._(this._object); + List get _acl => _object.acl; set acl(Acl value) => _object.acl = value._toObjectAccessControlList(); String get contentType => _object.contentType; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 2a7e8656..582ec1aa 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -6,7 +6,6 @@ library error_matchers; import 'package:unittest/unittest.dart'; import 'package:gcloud/datastore.dart'; -import 'package:gcloud/db.dart'; class _ApplicationError extends TypeMatcher { const _ApplicationError() : super("ApplicationError"); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 7d59f28b..4afa185c 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -203,7 +203,7 @@ main() { 'DELETE', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); - return mock.respond(new storage.Bucket()..name = bucketName);; + return mock.respond(new storage.Bucket()..name = bucketName); })); expect(api.deleteBucket(bucketName), completion(isNull)); From e45b0bae483fafe30c05c397373a407a49450ac5 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 30 Oct 2014 09:11:08 -0700 Subject: [PATCH 024/239] updated gcloud dependencies and ignore .pub dir R=sgjesse@google.com Review URL: https://codereview.chromium.org//689723004 --- pkgs/gcloud/.gitignore | 1 + pkgs/gcloud/pubspec.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore index 25161add..192d2706 100644 --- a/pkgs/gcloud/.gitignore +++ b/pkgs/gcloud/.gitignore @@ -1,2 +1,3 @@ pubspec.lock packages +.pub diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 0ad3555e..ad7f62fd 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -5,14 +5,14 @@ environment: sdk: '>=1.5.0 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - http: '>=0.11.0 <0.12.0' - googleapis: '>=0.2.0 <0.3.0' - googleapis_beta: '>=0.3.0 <0.4.0' + googleapis: '>=0.2.0 <0.4.0' + googleapis_beta: '>=0.3.0 <0.5.0' + http: '>=0.11.0 
<0.12.0' dev_dependencies: - unittest: '>=0.11.0 <0.12.0' - mime: '>=0.9.0+3 <0.10.0' http_parser: '>=0.0.2+5 <0.1.0' googleapis_auth: any + mime: '>=0.9.0+3 <0.10.0' + unittest: '>=0.11.0 <0.12.0' transformers: - $dart2js: $include: [] From efdb2a5ac6633e92786b3341a2604c66f38c9025 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 31 Oct 2014 11:11:59 +0100 Subject: [PATCH 025/239] Switch from using separate annotation class to annotations on model class/fields directly It also moves all annotations Kind/Property into the same dart file. R=sgjesse@google.com Review URL: https://codereview.chromium.org//691043002 --- pkgs/gcloud/lib/db.dart | 2 - pkgs/gcloud/lib/db/metamodel.dart | 23 +- pkgs/gcloud/lib/src/db/annotations.dart | 280 +++++++++++- pkgs/gcloud/lib/src/db/model_db_impl.dart | 428 +++++++++--------- pkgs/gcloud/lib/src/db/model_description.dart | 23 - pkgs/gcloud/lib/src/db/models.dart | 6 - pkgs/gcloud/lib/src/db/properties.dart | 225 --------- pkgs/gcloud/test/db/e2e/db_test.dart | 42 +- pkgs/gcloud/test/db/model_db_test.dart | 10 +- .../db/model_dbs/duplicate_fieldname.dart | 23 +- .../test/db/model_dbs/duplicate_kind.dart | 14 +- .../test/db/model_dbs/duplicate_property.dart | 13 +- .../db/model_dbs/invalid_id_property.dart | 15 - .../db/model_dbs/multiple_annotations.dart | 9 +- .../db/model_dbs/no_default_constructor.dart | 7 +- 15 files changed, 520 insertions(+), 600 deletions(-) delete mode 100644 pkgs/gcloud/lib/src/db/model_description.dart delete mode 100644 pkgs/gcloud/lib/src/db/properties.dart delete mode 100644 pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index b401896a..76f3e1fb 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -16,5 +16,3 @@ part 'src/db/db.dart'; part 'src/db/models.dart'; part 'src/db/model_db.dart'; part 'src/db/model_db_impl.dart'; -part 'src/db/model_description.dart'; -part 'src/db/properties.dart'; diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index d30dd87a..9571066c 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -4,10 +4,12 @@ library gcloud.db.meta_model; -import '../db.dart'; +import '../db.dart' as db; + +@db.Kind(name: '__namespace__') +class Namespace extends db.ExpandoModel { + static const int EmptyNamespaceId = 1; -@ModelMetadata(const NamespaceDescription()) -class Namespace extends ExpandoModel { String get name { // The default namespace will be reported with id 1. if (id == NamespaceDescription.EmptyNamespaceId) return null; @@ -15,18 +17,7 @@ class Namespace extends ExpandoModel { } } -@ModelMetadata(const KindDescription()) -class Kind extends Model { +@db.Kind(name: '__kind__') +class Kind extends db.Model { String get name => id; } - -class NamespaceDescription extends ExpandoModelDescription { - static const int EmptyNamespaceId = 1; - final id = const IntProperty(); - const NamespaceDescription() : super('__namespace__'); -} - -class KindDescription extends ModelDescription { - final id = const IntProperty(); - const KindDescription() : super('__kind__'); -} diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 91c20cc7..b1f0f6b5 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -4,33 +4,277 @@ part of gcloud.db; -/// This class should be used to annotate DB Model classes. 
+/// Annotation used to mark Dart classes which can be stored into datastore.
 ///
-/// It will attach a description on how to map dart Objects to Datastore
-/// Entities.
+/// The `Kind` annotation on a class as well as other `Property` annotations on
+/// fields or getters of the class itself (and any of its superclasses) up to
+/// the [Model] class describe the *mapping* of *Dart objects* to datastore
+/// *entities*.
 ///
-/// Note that the model class needs to have an empty default constructor.
+/// An "entity" is an object which can be stored into Google Cloud Datastore.
+/// It contains a number of named "properties", some of which may be indexed,
+/// others not. A "property" value can be of a limited set of supported
+/// types (such as `int` and `String`).
 ///
-/// Here is an example of a Dart Model class and a ModelScription which
-/// describes the mapping.
-///
-///     @ModelMetadata(const PersonDesc())
-///     class Person extends Model {
+/// Here is an example of a Dart model class which can be stored into datastore:
+///     @Kind()
+///     class Person extends db.Model {
+///       @StringProperty()
 ///       String name;
+///
+///       @IntProperty()
+///       int age;
+///
+///       @DateTimeProperty()
 ///       DateTime dateOfBirth;
 ///     }
+class Kind {
+  /// The kind name used when saving objects to datastore.
+  ///
+  /// If `null` the name will be the same as the class name on which the
+  /// annotation is placed.
+  final String name;
+
+  /// The id type, either [IdType.Integer] or [IdType.String].
+  final IdType idType;
+
+  /// Annotation specifying the name of this kind and whether to use integer or
+  /// string `id`s.
+  ///
+  /// If `name` is omitted, it will default to the name of the class to which
+  /// this annotation is attached.
+  const Kind({this.name, this.idType: IdType.Integer});
+}
+
+/// The type used for ids of an entity.
+class IdType {
+  /// Use integer ids for identifying entities.
+  static const IdType Integer = const IdType(1);
+
+  /// Use string ids for identifying entities.
+  static const IdType String = const IdType(2);
+
+  final int _type;
+
+  const IdType(this._type);
+}
+
+/// Describes a property of an Entity.
+///
+/// Please see [Kind] for an example of how to use them.
+abstract class Property {
+  /// The name of the property.
+  ///
+  /// If it is `null`, the name will be the same as used in the
+  /// model class.
+  final String propertyName;
+
+  /// Specifies whether this property is required or not.
+  ///
+  /// If required is `true`, it will be enforced when saving model objects to
+  /// the datastore and when retrieving them.
+  final bool required;
+
+  /// Specifies whether this property should be indexed or not.
+  ///
+  /// When running queries on this property, it is necessary to set [indexed]
+  /// to `true`.
+  final bool indexed;
+
+  const Property({this.propertyName, this.required: false, this.indexed: true});
+
+  bool validate(ModelDB db, Object value) {
+    if (required && value == null) return false;
+    return true;
+  }
+
+  Object encodeValue(ModelDB db, Object value);
+
+  Object decodePrimitiveValue(ModelDB db, Object value);
+}
+
+/// An abstract base class for primitive properties which can e.g. be used
+/// within a composed `ListProperty`.
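// A minimal usage sketch of the `Kind` and `Property` annotations documented
// above. The kind name, class name, and field names ('Author', 'display_name',
// 'bookCount') are illustrative assumptions, not identifiers defined in this
// package.
import 'package:gcloud/db.dart' as db;

@db.Kind(name: 'Author', idType: db.IdType.String)
class Author extends db.Model {
  // Stored in datastore under the property name 'display_name'.
  @db.StringProperty(propertyName: 'display_name', required: true)
  String name;

  // Not indexed, so queries cannot filter on this property.
  @db.IntProperty(indexed: false)
  int bookCount;
}
// Because this kind uses IdType.String, toDatastoreKey (see model_db_impl.dart
// further down in this patch) will reject integer ids for Author keys.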
+abstract class PrimitiveProperty extends Property { + const PrimitiveProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + Object encodeValue(ModelDB db, Object value) => value; + + Object decodePrimitiveValue(ModelDB db, Object value) => value; +} + +/// A boolean [Property]. /// -/// class PersonDesc extends ModelDescription { -/// final id = const IntProperty(); +/// It will validate that values are booleans before writing them to the +/// datastore and when reading them back. +class BoolProperty extends PrimitiveProperty { + const BoolProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is bool); +} + +/// A integer [Property]. /// -/// final name = const StringProperty(); -/// final dateOfBirth = const DateTimeProperty(); +/// It will validate that values are integers before writing them to the +/// datastore and when reading them back. +class IntProperty extends PrimitiveProperty { + const IntProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is int); +} + +/// A double [Property]. /// -/// const GreetingDesc() : super('Person'); -/// } +/// It will validate that values are doubles before writing them to the +/// datastore and when reading them back. +class DoubleProperty extends PrimitiveProperty { + const DoubleProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is double); +} + +/// A string [Property]. +/// +/// It will validate that values are strings before writing them to the +/// datastore and when reading them back. +class StringProperty extends PrimitiveProperty { + const StringProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is String); +} + +/// A key [Property]. +/// +/// It will validate that values are keys before writing them to the +/// datastore and when reading them back. +class ModelKeyProperty extends PrimitiveProperty { + const ModelKeyProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is Key); + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + return db.toDatastoreKey(value); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return null; + return db.fromDatastoreKey(value as datastore.Key); + } +} + +/// A binary blob [Property]. +/// +/// It will validate that values are blobs before writing them to the +/// datastore and when reading them back. Blob values will be represented by +/// List. 
+class BlobProperty extends PrimitiveProperty { + const BlobProperty({String propertyName, bool required: false}) + : super(propertyName: propertyName, required: required, indexed: false); + + // NOTE: We don't validate that the entries of the list are really integers + // of the range 0..255! + // If an untyped list was created the type check will always succeed. i.e. + // "[1, true, 'bar'] is List" evaluates to `true` + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is List); + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + return new datastore.BlobValue(value); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return null; + + datastore.BlobValue blobValue = value; + return blobValue.bytes; + } +} + +/// A datetime [Property]. +/// +/// It will validate that values are DateTime objects before writing them to the +/// datastore and when reading them back. +class DateTimeProperty extends PrimitiveProperty { + const DateTimeProperty( + {String propertyName, bool required: false, bool indexed: true}) + : super(propertyName: propertyName, required: required, indexed: indexed); + + bool validate(ModelDB db, Object value) + => super.validate(db, value) && (value == null || value is DateTime); + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value is int) { + return + new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); + } + return value; + } +} + + +/// A composed list [Property], with a `subProperty` for the list elements. /// -class ModelMetadata { - final ModelDescription description; +/// It will validate that values are List objects before writing them to the +/// datastore and when reading them back. It will also validate the elements +/// of the list itself. +class ListProperty extends Property { + final PrimitiveProperty subProperty; + + // TODO: We want to support optional list properties as well. + // Get rid of "required: true" here. + const ListProperty(this.subProperty, + {String propertyName, bool indexed: true}) + : super(propertyName: propertyName, required: true, indexed: indexed); + + bool validate(ModelDB db, Object value) { + if (!super.validate(db, value) || value is! List) return false; + + for (var entry in value) { + if (!subProperty.validate(db, entry)) return false; + } + return true; + } + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + List list = value; + if (list.length == 0) return null; + if (list.length == 1) return list[0]; + return list.map( + (value) => subProperty.encodeValue(db, value)).toList(); + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return []; + if (value is! List) return [value]; + return (value as List) + .map((entry) => subProperty.decodePrimitiveValue(db, entry)) + .toList(); + } +} - const ModelMetadata(this.description); +/// A convenience [Property] for list of strings. +class StringListProperty extends ListProperty { + const StringListProperty({String propertyName, bool indexed: true}) + : super(const StringProperty(), + propertyName: propertyName, indexed: indexed); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 6b28aaf8..3b44cdec 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -4,88 +4,86 @@ part of gcloud.db; - +/// An implementation of [ModelDB] based on model class annotations. 
+/// +/// The two constructors will scan loaded dart libraries for classes with a +/// [Kind] annotation. +/// +/// An example on how to write a model class is: +/// @Kind +/// class Person extends db.Model { +/// @StringProperty +/// String name; +/// +/// @IntProperty +/// int age; +/// +/// @DateTimeProperty +/// DateTime dateOfBirth; +/// } +/// +/// These classes must either extend [Model] or [ExpandoModel]. Furthermore +/// they must have an empty default constructor which can be used to construct +/// model objects when doing lookups/queries from datastore. class ModelDBImpl implements ModelDB { - // Map of properties for a given [ModelDescription] - final Map> _modelProperties = {}; - - // Arbitrary state a model description might want to have - final Map _modelDescriptionStates = {}; - - // Needed when getting data from datastore to instantiate model objects. - final Map _modelDescriptionByKind = {}; - final Map _modelClasses = {}; - final Map _typeByModelDescription = {}; - - // Needed when application gives us model objects. - final Map _modelDescriptionByType = {}; - - - /** - * Initializes a new [ModelDB] from all libraries. - * - * This will scan all libraries for [Model] classes and their - * [ModelDescription] annotations. It will also scan all [Property] instances - * on all [ModelDescription] objects. - * - * Once all libraries have been scanned it will call each [ModelDescription]s - * 'initialize' method and stores the returned state object (this can be - * queried later with [modelDescriptionState]. - * - * Afterwards every [ModelDescription] will be asked whether it wants to - * register a kind name and if so, that kind name will be associated with it. - * - * In case an error is encountered (e.g. two [ModelDescription] classes with - * the same kind name) a [StateError] will be thrown. - */ + final Map<_ModelDescription, Map> _modelDesc2Properties = {}; + final Map _kind2ModelDesc = {}; + final Map<_ModelDescription, mirrors.ClassMirror> _modelDesc2ClassMirror = {}; + final Map<_ModelDescription, Type> _type2ModelDesc = {}; + final Map _modelDesc2Type = {}; + + /// Initializes a new [ModelDB] from all libraries. + /// + /// This will scan all libraries for classes with a [Kind] annotation. + /// + /// In case an error is encountered (e.g. two model classes with the same kind + /// name) a [StateError] will be thrown. ModelDBImpl() { // WARNING: This is O(n) of the source code, which is very bad! // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)` _initialize(mirrors.currentMirrorSystem().libraries.values); } - /** - * Initializes a new [ModelDB] only using the library [librarySymbol]. - * - * See also the default [ModelDB] constructor. - */ + /// Initializes a new [ModelDB] from all libraries. + /// + /// This will scan the given [librarySymnbol] for classes with a [Kind] + /// annotation. + /// + /// In case an error is encountered (e.g. two model classes with the same kind + /// name) a [StateError] will be thrown. ModelDBImpl.fromLibrary(Symbol librarySymbol) { _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); } - - /** - * Converts a [datastore.Key] to a [Key]. - */ + /// Converts a [datastore.Key] to a [Key]. 
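// A brief sketch of how the second constructor might be used; the library
// symbol below is a placeholder. Scanning a single library avoids the O(n)
// walk over all loaded libraries performed by the default ModelDBImpl()
// constructor.
var modelDb = new ModelDBImpl.fromLibrary(#my_application.models);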
Key fromDatastoreKey(datastore.Key datastoreKey) { var namespace = new Partition(datastoreKey.partition.namespace); Key key = namespace.emptyKey; for (var element in datastoreKey.elements) { - var type = _typeByModelDescription[_modelDescriptionByKind[element.kind]]; + var type = _type2ModelDesc[_kind2ModelDesc[element.kind]]; assert (type != null); key = key.append(type, id: element.id); } return key; } - /** - * Converts a [Key] to a [datastore.Key]. - */ + /// Converts a [Key] to a [datastore.Key]. datastore.Key toDatastoreKey(Key dbKey) { List elements = []; var currentKey = dbKey; while (!currentKey.isEmpty) { var id = currentKey.id; - var modelDescription = modelDescriptionForType(currentKey.type); - var idProperty = _modelProperties[modelDescription]['id']; + var modelDescription = _modelDescriptionForType(currentKey.type); var kind = modelDescription.kindName(this); - if (idProperty is IntProperty && (id != null && id is! int)) { + bool useIntegerId = modelDescription.useIntegerId; + + if (useIntegerId && id != null && id is! int) { throw new ArgumentError('Expected an integer id property but ' 'id was of type ${id.runtimeType}'); } - if (idProperty is StringProperty && (id != null && id is! String)) { + if (!useIntegerId && (id != null && id is! String)) { throw new ArgumentError('Expected a string id property but ' 'id was of type ${id.runtimeType}'); } @@ -99,12 +97,10 @@ class ModelDBImpl implements ModelDB { partition: new datastore.Partition(partition.namespace)); } - /** - * Converts a [Model] instance to a [datastore.Entity]. - */ + /// Converts a [Model] instance to a [datastore.Entity]. datastore.Entity toDatastoreEntity(Model model) { try { - var modelDescription = modelDescriptionForType(model.runtimeType); + var modelDescription = _modelDescriptionForType(model.runtimeType); return modelDescription.encodeModel(this, model); } catch (error, stack) { throw @@ -112,15 +108,13 @@ class ModelDBImpl implements ModelDB { } } - /** - * Converts a [datastore.Entity] to a [Model] instance. - */ + /// Converts a [datastore.Entity] to a [Model] instance. Model fromDatastoreEntity(datastore.Entity entity) { if (entity == null) return null; Key key = fromDatastoreKey(entity.key); var kind = entity.key.elements.last.kind; - var modelDescription = _modelDescriptionByKind[kind]; + var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { throw new StateError('Trying to deserialize entity of kind ' '$kind, but no Model class available for it.'); @@ -133,33 +127,44 @@ class ModelDBImpl implements ModelDB { } } + /// Returns the string representation of the kind of model class [type]. + /// + /// If the model class `type` is not found it will throw an `ArgumentError`. String kindName(Type type) { - return _modelDescriptionByType[type]._kind; + var kind = _modelDesc2Type[type].kind; + if (kind == null) { + throw new ArgumentError( + 'The class $type was not associated with a kind.'); + } + return kind; } + /// Returns the name of the property corresponding to the kind [kind] and + /// [fieldName]. 
String fieldNameToPropertyName(String kind, String fieldName) { - return _modelDescriptionByKind[kind].fieldNameToPropertyName(fieldName); + var modelDescription = _kind2ModelDesc[kind]; + if (modelDescription == null) { + throw new ArgumentError('The kind $kind is unknown.'); + } + return modelDescription.fieldNameToPropertyName(fieldName); } - Iterable get modelDescriptions { - return _modelDescriptionByType.values; - } - Map propertiesForModel( - ModelDescriptionImpl modelDescription) { - return _modelProperties[modelDescription]; + Iterable<_ModelDescription> get _modelDescriptions { + return _modelDesc2Type.values; } - ModelDescriptionImpl modelDescriptionForType(Type type) { - return _modelDescriptionByType[type]; + Map _propertiesForModel( + _ModelDescription modelDescription) { + return _modelDesc2Properties[modelDescription]; } - mirrors.ClassMirror modelClass(ModelDescriptionImpl md) { - return _modelClasses[md]; + _ModelDescription _modelDescriptionForType(Type type) { + return _modelDesc2Type[type]; } - modelDescriptionState(ModelDescriptionImpl modelDescription) { - return _modelDescriptionStates[modelDescription]; + mirrors.ClassMirror _modelClass(_ModelDescription md) { + return _modelDesc2ClassMirror[md]; } @@ -168,69 +173,76 @@ class ModelDBImpl implements ModelDB { lm.declarations.values .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) .forEach((mirrors.ClassMirror declaration) { - var modelDescription = _descriptionFromModelClass(declaration); - if (modelDescription != null) { - _newModelDescription(declaration, modelDescription); - } + _tryLoadNewModelClass(declaration); }); }); // Ask every [ModelDescription] to compute whatever global state it wants // to have. - for (var modelDescription in modelDescriptions) { - _modelDescriptionStates[modelDescription] = - modelDescription.initialize(this); + for (var modelDescription in _modelDescriptions) { + modelDescription.initialize(this); } - // Ask every [ModelDescription] whether we should register it with a given // kind name. - for (var modelDescription in modelDescriptions) { - if (modelDescription.registerKind(this)) { - var kindName = modelDescription.kindName(this); - if (_modelDescriptionByKind.containsKey(kindName)) { - throw new StateError( - 'Cannot have two ModelDescriptions ' - 'with the same kind ($kindName)'); - } - _modelDescriptionByKind[kindName] = modelDescription; + for (var modelDescription in _modelDescriptions) { + var kindName = modelDescription.kindName(this); + if (_kind2ModelDesc.containsKey(kindName)) { + throw new StateError( + 'Cannot have two ModelDescriptions ' + 'with the same kind ($kindName)'); } + _kind2ModelDesc[kindName] = modelDescription; } } - void _newModelDescription(mirrors.ClassMirror modelClass, - ModelDescription modelDesc) { - assert (!_modelDescriptionByType.containsKey(modelClass.reflectedType)); - - var modelDescImpl; - if (modelDesc is ExpandoModelDescription) { - modelDescImpl = new ExpandoModelDescriptionImpl(modelDesc.kind); - } else { - modelDescImpl = new ModelDescriptionImpl(modelDesc.kind); + void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) { + Kind kindAnnotation; + for (mirrors.InstanceMirror instance in classMirror.metadata) { + if (instance.reflectee.runtimeType == Kind) { + if (kindAnnotation != null) { + throw new StateError( + 'Cannot have more than one ModelMetadata() annotation ' + 'on a Model class'); + } + kindAnnotation = instance.reflectee; + } } - // Map the [modelClass.runtimeType] to this [modelDesc] and vice versa. 
- _modelDescriptionByType[modelClass.reflectedType] = modelDescImpl; - _typeByModelDescription[modelDescImpl] = modelClass.reflectedType; - // Map this [modelDesc] to the [modelClass] mirror for easy instantiation. - _modelClasses[modelDescImpl] = modelClass; + if (kindAnnotation != null) { + var name = kindAnnotation.name; + var integerId = kindAnnotation.idType == IdType.Integer; + var stringId = kindAnnotation.idType == IdType.String; - // TODO: Move this out to the model description classes. + // Fall back to the class name. + if (name == null) { + name = mirrors.MirrorSystem.getName(classMirror.simpleName); + } - // Get all properties, validate that the 'id' property is valid. - var properties = _propertiesFromModelDescription(modelDesc); - var idProperty = properties[ModelDescriptionImpl.ID_FIELDNAME]; - if (idProperty == null || - (idProperty is! IntProperty && idProperty is! StringProperty)) { - throw new StateError( - 'You need to have an id property and it has to be either an ' - '[IntProperty] or a [StringProperty].'); + // This constraint should be guaranteed by the Kind() const constructor. + assert ((integerId && !stringId) || (!integerId && stringId)); + + _tryLoadNewModelClassFull(classMirror, name, integerId); } - if (idProperty.propertyName != null) { - throw new StateError( - 'You can not have a new name for the id property.'); + } + + void _tryLoadNewModelClassFull(mirrors.ClassMirror modelClass, + String name, + bool useIntegerId) { + assert (!_modelDesc2Type.containsKey(modelClass.reflectedType)); + + var modelDesc; + if (_isExpandoClass(modelClass)) { + modelDesc = new _ExpandoModelDescription(name, useIntegerId); + } else { + modelDesc = new _ModelDescription(name, useIntegerId); } - _modelProperties[modelDescImpl] = properties; + + _type2ModelDesc[modelDesc] = modelClass.reflectedType; + _modelDesc2Type[modelClass.reflectedType] = modelDesc; + _modelDesc2ClassMirror[modelDesc] = modelClass; + _modelDesc2Properties[modelDesc] = + _propertiesFromModelDescription(modelClass); // Ensure we have an empty constructor. bool defaultConstructorFound = false; @@ -252,10 +264,7 @@ class ModelDBImpl implements ModelDB { } Map _propertiesFromModelDescription( - ModelDescription modelDescription) { - var modelMirror = mirrors.reflect(modelDescription); - var modelClassMirror = mirrors.reflectClass(modelDescription.runtimeType); - + mirrors.ClassMirror modelClassMirror) { var properties = new Map(); var propertyNames = new Set(); @@ -263,33 +272,43 @@ class ModelDBImpl implements ModelDB { while (modelClassMirror.superclass != null) { var memberMap = modelClassMirror.instanceMembers; // Loop over all declarations (which includes fields) - modelClassMirror.declarations.forEach((Symbol s, _) { - // Look if we do have a method for [s] - if (memberMap.containsKey(s) && memberMap[s].isGetter) { - // Get a String representation of the field and the value. - var fieldName = mirrors.MirrorSystem.getName(s); - var fieldValue = modelMirror.getField(s).reflectee; - // If the field value is a Property instance we add it to the list - // of properties. - // Fields with '__' are reserved and will not be used. - if (!fieldName.startsWith('__') && - fieldValue != null && - fieldValue is Property) { - var propertyName = fieldValue.propertyName; + modelClassMirror.declarations.forEach((Symbol fieldSymbol, + mirrors.DeclarationMirror decl) { + // Look if the symbol is a getter and we have metadata attached to it. 
+ if (memberMap.containsKey(fieldSymbol) && + memberMap[fieldSymbol].isGetter && + decl.metadata != null) { + var propertyAnnotations = decl.metadata + .map((mirrors.InstanceMirror mirror) => mirror.reflectee) + .where((Object property) => property is Property) + .toList(); + + if (propertyAnnotations.length > 1) { + throw new StateError( + 'Cannot have more than one Property annotation on a model ' + 'field.'); + } else if (propertyAnnotations.length == 1) { + var property = propertyAnnotations.first; + + // Get a String representation of the field and the value. + var fieldName = mirrors.MirrorSystem.getName(fieldSymbol); + + // Determine the name to use for the property in datastore. + var propertyName = (property as Property).propertyName; if (propertyName == null) propertyName = fieldName; if (properties.containsKey(fieldName)) { throw new StateError( - 'Cannot have two Property objects describing the same Model ' - 'property name in a ModelDescription class hierarchy.'); + 'Cannot have two Property objects describing the same field ' + 'in a model object class hierarchy.'); } if (propertyNames.contains(propertyName)) { throw new StateError( 'Cannot have two Property objects mapping to the same ' - 'datastore property name ($propertyName).'); + 'datastore property name "$propertyName".'); } - properties[fieldName] = fieldValue; + properties[fieldName] = property; propertyNames.add(propertyName); } } @@ -300,72 +319,58 @@ class ModelDBImpl implements ModelDB { return properties; } - ModelDescription _descriptionFromModelClass(mirrors.ClassMirror classMirror) { - var result; - for (mirrors.InstanceMirror instance in classMirror.metadata) { - if (instance.reflectee.runtimeType == ModelMetadata) { - if (result != null) { - throw new StateError( - 'Cannot have more than one ModelMetadata() annotation ' - 'on a Model class'); - } - result = instance.getField(#description).reflectee; + bool _isExpandoClass(mirrors.ClassMirror modelClass) { + while (modelClass.superclass != modelClass) { + if (modelClass.reflectedType == ExpandoModel) { + return true; + } else if (modelClass.reflectedType == Model) { + return false; } + modelClass = modelClass.superclass; } - return result; + throw new StateError('This should be unreachable.'); } } -class ModelDescriptionImpl { - static String ID_FIELDNAME = 'id'; - - HashMap property2FieldName; - HashMap field2PropertyName; - Set indexedProperties; - Set unIndexedProperties; +class _ModelDescription { + final HashMap _property2FieldName = + new HashMap(); + final HashMap _field2PropertyName = + new HashMap(); + final Set _indexedProperties = new Set(); + final Set _unIndexedProperties = new Set(); - final String _kind; + final String kind; + final bool useIntegerId; - ModelDescriptionImpl(this._kind); + _ModelDescription(this.kind, this.useIntegerId); - initialize(ModelDBImpl db) { + void initialize(ModelDBImpl db) { // Compute propertyName -> fieldName mapping. - property2FieldName = new HashMap(); - field2PropertyName = new HashMap(); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { + db._propertiesForModel(this).forEach((String fieldName, Property prop) { // The default of a datastore property name is the fieldName. // It can be overridden with [Property.propertyName]. 
String propertyName = prop.propertyName; if (propertyName == null) propertyName = fieldName; - if (fieldName != ID_FIELDNAME) { - property2FieldName[propertyName] = fieldName; - field2PropertyName[fieldName] = propertyName; - } + _property2FieldName[propertyName] = fieldName; + _field2PropertyName[fieldName] = propertyName; }); // Compute properties & unindexed properties - indexedProperties = new Set(); - unIndexedProperties = new Set(); - - db.propertiesForModel(this).forEach((String fieldName, Property prop) { - if (fieldName != ID_FIELDNAME) { - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; - - if (prop.indexed) { - indexedProperties.add(propertyName); - } else { - unIndexedProperties.add(propertyName); - } + db._propertiesForModel(this).forEach((String fieldName, Property prop) { + String propertyName = prop.propertyName; + if (propertyName == null) propertyName = fieldName; + + if (prop.indexed) { + _indexedProperties.add(propertyName); + } else { + _unIndexedProperties.add(propertyName); } }); } - bool registerKind(ModelDBImpl db) => true; - - String kindName(ModelDBImpl db) => _kind; + String kindName(ModelDBImpl db) => kind; datastore.Entity encodeModel(ModelDBImpl db, Model model) { var key = db.toDatastoreKey(model.key); @@ -373,12 +378,12 @@ class ModelDescriptionImpl { var properties = {}; var mirror = mirrors.reflect(model); - db.propertiesForModel(this).forEach((String fieldName, Property prop) { + db._propertiesForModel(this).forEach((String fieldName, Property prop) { _encodeProperty(db, model, mirror, properties, fieldName, prop); }); return new datastore.Entity( - key, properties, unIndexedProperties: unIndexedProperties); + key, properties, unIndexedProperties: _unIndexedProperties); } _encodeProperty(ModelDBImpl db, Model model, mirrors.InstanceMirror mirror, @@ -386,30 +391,28 @@ class ModelDescriptionImpl { String propertyName = prop.propertyName; if (propertyName == null) propertyName = fieldName; - if (fieldName != ID_FIELDNAME) { - var value = mirror.getField( - mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; - if (!prop.validate(db, value)) { - throw new StateError('Property validation failed for ' - 'property $fieldName while trying to serialize entity of kind ' - '${model.runtimeType}. '); - } - properties[propertyName] = prop.encodeValue(db, value); + var value = mirror.getField( + mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed for ' + 'property $fieldName while trying to serialize entity of kind ' + '${model.runtimeType}. '); } + properties[propertyName] = prop.encodeValue(db, value); } Model decodeEntity(ModelDBImpl db, Key key, datastore.Entity entity) { if (entity == null) return null; // NOTE: this assumes a default constructor for the model classes! 
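// To make the validation step above concrete: a `required` property that is
// still null fails Property.validate, so encoding such a model (for example
// through ModelDBImpl.toDatastoreEntity) is expected to surface the StateError
// thrown by _encodeProperty. The Comment class below is hypothetical.
import 'package:gcloud/db.dart' as db;

@db.Kind()
class Comment extends db.Model {
  @db.StringProperty(required: true)
  String text;
}
// new Comment() with `text` left null fails validation with
// 'Property validation failed for property text ...'.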
- var classMirror = db.modelClass(this); + var classMirror = db._modelClass(this); var mirror = classMirror.newInstance(const Symbol(''), []); // Set the id and the parent key mirror.reflectee.id = key.id; mirror.reflectee.parentKey = key.parent; - db.propertiesForModel(this).forEach((String fieldName, Property prop) { + db._propertiesForModel(this).forEach((String fieldName, Property prop) { _decodeProperty(db, entity, mirror, fieldName, prop); }); return mirror.reflectee; @@ -420,30 +423,28 @@ class ModelDescriptionImpl { Property prop) { String propertyName = fieldNameToPropertyName(fieldName); - if (fieldName != ID_FIELDNAME) { - var rawValue = entity.properties[propertyName]; - var value = prop.decodePrimitiveValue(db, rawValue); + var rawValue = entity.properties[propertyName]; + var value = prop.decodePrimitiveValue(db, rawValue); - if (!prop.validate(db, value)) { - throw new StateError('Property validation failed while ' - 'trying to deserialize entity of kind ' - '${entity.key.elements.last.kind} (property name: $prop)'); - } - - mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + if (!prop.validate(db, value)) { + throw new StateError('Property validation failed while ' + 'trying to deserialize entity of kind ' + '${entity.key.elements.last.kind} (property name: $prop)'); } + + mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); } String fieldNameToPropertyName(String fieldName) { - return field2PropertyName[fieldName]; + return _field2PropertyName[fieldName]; } String propertyNameToFieldName(ModelDBImpl db, String propertySearchName) { - return property2FieldName[propertySearchName]; + return _property2FieldName[propertySearchName]; } Object encodeField(ModelDBImpl db, String fieldName, Object value) { - Property property = db.propertiesForModel(this)[fieldName]; + Property property = db._propertiesForModel(this)[fieldName]; if (property != null) return property.encodeValue(db, value); return null; } @@ -457,18 +458,19 @@ class ModelDescriptionImpl { // - we may end up removing properties after a read-write cycle // - we may end up dropping added properties in a write // ([usedNames] := [realFieldNames] + [realPropertyNames]) -class ExpandoModelDescriptionImpl extends ModelDescriptionImpl { +class _ExpandoModelDescription extends _ModelDescription { Set realFieldNames; Set realPropertyNames; Set usedNames; - ExpandoModelDescriptionImpl(String kind) : super(kind); + _ExpandoModelDescription(String kind, bool useIntegerId) + : super(kind, useIntegerId); - initialize(ModelDBImpl db) { + void initialize(ModelDBImpl db) { super.initialize(db); - realFieldNames = new Set.from(field2PropertyName.keys); - realPropertyNames = new Set.from(property2FieldName.keys); + realFieldNames = new Set.from(_field2PropertyName.keys); + realPropertyNames = new Set.from(_property2FieldName.keys); usedNames = new Set()..addAll(realFieldNames)..addAll(realPropertyNames); } diff --git a/pkgs/gcloud/lib/src/db/model_description.dart b/pkgs/gcloud/lib/src/db/model_description.dart deleted file mode 100644 index 1cd03b4a..00000000 --- a/pkgs/gcloud/lib/src/db/model_description.dart +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -part of gcloud.db; - -/// Subclasses of [ModelDescription] describe how to map a dart model object -/// to a Datastore Entity. 
-/// -/// Please see [ModelMetadata] for an example on how to use them. -class ModelDescription { - final String kind; - - const ModelDescription(this.kind); -} - -/// Subclasses of [ExpandoModelDescription] describe how to map a dart expando -/// model object to a Datastore Entity. -/// -/// Please see [ModelMetadata] for an example on how to use them. -class ExpandoModelDescription extends ModelDescription { - const ExpandoModelDescription(String kind) : super(kind); -} diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index eaa3d242..d27938c3 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -104,9 +104,6 @@ class Partition { * * Every model class has a [id] -- which must be an integer or a string, and * a [parentKey]. The [key] getter is returning the key for the model object. - * - * Every concrete model class inheriting (directly or indirectly) from [Model] - * needs a [ModelMetadata] annotation containing it's [ModelDescription]. */ abstract class Model { Object id; @@ -118,9 +115,6 @@ abstract class Model { /** * Superclass for all expanded model classes. * - * The subclasses of this model must have a [ModelMetadata] annotation - * containing a [ExpandoModelDescription]. - * * The [ExpandoModel] class adds support for having dynamic properties. You can * set arbitrary fields on these models. The expanded values must be values * accepted by the [RawDatastore] implementation. diff --git a/pkgs/gcloud/lib/src/db/properties.dart b/pkgs/gcloud/lib/src/db/properties.dart deleted file mode 100644 index 5bf5e669..00000000 --- a/pkgs/gcloud/lib/src/db/properties.dart +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -part of gcloud.db; - -/// Describes a property of an Entity. -/// -/// Please see [ModelMetadata] for an example on how to use them. -abstract class Property { - /// The name of the property. - /// - /// If it is `null`, the name will be the same as used in the - /// [ModelDescription]. - final String propertyName; - - /// Specifies whether this property is required or not. - /// - /// If required is `true`, it will be enforced when saving model objects to - /// the datastore and when retrieving them. - final bool required; - - /// Specifies whether this property should be indexed or not. - /// - /// When running queries no this property, it is necessary to set [indexed] to - /// `true`. - final bool indexed; - - const Property({this.propertyName, this.required: false, this.indexed: true}); - - bool validate(ModelDB db, Object value) { - if (required && value == null) return false; - return true; - } - - Object encodeValue(ModelDB db, Object value); - - Object decodePrimitiveValue(ModelDB db, Object value); -} - -/// An abstract base class for primitive properties which can e.g. be used -/// within a composed `ListProperty`. -abstract class PrimitiveProperty extends Property { - const PrimitiveProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - Object encodeValue(ModelDB db, Object value) => value; - - Object decodePrimitiveValue(ModelDB db, Object value) => value; -} - -/// A boolean [Property]. 
-/// -/// It will validate that values are booleans before writing them to the -/// datastore and when reading them back. -class BoolProperty extends PrimitiveProperty { - const BoolProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is bool); -} - -/// A integer [Property]. -/// -/// It will validate that values are integers before writing them to the -/// datastore and when reading them back. -class IntProperty extends PrimitiveProperty { - const IntProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is int); -} - -/// A double [Property]. -/// -/// It will validate that values are doubles before writing them to the -/// datastore and when reading them back. -class DoubleProperty extends PrimitiveProperty { - const DoubleProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is double); -} - -/// A string [Property]. -/// -/// It will validate that values are strings before writing them to the -/// datastore and when reading them back. -class StringProperty extends PrimitiveProperty { - const StringProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is String); -} - -/// A key [Property]. -/// -/// It will validate that values are keys before writing them to the -/// datastore and when reading them back. -class ModelKeyProperty extends PrimitiveProperty { - const ModelKeyProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is Key); - - Object encodeValue(ModelDB db, Object value) { - if (value == null) return null; - return db.toDatastoreKey(value); - } - - Object decodePrimitiveValue(ModelDB db, Object value) { - if (value == null) return null; - return db.fromDatastoreKey(value as datastore.Key); - } -} - -/// A binary blob [Property]. -/// -/// It will validate that values are blobs before writing them to the -/// datastore and when reading them back. Blob values will be represented by -/// List. -class BlobProperty extends PrimitiveProperty { - const BlobProperty({String propertyName, bool required: false}) - : super(propertyName: propertyName, required: required, indexed: false); - - // NOTE: We don't validate that the entries of the list are really integers - // of the range 0..255! - // If an untyped list was created the type check will always succeed. i.e. 
- // "[1, true, 'bar'] is List" evaluates to `true` - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is List); - - Object encodeValue(ModelDB db, Object value) { - if (value == null) return null; - return new datastore.BlobValue(value); - } - - Object decodePrimitiveValue(ModelDB db, Object value) { - if (value == null) return null; - - datastore.BlobValue blobValue = value; - return blobValue.bytes; - } -} - -/// A datetime [Property]. -/// -/// It will validate that values are DateTime objects before writing them to the -/// datastore and when reading them back. -class DateTimeProperty extends PrimitiveProperty { - const DateTimeProperty( - {String propertyName, bool required: false, bool indexed: true}) - : super(propertyName: propertyName, required: required, indexed: indexed); - - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is DateTime); - - Object decodePrimitiveValue(ModelDB db, Object value) { - if (value is int) { - return - new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); - } - return value; - } -} - - -/// A composed list [Property], with a `subProperty` for the list elements. -/// -/// It will validate that values are List objects before writing them to the -/// datastore and when reading them back. It will also validate the elements -/// of the list itself. -class ListProperty extends Property { - final PrimitiveProperty subProperty; - - // TODO: We want to support optional list properties as well. - // Get rid of "required: true" here. - const ListProperty(this.subProperty, - {String propertyName, bool indexed: true}) - : super(propertyName: propertyName, required: true, indexed: indexed); - - bool validate(ModelDB db, Object value) { - if (!super.validate(db, value) || value is! List) return false; - - for (var entry in value) { - if (!subProperty.validate(db, entry)) return false; - } - return true; - } - - Object encodeValue(ModelDB db, Object value) { - if (value == null) return null; - List list = value; - if (list.length == 0) return null; - if (list.length == 1) return list[0]; - return list.map( - (value) => subProperty.encodeValue(db, value)).toList(); - } - - Object decodePrimitiveValue(ModelDB db, Object value) { - if (value == null) return []; - if (value is! List) return [value]; - return (value as List) - .map((entry) => subProperty.decodePrimitiveValue(db, entry)) - .toList(); - } -} - -/// A convenience [Property] for list of strings. 
-class StringListProperty extends ListProperty { - const StringListProperty({String propertyName, bool indexed: true}) - : super(const StringProperty(), - propertyName: propertyName, indexed: indexed); -} diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test.dart index ceee4067..490027bd 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test.dart @@ -50,10 +50,15 @@ import 'package:unittest/unittest.dart'; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const PersonDesc()) +@db.Kind() class Person extends db.Model { + @db.StringProperty() String name; + + @db.IntProperty() int age; + + @db.ModelKeyProperty() db.Key wife; operator==(Object other) => sameAs(other); @@ -70,9 +75,13 @@ class Person extends db.Model { String toString() => 'Person(id: $id, name: $name, age: $age)'; } -@db.ModelMetadata(const UserDesc()) + +@db.Kind() class User extends Person { + @db.StringProperty() String nickname; + + @db.StringListProperty(propertyName: 'language') List languages = const []; sameAs(Object other) { @@ -100,26 +109,13 @@ class User extends Person { 'User(${super.toString()}, nickname: $nickname, languages: $languages'; } -class PersonDesc extends db.ModelDescription { - final id = const db.IntProperty(); - final name = const db.StringProperty(); - final age = const db.IntProperty(); - final wife = const db.ModelKeyProperty(); - const PersonDesc({String kind: 'Person'}) : super(kind); -} - -class UserDesc extends PersonDesc { - final nickname = const db.StringProperty(); - final languages = - const db.StringListProperty(propertyName: 'language'); - const UserDesc({String kind: 'User'}) : super(kind: kind); -} - - -@db.ModelMetadata(const ExpandoPersonDesc()) +@db.Kind() class ExpandoPerson extends db.ExpandoModel { + @db.StringProperty() String name; + + @db.StringProperty(propertyName: 'NN') String nickname; operator==(Object other) { @@ -138,14 +134,6 @@ class ExpandoPerson extends db.ExpandoModel { } } -class ExpandoPersonDesc extends db.ExpandoModelDescription { - final id = const db.IntProperty(); - final name = const db.StringProperty(); - final nickname = const db.StringProperty(propertyName: 'NN'); - - const ExpandoPersonDesc() : super('ExpandoPerson'); -} - Future sleep(Duration duration) { var completer = new Completer(); diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 8a0560fc..bf8be819 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -15,9 +15,8 @@ import 'package:unittest/unittest.dart'; import 'model_dbs/duplicate_kind.dart' as test1; import 'model_dbs/duplicate_property.dart' as test2; import 'model_dbs/multiple_annotations.dart' as test3; -import 'model_dbs/invalid_id_property.dart' as test4; -import 'model_dbs/duplicate_fieldname.dart' as test5; -import 'model_dbs/no_default_constructor.dart' as test6; +import 'model_dbs/duplicate_fieldname.dart' as test4; +import 'model_dbs/no_default_constructor.dart' as test5; main() { newModelDB(Symbol symbol)=> new ModelDBImpl.fromLibrary(symbol); @@ -39,11 +38,6 @@ main() { newModelDB(#gcloud.db.model_test.multiple_annotations); }), throwsA(isStateError)); }); - test('invalid_id', () { - expect(new Future.sync(() { - newModelDB(#gcloud.db.model_test.invalid_id); - }), throwsA(isStateError)); - }); test('duplicate_fieldname', () { expect(new Future.sync(() { newModelDB(#gcloud.db.model_test.duplicate_fieldname); diff --git 
a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 7f376285..33b6bce0 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -6,21 +6,14 @@ library gcloud.db.model_test.duplicate_fieldname; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const ADesc()) -class A extends db.Model {} - -@db.ModelMetadata(const BDesc()) -class B extends A {} - - -class ADesc extends db.ModelDescription { - final id = const db.IntProperty(); - - final foo = const db.IntProperty(propertyName: 'foo'); - const ADesc({String kind: 'A'}) : super(kind); +@db.Kind() +class A extends db.Model { + @db.IntProperty() + int foo; } -class BDesc extends ADesc { - final foo = const db.IntProperty(propertyName: 'bar'); - const BDesc() : super(kind: 'B'); +@db.Kind() +class B extends A { + @db.IntProperty(propertyName: 'bar') + int foo; } diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index 46a90cd6..89ae7eb0 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -6,18 +6,8 @@ library gcloud.db.model_test.duplicate_kind; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const ADesc()) +@db.Kind() class A extends db.Model { } -class ADesc extends db.ModelDescription { - final id = const db.IntProperty(); - const ADesc() : super('A'); -} - -@db.ModelMetadata(const BDesc()) +@db.Kind(name: 'A') class B extends db.Model { } - -class BDesc extends db.ModelDescription { - final id = const db.IntProperty(); - const BDesc() : super('A'); -} diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart index 5b5e07b6..6e770798 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -6,12 +6,11 @@ library gcloud.db.model_test.duplicate_property; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const ADesc()) -class A extends db.Model { } +@db.Kind() +class A extends db.Model { + @db.IntProperty() + int foo; -class ADesc extends db.ModelDescription { - final id = const db.IntProperty(); - final foo = const db.IntProperty(propertyName: 'foo'); - final bar = const db.IntProperty(propertyName: 'foo'); - const ADesc() : super('A'); + @db.IntProperty(propertyName: 'foo') + int bar; } diff --git a/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart b/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart deleted file mode 100644 index d46332c3..00000000 --- a/pkgs/gcloud/test/db/model_dbs/invalid_id_property.dart +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. 
- -library gcloud.db.model_test.invalid_id; - -import 'package:gcloud/db.dart' as db; - -@db.ModelMetadata(const ADesc()) -class A extends db.Model { } - -class ADesc extends db.ModelDescription { - final id = const db.DateTimeProperty(); - const ADesc() : super('A'); -} diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 07fad6cd..30e11a91 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -6,11 +6,6 @@ library gcloud.db.model_test.multiple_annotations; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const ADesc()) -@db.ModelMetadata(const ADesc()) +@db.Kind() +@db.Kind() class A extends db.Model { } - -class ADesc extends db.ModelDescription { - final id = const db.IntProperty(); - const ADesc() : super('A'); -} diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart index 60bc9b67..1c3b3d5e 100644 --- a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -6,12 +6,7 @@ library gcloud.db.model_test.no_default_constructor; import 'package:gcloud/db.dart' as db; -@db.ModelMetadata(const ADesc()) +@db.Kind() class A extends db.Model { A(int i); } - -class ADesc extends db.ModelDescription { - final id = const db.IntProperty(); - const ADesc() : super('A'); -} From afe5b5e408470b3fb1b160b4336500005599fd7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 12:53:35 +0100 Subject: [PATCH 026/239] Add comparison to Acl objects This also adds toString on Acl instances, and uses const where possible. R=lrn@google.com BUG= Review URL: https://codereview.chromium.org//687973003 --- pkgs/gcloud/lib/storage.dart | 97 ++++++++++++++++++---- pkgs/gcloud/test/storage/storage_test.dart | 45 ++++++++++ 2 files changed, 125 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 68368e7d..53c1a00f 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -62,6 +62,20 @@ export 'common.dart'; part 'src/storage_impl.dart'; +int _jenkinsHash(List e) { + const _HASH_MASK = 0x3fffffff; + int hash = 0; + for (int i = 0; i < e.length; i++) { + int c = e[i].hashCode; + hash = (hash + c) & _HASH_MASK; + hash = (hash + (hash << 10)) & _HASH_MASK; + hash ^= (hash >> 6); + } + hash = (hash + (hash << 3)) & _HASH_MASK; + hash ^= (hash >> 11); + hash = (hash + (hash << 15)) & _HASH_MASK; + return hash; +} /// An ACL (Access Control List) describes access rights to buckets and /// objects. @@ -70,7 +84,7 @@ part 'src/storage_impl.dart'; /// which individually prevent or grant access. /// The access controls are described by [AclEntry] objects. class Acl { - final _entries; + final List _entries; /// The entries in the ACL. 
List get entries => new UnmodifiableListView(_entries); @@ -85,6 +99,24 @@ class Acl { List _toObjectAccessControlList() { return _entries.map((entry) => entry._toObjectAccessControl()).toList(); } + + int get hashCode => _jenkinsHash(_entries); + + bool operator==(Object other) { + if (other is Acl) { + List entries = _entries; + List otherEntries = other._entries; + if (entries.length != otherEntries.length) return false; + for (int i = 0; i < entries.length; i++) { + if (entries[i] != otherEntries[i]) return false; + } + return true; + } else { + return false; + } + } + + String toString() => 'Acl($_entries)'; } /// An ACL entry specifies that an entity has a specific access permission. @@ -94,7 +126,7 @@ class AclEntry { final AclScope scope; final AclPermission permission; - AclEntry(this.scope, this.permission); + const AclEntry(this.scope, this.permission); storage.BucketAccessControl _toBucketAccessControl() { var acl = new storage.BucketAccessControl(); @@ -109,6 +141,16 @@ class AclEntry { acl.role = permission._storageObjectRole; return acl; } + + int get hashCode => _jenkinsHash([scope, permission]); + + bool operator==(Object other) { + return other is AclEntry && + scope == other.scope && + permission == other.permission; + } + + String toString() => 'AclEntry($scope, $permission)'; } /// An ACL scope specifies an entity for which a permission applies. @@ -175,6 +217,14 @@ abstract class AclScope { throw new UnsupportedError('Unexpected ACL scope'); } } + + int get hashCode => _jenkinsHash([_type, _id]); + + bool operator==(Object other) { + return other is AclScope && _type == other._type && _id == other._id; + } + + String toString() => 'AclScope($_storageEntity)'; } /// An ACL scope for an entity identified by a 'Google Storage ID'. @@ -182,7 +232,7 @@ abstract class AclScope { /// The [storageId] is a string of 64 hexadecimal digits that identifies a /// specific Google account holder or a specific Google group. class StorageIdScope extends AclScope { - StorageIdScope(String storageId) + const StorageIdScope(String storageId) : super._(AclScope._TYPE_STORAGE_ID, storageId); /// Google Storage ID. @@ -191,7 +241,7 @@ class StorageIdScope extends AclScope { /// An ACL scope for an entity identified by an individual email address. class AccountScope extends AclScope { - AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email); + const AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email); /// Email address. String get email => _id; @@ -199,7 +249,7 @@ class AccountScope extends AclScope { /// An ACL scope for an entity identified by an Google Groups email. class GroupScope extends AclScope { - GroupScope(String group): super._(AclScope._TYPE_GROUP, group); + const GroupScope(String group): super._(AclScope._TYPE_GROUP, group); /// Group name. String get group => _id; @@ -207,7 +257,7 @@ class GroupScope extends AclScope { /// An ACL scope for an entity identified by a domain name. class DomainScope extends AclScope { - DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain); + const DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain); /// Domain name. String get domain => _id; @@ -246,6 +296,14 @@ class AclPermission { String get _storageBucketRole => _id; String get _storageObjectRole => this == WRITE ? 
FULL_CONTROL._id : _id; + + int get hashCode => _id.hashCode; + + bool operator==(Object other) { + return other is AclPermission && _id == other._id; + } + + String toString() => 'AclPermission($_id)'; } /// Definition of predefined ACLs. @@ -258,37 +316,42 @@ class AclPermission { /// be present. For a description of these predefined ACLs see: /// https://cloud.google.com/storage/docs/accesscontrol#extension. class PredefinedAcl { - String _name; - PredefinedAcl._(this._name); + final String _name; + const PredefinedAcl._(this._name); /// Predefined ACL for the 'authenticated-read' ACL. Applies to both buckets /// and objects. - static PredefinedAcl authenticatedRead = - new PredefinedAcl._('authenticatedRead'); + static const PredefinedAcl authenticatedRead = + const PredefinedAcl._('authenticatedRead'); /// Predefined ACL for the 'private' ACL. Applies to both buckets /// and objects. - static PredefinedAcl private = new PredefinedAcl._('private'); + static const PredefinedAcl private = const PredefinedAcl._('private'); /// Predefined ACL for the 'project-private' ACL. Applies to both buckets /// and objects. - static PredefinedAcl projectPrivate = new PredefinedAcl._('projectPrivate'); + static const PredefinedAcl projectPrivate = + const PredefinedAcl._('projectPrivate'); /// Predefined ACL for the 'public-read' ACL. Applies to both buckets /// and objects. - static PredefinedAcl publicRead = new PredefinedAcl._('publicRead'); + static const PredefinedAcl publicRead = const PredefinedAcl._('publicRead'); /// Predefined ACL for the 'public-read-write' ACL. Applies only to buckets. - static PredefinedAcl publicReadWrite = new PredefinedAcl._('publicReadWrite'); + static const PredefinedAcl publicReadWrite = + const PredefinedAcl._('publicReadWrite'); /// Predefined ACL for the 'bucket-owner-full-control' ACL. Applies only to /// objects. - static PredefinedAcl bucketOwnerFullControl = - new PredefinedAcl._('bucketOwnerFullControl'); + static const PredefinedAcl bucketOwnerFullControl = + const PredefinedAcl._('bucketOwnerFullControl'); /// Predefined ACL for the 'bucket-owner-read' ACL. Applies only to /// objects. - static PredefinedAcl bucketOwnerRead = new PredefinedAcl._('bucketOwnerRead'); + static const PredefinedAcl bucketOwnerRead = + const PredefinedAcl._('bucketOwnerRead'); + + String toString() => 'PredefinedAcl($_name)'; } /// Information on a bucket. 
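// With the ==, hashCode, and const constructors added in this patch, Acl
// values can be compared directly. A short sketch; the account and group
// names are placeholders. Note that entry order matters, mirroring the
// 'compare-acls' test added to storage_test.dart below.
const reader = const AclEntry(
    const AccountScope('jane@example.com'), AclPermission.READ);
const writer = const AclEntry(const GroupScope('team'), AclPermission.WRITE);

final acl1 = new Acl([reader, writer]);
final acl2 = new Acl([
  const AclEntry(const AccountScope('jane@example.com'), AclPermission.READ),
  writer
]);
// acl1 == acl2 is true: entries are compared pairwise. Reversing the order,
// as in new Acl([writer, reader]), yields an Acl that is not equal to acl1.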
diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 4afa185c..323b7a2f 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -843,4 +843,49 @@ main() { }); }); }); + + group('acl', () { + const id = const StorageIdScope('1234567890'); + const user = const AccountScope('sgjesse@google.com'); + const group = const GroupScope('dart'); + const domain = const DomainScope('dartlang.org'); + + const userRead = const AclEntry(user, AclPermission.READ); + const groupWrite = const AclEntry(group, AclPermission.WRITE); + const domainFullControl = + const AclEntry(domain, AclPermission.FULL_CONTROL); + + test('compare-scope', () { + expect(id, new StorageIdScope('1234567890')); + expect(user, new AccountScope('sgjesse@google.com')); + expect(group, new GroupScope('dart')); + expect(domain, new DomainScope('dartlang.org')); + expect(AclScope.allAuthenticated, new AllAuthenticatedScope()); + expect(AclScope.allUsers, new AllUsersScope()); + }); + + test('compare-entries', () { + expect(userRead, new AclEntry(user, AclPermission.READ)); + expect(groupWrite, new AclEntry(group, AclPermission.WRITE)); + expect(domainFullControl, + new AclEntry(domain, AclPermission.FULL_CONTROL)); + }); + + test('compare-acls', () { + var acl = new Acl([userRead, groupWrite, domainFullControl]); + expect(acl, new Acl([new AclEntry(user, AclPermission.READ), + new AclEntry(group, AclPermission.WRITE), + new AclEntry(domain, AclPermission.FULL_CONTROL)])); + expect(acl, + isNot(equals(new Acl([new AclEntry(group, AclPermission.WRITE), + new AclEntry(user, AclPermission.READ), + new AclEntry(domain, AclPermission.FULL_CONTROL)])))); + }); + + + test('compare-predefined-acls', () { + expect(PredefinedAcl.private, PredefinedAcl.private); + expect(PredefinedAcl.private, isNot(equals(PredefinedAcl.publicRead))); + }); + }); } From 86c5413c2e783d3e132c6740b7250389745df400 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 13:02:38 +0100 Subject: [PATCH 027/239] Number of updates to the storage API * Handling of more bucket info * Handling of object metadata including ACLs * Made the ObjectMetadata imutable * Added more tests * Added tracing HTTP client to help testing R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//693573002 --- pkgs/gcloud/lib/src/storage_impl.dart | 107 +++++---- pkgs/gcloud/lib/storage.dart | 185 +++++++++++---- pkgs/gcloud/test/common.dart | 61 ++++- pkgs/gcloud/test/storage/e2e_test.dart | 247 ++++++++++++++++++++- pkgs/gcloud/test/storage/storage_test.dart | 150 +++++++++++++ 5 files changed, 650 insertions(+), 100 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 552342b3..a37e4ed6 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -73,8 +73,8 @@ class _StorageImpl implements Storage { } Future bucketInfo(String bucketName) { - return _api.buckets.get(bucketName) - .then((bucket) => new _BucketInformationImpl(bucket)); + return _api.buckets.get(bucketName, projection: 'full') + .then((bucket) => new _BucketInfoImpl(bucket)); } Stream listBucketNames() { @@ -108,14 +108,20 @@ class _StorageImpl implements Storage { } } -class _BucketInformationImpl implements BucketInfo { +class _BucketInfoImpl implements BucketInfo { storage.Bucket _bucket; - _BucketInformationImpl(this._bucket); + _BucketInfoImpl(this._bucket); String get bucketName 
=> _bucket.name; + String get etag => _bucket.etag; + DateTime get created => _bucket.timeCreated; + + String get id => _bucket.id; + + Acl get acl => new Acl._fromBucketAcl(_bucket); } /// Bucket API implementation providing access to objects. @@ -196,8 +202,12 @@ class _BucketImpl implements Bucket { } Future info(String objectName) { - return _api.objects.get(bucketName, objectName) - .then((object) => new _ObjectStatImpl(object)); + return _api.objects.get(bucketName, objectName, projection: 'full') + .then((object) => new _ObjectInfoImpl(object)); + } + + Future delete(String objectName) { + return _api.objects.delete(bucketName, objectName); } Stream list({String prefix}) { @@ -221,21 +231,16 @@ class _BucketImpl implements Bucket { // TODO: support other ObjectMetadata implementations? _ObjectMetadata md = metadata; var object = md._object; - if (md._acl == null && _defaultObjectAcl == null) { + if (md._object.acl == null && _defaultObjectAcl == null) { throw new ArgumentError('ACL is required for update'); } if (md.contentType == null) { throw new ArgumentError('Content-Type is required for update'); } - var acl = md._acl != null ? md._acl : _defaultObjectAcl; - - var predefinedAcl; - if (acl != null) { - object.acl = acl._toObjectAccessControlList(); + if (md._object.acl == null) { + md._object.acl = _defaultObjectAcl._toObjectAccessControlList(); } - - return _api.objects.update( - object, bucketName, objectName, predefinedAcl: predefinedAcl); + return _api.objects.update(object, bucketName, objectName); } Future _listObjects( @@ -329,25 +334,30 @@ class _ObjectGenerationImpl implements ObjectGeneration { _ObjectGenerationImpl(this.objectGeneration, this.metaGeneration); } -class _ObjectStatImpl implements ObjectInfo { - storage.Object _object; +class _ObjectInfoImpl implements ObjectInfo { + final storage.Object _object; + final ObjectMetadata _metadata; Uri _downloadLink; ObjectGeneration _generation; - ObjectMetadata _metadata; - _ObjectStatImpl(object) : + _ObjectInfoImpl(storage.Object object) : _object = object, _metadata = new _ObjectMetadata._(object); String get name => _object.name; - int get size => int.parse(_object.size); + int get length => int.parse(_object.size); DateTime get updated => _object.updated; + String get etag => _object.etag; + List get md5Hash => crypto.CryptoUtils.base64StringToBytes(_object.md5Hash); - int get crc32CChecksum => int.parse(_object.crc32c); + int get crc32CChecksum { + var list = crypto.CryptoUtils.base64StringToBytes(_object.crc32c); + return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; + } Uri get downloadLink { if (_downloadLink == null) { @@ -369,7 +379,10 @@ class _ObjectStatImpl implements ObjectInfo { } class _ObjectMetadata implements ObjectMetadata { - storage.Object _object; + final storage.Object _object; + Acl _cachedAcl; + ObjectGeneration _cachedGeneration; + Map _cachedCustom; _ObjectMetadata({Acl acl, String contentType, @@ -377,8 +390,8 @@ class _ObjectMetadata implements ObjectMetadata { String cacheControl, String contentDisposition, String contentLanguage, - Map custom}) { - _object = new storage.Object(); + Map custom}) + : _object = new storage.Object() { _object.acl = acl != null ? 
acl._toObjectAccessControlList() : null; _object.contentType = contentType; _object.contentEncoding = contentEncoding; @@ -390,26 +403,38 @@ class _ObjectMetadata implements ObjectMetadata { _ObjectMetadata._(this._object); - List get _acl => _object.acl; - set acl(Acl value) => _object.acl = value._toObjectAccessControlList(); + Acl get acl { + if (_cachedAcl == null) { + _cachedAcl = new Acl._fromObjectAcl(_object); + } + return _cachedAcl; + } String get contentType => _object.contentType; - set contentType(String value) => _object.contentType = value; String get contentEncoding => _object.contentEncoding; - set contentEncoding(String value) => _object.contentEncoding = value; String get cacheControl => _object.cacheControl; - set cacheControl(String value) => _object.cacheControl = value; String get contentDisposition => _object.contentDisposition; - set contentDisposition(String value) => _object.contentDisposition = value; String get contentLanguage => _object.contentLanguage; - set contentLanguage(String value) => _object.contentLanguage = value; - Map get custom => _object.metadata; - set custom(Map value) => _object.metadata = value; + ObjectGeneration get generation { + if (_cachedGeneration == null) { + _cachedGeneration = new ObjectGeneration( + _object.generation, int.parse(_object.metageneration)); + } + return _cachedGeneration; + } + + Map get custom { + if (_object.metadata == null) return null; + if (_cachedCustom == null) { + _cachedCustom = new UnmodifiableMapView(_object.metadata); + } + return _cachedCustom; + } ObjectMetadata replace({Acl acl, String contentType, @@ -419,7 +444,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentLanguage, Map custom}) { return new _ObjectMetadata( - acl: acl != null ? acl : _acl, + acl: acl != null ? acl : this.acl, contentType: contentType != null ? contentType : this.contentType, contentEncoding: contentEncoding != null ? contentEncoding : this.contentEncoding, @@ -428,7 +453,7 @@ class _ObjectMetadata implements ObjectMetadata { : this.contentEncoding, contentLanguage: contentLanguage != null ? contentLanguage : this.contentEncoding, - custom: custom != null ? custom : this.custom); + custom: custom != null ? new Map.from(custom) : this.custom); } } @@ -549,7 +574,9 @@ class _MediaUploadStreamSink implements StreamSink> { } void _startNormalUpload(Stream stream, int length) { - var media = new common.Media(stream, length); + var contentType = _object.contentType != null + ? _object.contentType : 'application/octet-stream'; + var media = new common.Media(stream, length, contentType: contentType); _api.objects.insert(_object, _bucketName, name: _objectName, @@ -557,12 +584,14 @@ class _MediaUploadStreamSink implements StreamSink> { uploadMedia: media, uploadOptions: common.UploadOptions.Default) .then((response) { - _doneCompleter.complete(new _ObjectStatImpl(response)); + _doneCompleter.complete(new _ObjectInfoImpl(response)); }, onError: _completeError); } void _startResumableUpload(Stream stream, int length) { - var media = new common.Media(stream, length); + var contentType = _object.contentType != null + ? 
_object.contentType : 'application/octet-stream'; + var media = new common.Media(stream, length, contentType: contentType); _api.objects.insert(_object, _bucketName, name: _objectName, @@ -570,7 +599,7 @@ class _MediaUploadStreamSink implements StreamSink> { uploadMedia: media, uploadOptions: common.UploadOptions.Resumable) .then((response) { - _doneCompleter.complete(new _ObjectStatImpl(response)); + _doneCompleter.complete(new _ObjectInfoImpl(response)); }, onError: _completeError); } } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 53c1a00f..4c763fae 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -49,7 +49,7 @@ library gcloud.storage; import 'dart:async'; -import 'dart:collection' show UnmodifiableListView; +import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; import 'package:http/http.dart' as http; @@ -92,6 +92,62 @@ class Acl { /// Create a new ACL with a list of ACL entries. Acl(Iterable entries) : _entries = new List.from(entries); + Acl._fromBucketAcl(storage.Bucket bucket) + : _entries = new List(bucket.acl == null ? 0 : bucket.acl.length) { + if (bucket.acl != null) { + for (int i = 0; i < bucket.acl.length; i++) { + _entries[i] = new AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), + _aclPermissionFromRole(bucket.acl[i].role)); + } + } + } + + Acl._fromObjectAcl(storage.Object object) + : _entries = new List(object.acl == null ? 0 : object.acl.length) { + if (object.acl != null) { + for (int i = 0; i < object.acl.length; i++) { + _entries[i] = new AclEntry(_aclScopeFromEntity(object.acl[i].entity), + _aclPermissionFromRole(object.acl[i].role)); + } + } + } + + AclScope _aclScopeFromEntity(String entity) { + if (entity.startsWith('user-')) { + String tmp = entity.substring(5); + int at = tmp.indexOf('@'); + if (at != -1) { + return new AccountScope(tmp); + } else { + return new StorageIdScope(tmp); + } + } else if (entity.startsWith('group-')) { + return new GroupScope(entity.substring(6)); + } else if (entity.startsWith('domain-')) { + return new DomainScope(entity.substring(7)); + } else if (entity.startsWith('allAuthenticatedUsers-')) { + return AclScope.allAuthenticated; + } else if (entity.startsWith('allUsers-')) { + return AclScope.allUsers; + } else if (entity.startsWith('project-')) { + String tmp = entity.substring(8); + int dash = tmp.indexOf('-'); + if (dash != -1) { + return new ProjectScope(tmp.substring(dash + 1), + tmp.substring(0, dash)); + } + } + return new OpaqueScope(entity); + } + + AclPermission _aclPermissionFromRole(String role) { + if (role == 'READER') return AclPermission.READ; + if (role == 'WRITER') return AclPermission.WRITE; + if (role == 'OWNER') return AclPermission.FULL_CONTROL; + throw new UnsupportedError( + "Server returned a unsupported permission role '$role'"); + } + List _toBucketAccessControlList() { return _entries.map((entry) => entry._toBucketAccessControl()).toList(); } @@ -169,20 +225,26 @@ abstract class AclScope { /// ACL type for scope representing a Google Storage id. static const int _TYPE_STORAGE_ID = 0; + /// ACL type for scope representing a project entity. + static const int _TYPE_PROJECT = 1; + /// ACL type for scope representing an account holder. - static const int _TYPE_ACCOUNT = 1; + static const int _TYPE_ACCOUNT = 2; /// ACL type for scope representing a group. - static const int _TYPE_GROUP = 2; + static const int _TYPE_GROUP = 3; /// ACL type for scope representing a domain. 
-  static const int _TYPE_DOMAIN = 3;
+  static const int _TYPE_DOMAIN = 4;

  /// ACL type for scope representing all authenticated users.
-  static const int _TYPE_ALL_AUTHENTICATED = 4;
+  static const int _TYPE_ALL_AUTHENTICATED = 5;

  /// ACL type for scope representing all users.
-  static const int _TYPE_ALL_USERS = 5;
+  static const int _TYPE_ALL_USERS = 6;
+
+  /// ACL type for scope representing an unsupported scope.
+  static const int _TYPE_OPAQUE = 7;

  /// The id of the actual entity this ACL scope represents. The actual values
  /// are set in the different subclasses.
@@ -199,32 +261,9 @@ abstract class AclScope {

  const AclScope._(this._type, this._id);

-  String get _storageEntity {
-    switch (_type) {
-      case _TYPE_STORAGE_ID:
-        return 'user-$_id';
-      case _TYPE_ACCOUNT:
-        return 'user-$_id';
-      case _TYPE_GROUP:
-        return 'group-$_id';
-      case _TYPE_DOMAIN:
-        return 'domain-$_id';
-      case _TYPE_ALL_AUTHENTICATED:
-        return 'allAuthenticatedUsers';
-      case _TYPE_ALL_USERS:
-        return 'allUsers';
-      default:
-        throw new UnsupportedError('Unexpected ACL scope');
-    }
-  }
-
-  int get hashCode => _jenkinsHash([_type, _id]);
-
-  bool operator==(Object other) {
-    return other is AclScope && _type == other._type && _id == other._id;
-  }
-
-  String toString() => 'AclScope($_storageEntity)';
+
+  String get _storageEntity;
 }

 /// An ACL scope for an entity identified by a 'Google Storage ID'.
@@ -237,6 +276,8 @@ class StorageIdScope extends AclScope {

   /// Google Storage ID.
   String get storageId => _id;
+
+  String get _storageEntity => 'user-$_id';
 }

 /// An ACL scope for an entity identified by an individual email address.
@@ -245,6 +286,8 @@ class AccountScope extends AclScope {

   /// Email address.
   String get email => _id;
+
+  String get _storageEntity => 'user-$_id';
 }

 /// An ACL scope for an entity identified by a Google Groups email.
@@ -253,6 +296,8 @@ class GroupScope extends AclScope {

   /// Group name.
   String get group => _id;
+
+  String get _storageEntity => 'group-$_id';
 }

 /// An ACL scope for an entity identified by a domain name.
@@ -261,17 +306,46 @@ class DomainScope extends AclScope {

   /// Domain name.
   String get domain => _id;
+
+  String get _storageEntity => 'domain-$_id';
+}
+
+/// An ACL scope for a project-related entity.
+class ProjectScope extends AclScope {
+  /// Project role.
+  ///
+  /// Possible values are `owners`, `editors` and `viewers`.
+  final String role;
+
+  ProjectScope(String project, String this.role)
+      : super._(AclScope._TYPE_PROJECT, project);
+
+  /// Project ID.
+  String get project => _id;
+
+  String get _storageEntity => 'project-$role-$_id';
+}
+
+/// An ACL scope for an unsupported scope.
+class OpaqueScope extends AclScope {
+  OpaqueScope(String id) : super._(AclScope._TYPE_OPAQUE, id);
+
+  String get _storageEntity => _id;
 }

 /// ACL scope for all authenticated users.
 class AllAuthenticatedScope extends AclScope {
   const AllAuthenticatedScope()
       : super._(AclScope._TYPE_ALL_AUTHENTICATED, null);
+
+  String get _storageEntity => 'allAuthenticatedUsers';
 }

 /// ACL scope for all users.
 class AllUsersScope extends AclScope {
   const AllUsersScope(): super._(AclScope._TYPE_ALL_USERS, null);
+
+  String get _storageEntity => 'allUsers';
 }

 /// Permissions for individual scopes in an ACL.
@@ -359,8 +433,17 @@ abstract class BucketInfo {
   /// Name of the bucket.
   String get bucketName;

+  /// Entity tag for the bucket.
+  String get etag;
+
   /// When this bucket was created.
   DateTime get created;
+
+  /// Bucket ID.
+  String get id;
+
+  /// Acl of the bucket.
+ Acl get acl; } /// Access to Cloud Storage @@ -461,12 +544,15 @@ abstract class ObjectInfo { /// Name of the object. String get name; - /// Size of the data. - int get size; + /// Length of the data. + int get length; /// When this object was updated. DateTime get updated; + /// Entity tag for the object. + String get etag; + /// MD5 hash of the object. List get md5Hash; @@ -484,43 +570,45 @@ abstract class ObjectInfo { } /// Generational information on an object. -abstract class ObjectGeneration { +class ObjectGeneration { /// Object generation. - String get objectGeneration; + final String objectGeneration; /// Metadata generation. - int get metaGeneration; + final int metaGeneration; + + const ObjectGeneration(this.objectGeneration, this.metaGeneration); } -/// Access to object metadata +/// Access to object metadata. abstract class ObjectMetadata { factory ObjectMetadata({Acl acl, String contentType, String contentEncoding, String cacheControl, String contentDisposition, String contentLanguage, Map custom}) = _ObjectMetadata; - /// ACL - void set acl(Acl value); + /// ACL. + Acl get acl; /// `Content-Type` for this object. - String contentType; + String get contentType; /// `Content-Encoding` for this object. - String contentEncoding; + String get contentEncoding; /// `Cache-Control` for this object. - String cacheControl; + String get cacheControl; /// `Content-Disposition` for this object. - String contentDisposition; + String get contentDisposition; /// `Content-Language` for this object. /// /// The value of this field must confirm to RFC 3282. - String contentLanguage; + String get contentLanguage; /// Custom metadata. - Map custom; + Map get custom; - /// Create a copy of this object with some values replaces. + /// Create a copy of this object with some values replaced. /// // TODO: This cannot be used to set values to null. ObjectMetadata replace({Acl acl, String contentType, String contentEncoding, @@ -610,6 +698,11 @@ abstract class Bucket { // TODO: More documentation Future info(String name); + /// Delete an object. + /// + // TODO: More documentation + Future delete(String name); + /// Update object metadata. /// // TODO: More documentation diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 1a4bc48f..e71ee3e2 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -153,13 +153,13 @@ class MockClient extends http.BaseClient { .then((j) => json = j); } else if (partCount == 2) { // Second part is the base64 encoded bytes. - expect(contentType, 'application/octet-stream'); mimeMultipart .transform(ASCII.decoder) .fold('', (p, e) => '$p$e') .then(crypto.CryptoUtils.base64StringToBytes) .then((bytes) { - completer.complete(new NormalMediaUpload(json, bytes)); + completer.complete( + new NormalMediaUpload(json, bytes, contentType)); }); } else { // Exactly two parts expected. @@ -172,7 +172,56 @@ class MockClient extends http.BaseClient { } class NormalMediaUpload { - String json; - List bytes; - NormalMediaUpload(this.json, this.bytes); -} \ No newline at end of file + final String json; + final List bytes; + final String contentType; + NormalMediaUpload(this.json, this.bytes, this.contentType); +} + +// Implementation of http.Client which traces all requests and responses. +// Mainly useful for local testing. 
+class TraceClient extends http.BaseClient {
+  final http.Client client;
+
+  TraceClient(this.client);
+
+  Future send(http.BaseRequest request) {
+    print(request);
+    return request.finalize().toBytes().then((body) {
+      print('--- START REQUEST ---');
+      print(UTF8.decode(body));
+      print('--- END REQUEST ---');
+      var r = new RequestImpl(request.method, request.url, body);
+      r.headers.addAll(request.headers);
+      return client.send(r).then((http.StreamedResponse rr) {
+        return rr.stream.toBytes().then((body) {
+          print('--- START RESPONSE ---');
+          print(UTF8.decode(body));
+          print('--- END RESPONSE ---');
+          return new http.StreamedResponse(
+              new http.ByteStream.fromBytes(body),
+              rr.statusCode,
+              headers: rr.headers);
+
+        });
+      });
+    });
+  }
+
+  void close() {
+    client.close();
+  }
+}
+
+// http.BaseRequest implementation used by the TraceClient.
+class RequestImpl extends http.BaseRequest {
+  final List _body;
+
+  RequestImpl(String method, Uri url, this._body)
+      : super(method, url);
+
+  http.ByteStream finalize() {
+    super.finalize();
+    return new http.ByteStream.fromBytes(_body);
+  }
+}
diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart
index 0254f75a..3cda6da6 100644
--- a/pkgs/gcloud/test/storage/e2e_test.dart
+++ b/pkgs/gcloud/test/storage/e2e_test.dart
@@ -12,6 +12,7 @@ import 'package:googleapis/common/common.dart' as common;
 import 'package:googleapis_auth/auth_io.dart' as auth;
 import 'package:unittest/unittest.dart';

+import '../common.dart';

 // Environment variables for specifying the cloud project to use and the
 // location of the service account key for that project.
@@ -57,7 +58,7 @@ Future serviceKeyJson(String serviceKeyLocation) {
   });
 }

-Future connect() {
+Future connect({bool trace: false}) {
   String project = Platform.environment[PROJECT_ENV];
   String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV];

@@ -74,7 +75,10 @@ Future connect() {
   return serviceKeyJson(serviceKeyLocation).then((keyJson) {
     var creds = new auth.ServiceAccountCredentials.fromJson(keyJson);
     return auth.clientViaServiceAccount(creds, Storage.Scopes)
-        .then((client) => new Storage(client, project));
+        .then((client) {
+          if (trace) client = new TraceClient(client);
+          return new Storage(client, project);
+        });
   });
 }

@@ -85,18 +89,60 @@ String generateBucketName() {

 bool testDetailedApiError(e) => e is common.DetailedApiRequestError;

-runTests(Storage storage) {
-  group('bucket', () {
+// Generate a list just above the limit when changing to resumable upload.
+const int MB = 1024 * 1024; +const int maxNormalUpload = 1 * MB; +const int minResumableUpload = maxNormalUpload + 1; +var bytesResumableUpload = + new List.generate(minResumableUpload, (e) => e & 255); - test('create-delete', () { +runTests(Storage storage, Bucket testBucket) { + group('bucket', () { + test('create-info-delete', () { var bucketName = generateBucketName(); - - storage.createBucket(bucketName).then(expectAsync((result) { + return storage.createBucket(bucketName).then(expectAsync((result) { expect(result, isNull); - expect(storage.deleteBucket(bucketName), completion(isNull)); + return storage.bucketInfo(bucketName).then(expectAsync((info) { + expect(info.bucketName, bucketName); + expect(info.etag, isNotNull); + expect(info.created is DateTime, isTrue); + expect(info.id, isNotNull); + return storage.deleteBucket(bucketName) + .then(expectAsync((result) { + expect(result, isNull); + })); + })); })); }); + test('create-with-predefined-acl-delete', () { + Future test(predefinedAcl, expectedLength) { + var bucketName = generateBucketName(); + return storage.createBucket(bucketName, predefinedAcl: predefinedAcl) + .then(expectAsync((result) { + expect(result, isNull); + return storage.bucketInfo(bucketName).then(expectAsync((info) { + var acl = info.acl; + expect(info.bucketName, bucketName); + expect(acl.entries.length, expectedLength); + return storage.deleteBucket(bucketName) + .then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + } + + return Future.forEach([ + // TODO: Figure out why some returned ACLs are empty. + () => test(PredefinedAcl.authenticatedRead, 0), + // [test, [PredefinedAcl.private, 0]], // TODO: Cannot delete. + () => test(PredefinedAcl.projectPrivate, 3), + () => test(PredefinedAcl.publicRead, 0), + () => test(PredefinedAcl.publicReadWrite, 0) + ], (f) => f().then(expectAsync((_) {}))); + }); + test('create-error', () { var bucketName = generateBucketName(); @@ -105,9 +151,192 @@ runTests(Storage storage) { }), test: testDetailedApiError); }); }); + + // TODO: Remove solo_ here when the rate-limit issue have been resolved. + solo_group('object', () { + // Run all object tests in the same bucket to try to avoid the rate-limit + // for creating and deleting buckets while testing. + Future withTestBucket(function) { + return function(testBucket).whenComplete(() { + // TODO: Clean the bucket. 
+ }); + } + + test('create-read-delete', () { + Future test(name, bytes) { + return withTestBucket((Bucket bucket) { + return bucket.writeBytes('test', bytes).then(expectAsync((info) { + expect(info, isNotNull); + return bucket.read('test') + .fold([], (p, e) => p..addAll(e)) + .then(expectAsync((result) { + expect(result, bytes); + return bucket.delete('test').then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + }); + } + + return Future.forEach([ + () => test('test-1', [1, 2, 3]), + () => test('test-2', bytesResumableUpload) + ], (f) => f().then(expectAsync((_) {}))); + }); + + test('create-with-predefined-acl-delete', () { + return withTestBucket((Bucket bucket) { + Future test(objectName, predefinedAcl, expectedLength) { + var bucketName = generateBucketName(); + return bucket.writeBytes( + objectName, [1, 2, 3], predefinedAcl: predefinedAcl) + .then(expectAsync((result) { + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + var acl = info.metadata.acl; + expect(info.name, objectName); + expect(info.etag, isNotNull); + expect(acl.entries.length, expectedLength); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + } + + return Future.forEach([ + () => test('test-1', PredefinedAcl.authenticatedRead, 2), + () => test('test-2', PredefinedAcl.private, 1), + () => test('test-3', PredefinedAcl.projectPrivate, 4), + () => test('test-4', PredefinedAcl.publicRead, 2), + () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2), + () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) + ], (f) => f().then(expectAsync((_) {}))); + }); + }); + + test('create-with-acl-delete', () { + return withTestBucket((Bucket bucket) { + Future test(objectName, acl, expectedLength) { + var bucketName = generateBucketName(); + return bucket.writeBytes(objectName, [1, 2, 3], acl: acl) + .then(expectAsync((result) { + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + var acl = info.metadata.acl; + expect(info.name, objectName); + expect(info.etag, isNotNull); + expect(acl.entries.length, expectedLength); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + } + + Acl acl1 = new Acl( + [new AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]); + Acl acl2 = new Acl( + [new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry(new AccountScope('sgjesse@google.com'), + AclPermission.WRITE)]); + Acl acl3 = new Acl( + [new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry(new AccountScope('sgjesse@google.com'), + AclPermission.WRITE), + new AclEntry(new AccountScope('misc@dartlang.org'), + AclPermission.READ)]); + Acl acl4 = new Acl( + [new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry(new AccountScope('sgjesse@google.com'), + AclPermission.WRITE), + new AclEntry(new GroupScope('misc@dartlang.org'), + AclPermission.READ), + new AclEntry(new DomainScope('dartlang.org'), + AclPermission.FULL_CONTROL)]); + + return Future.forEach([ + () => test('test-1', acl1, 1), + () => test('test-2', acl2, 2), + () => test('test-3', acl3, 3), + () => test('test-4', acl4, 4) + ], (f) => f().then(expectAsync((_) {}))); + }); + }); + + test('create-with-metadata-delete', () { + return withTestBucket((Bucket bucket) { + Future test(objectName, metadata, bytes) { + var bucketName = generateBucketName(); + return bucket.writeBytes(objectName, bytes, metadata: metadata) + 
.then(expectAsync((result) { + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + var acl = info.metadata.acl; + expect(info.name, objectName); + expect(info.length, bytes.length); + expect(info.updated is DateTime, isTrue); + expect(info.md5Hash, isNotNull); + expect(info.crc32CChecksum, isNotNull); + expect(info.downloadLink is Uri, isTrue); + expect(info.generation.objectGeneration, isNotNull); + expect(info.generation.metaGeneration, 1); + expect(info.metadata.contentType, metadata.contentType); + expect(info.metadata.cacheControl, metadata.cacheControl); + expect(info.metadata.contentDisposition, + metadata.contentDisposition); + expect(info.metadata.contentEncoding, + metadata.contentEncoding); + expect(info.metadata.contentLanguage, + metadata.contentLanguage); + expect(info.metadata.custom, metadata.custom); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + } + + var metadata1 = new ObjectMetadata(contentType: 'text/plain'); + var metadata2 = new ObjectMetadata( + contentType: 'text/plain', + cacheControl: 'no-cache', + contentDisposition: 'attachment; filename="test.txt"', + contentEncoding: 'gzip', + contentLanguage: 'da', + custom: {'a': 'b', 'c': 'd'}); + + return Future.forEach([ + () => test('test-1', metadata1, [65, 66, 67]), + () => test('test-2', metadata2, [65, 66, 67]), + () => test('test-3', metadata1, bytesResumableUpload), + () => test('test-4', metadata2, bytesResumableUpload) + ], (f) => f().then(expectAsync((_) {}))); + }); + }); + }); +} + +class E2EConfiguration extends SimpleConfiguration { + Storage storage; + final String testBucketName; + E2EConfiguration(this.storage, this.testBucketName): super(); + + onDone(success) { + storage.deleteBucket(testBucketName) + .whenComplete(() => super.onDone(success)); + } } main() { // Share the same storage connection for all tests. - connect().then(runTests); + connect(trace: false).then((Storage storage) { + var bucketName = generateBucketName(); + unittestConfiguration = new E2EConfiguration(storage, bucketName); + // Create a shared bucket for all object tests. 
+ storage.createBucket(bucketName).then((result) { + runTests(storage, storage.bucket(bucketName)); + }); + }); } diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 323b7a2f..a27fb1e3 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -338,6 +338,7 @@ main() { new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); return mock.respond(new storage.Object()..name = objectName); })); })); @@ -595,6 +596,118 @@ main() { }); }); + test('write-with-metadata-short', () { + var metadata = + [new ObjectMetadata(contentType: 'mime/type'), + new ObjectMetadata(contentType: 'type/mime', + cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache', + contentDisposition: 'disp-content'), + new ObjectMetadata(contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + new ObjectMetadata(custom: {'x': 'y'}), + new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; + + withMockClient((mock, api) { + int count = 0; + var bytes = [1, 2, 3]; + + mock.registerUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + return mock.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + ObjectMetadata m = metadata[count]; + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + var contentType = m.contentType != null + ? m.contentType : 'application/octet-stream'; + expect(mediaUpload.contentType, contentType); + expect(object.cacheControl, m.cacheControl); + expect(object.contentDisposition, m.contentDisposition); + expect(object.contentEncoding, m.contentEncoding); + expect(object.contentLanguage, m.contentLanguage); + expect(object.metadata, m.custom); + count++; + return mock.respond(new storage.Object()..name = objectName); + })); + }, count: metadata.length)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (int i = 0; i < metadata.length; i++) { + futures.add(bucket.writeBytes(objectName, bytes, + metadata: metadata[i])); + } + return Future.wait(futures); + }); + }); + + test('write-with-metadata-long', () { + var metadata = + [new ObjectMetadata(contentType: 'mime/type'), + new ObjectMetadata(contentType: 'type/mime', + cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache', + contentDisposition: 'disp-content'), + new ObjectMetadata(contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + new ObjectMetadata(custom: {'x': 'y'}), + new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; + + withMockClient((mock, api) { + int countInitial = 0; + int countData = 0; + + mock.registerResumableUpload( + 'POST', 'b/$bucketName/o', expectAsync((request) { + var object = new storage.Object.fromJson(JSON.decode(request.body)); + ObjectMetadata m = metadata[countInitial]; + expect(object.name, objectName); + var contentType = m.contentType != null + ? 
m.contentType : 'application/octet-stream'; + expect(object.cacheControl, m.cacheControl); + expect(object.contentDisposition, m.contentDisposition); + expect(object.contentEncoding, m.contentEncoding); + expect(object.contentLanguage, m.contentLanguage); + expect(object.metadata, m.custom); + countInitial++; + return mock.respondInitiateResumableUpload(PROJECT); + }, count: metadata.length)); + mock.registerResumableUpload( + 'PUT', 'b/$PROJECT/o', expectAsync((request) { + ObjectMetadata m = metadata[countData % metadata.length]; + var contentType = m.contentType != null + ? m.contentType : 'application/octet-stream'; + expect(request.headers['content-type'], contentType); + bool firstPart = countData < metadata.length; + countData++; + if (firstPart) { + expect(request.bodyBytes.length, MB); + return mock.respondContinueResumableUpload(); + } else { + expect(request.bodyBytes.length, 1); + return mock.respond(new storage.Object()..name = objectName); + } + }, count: metadata.length * 2)); + + var bucket = api.bucket(bucketName); + var futures = []; + for (int i = 0; i < metadata.length; i++) { + futures.add(bucket.writeBytes(objectName, bytesResumableUpload, + metadata: metadata[i])); + } + return Future.wait(futures); + }); + }); + test('write-with-predefined-acl', () { var predefined = [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], @@ -616,6 +729,7 @@ main() { new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], predefined[count++][1]); expect(object.acl, isNull); @@ -667,6 +781,7 @@ main() { new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(object.acl, isNotNull); expect(object.acl.length, count + 1); @@ -738,6 +853,7 @@ main() { new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], predefined[predefinedIndex][1]); expect(object.acl, isNotNull); @@ -809,6 +925,40 @@ main() { }); }); + test('stat-acl', () { + withMockClient((mock, api) { + mock.register( + 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + expect(request.url.queryParameters['alt'], 'json'); + var acl1 = new storage.ObjectAccessControl(); + acl1.entity = 'user-1234567890'; + acl1.role = 'OWNER'; + var acl2 = new storage.ObjectAccessControl(); + acl2.entity = 'user-xxx@yyy.zzz'; + acl2.role = 'OWNER'; + var acl3 = new storage.ObjectAccessControl(); + acl3.entity = 'xxx-1234567890'; + acl3.role = 'OWNER'; + return mock.respond(new storage.Object() + ..name = objectName + ..acl = [acl1, acl2, acl3]); + })); + + var api = new Storage(mock, PROJECT); + var bucket = api.bucket(bucketName); + bucket.info(objectName).then(expectAsync((ObjectInfo info) { + expect(info.name, objectName); + expect(info.metadata.acl.entries.length, 3); + expect(info.metadata.acl.entries[0] is AclEntry, isTrue); + expect(info.metadata.acl.entries[0].scope is StorageIdScope, isTrue); + expect(info.metadata.acl.entries[1] is AclEntry, isTrue); + expect(info.metadata.acl.entries[1].scope is AccountScope, isTrue); + expect(info.metadata.acl.entries[2] 
is AclEntry, isTrue); + expect(info.metadata.acl.entries[2].scope is OpaqueScope, isTrue); + })); + }); + }); + group('list', () { test('empty', () { withMockClient((mock, api) { From 2b597438d73a2cab2790b039edad512286115755 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 13:22:53 +0100 Subject: [PATCH 028/239] Remove unused code from test. R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//693143002 --- pkgs/gcloud/test/db/properties_test.dart | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index edf26655..f20c2509 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -180,12 +180,8 @@ class ModelDBMock implements ModelDB { return _datastoreKey; } - modelDescriptionState(ModelDescription modelDescription) => null; - Iterable get modelDescriptions => null; Map propertiesForModel(modelDescription) => null; - modelClass(ModelDescription md) => null; Model fromDatastoreEntity(datastore.Entity entity) => null; - ModelDescription modelDescriptionForType(Type type) => null; datastore.Entity toDatastoreEntity(Model model) => null; String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; From 245f1e6ec83dede79131870f4daa6dd87065c9a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 13:23:57 +0100 Subject: [PATCH 029/239] Remove Pub/Sub for now R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//694783002 --- pkgs/gcloud/lib/pubsub.dart | 410 --------- pkgs/gcloud/lib/src/pubsub_impl.dart | 503 ----------- pkgs/gcloud/test/pubsub/pubsub_test.dart | 1044 ---------------------- 3 files changed, 1957 deletions(-) delete mode 100644 pkgs/gcloud/lib/pubsub.dart delete mode 100644 pkgs/gcloud/lib/src/pubsub_impl.dart delete mode 100644 pkgs/gcloud/test/pubsub/pubsub_test.dart diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart deleted file mode 100644 index ae24da11..00000000 --- a/pkgs/gcloud/lib/pubsub.dart +++ /dev/null @@ -1,410 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -library gcloud.pubsub; - -import 'dart:async'; -import 'dart:collection'; -import 'dart:convert'; -import 'package:crypto/crypto.dart'; -import 'package:http/http.dart' as http; - -import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; - -import 'common.dart'; -export 'common.dart'; - -part 'src/pubsub_impl.dart'; - -/// A Cloud Pub/Sub client. -/// -/// Connects to the Cloud Pub/Sub service and gives access to its operations. -/// -/// Google Cloud Pub/Sub is a reliable, many-to-many, asynchronous messaging -/// service from Google Cloud Platform. A detailed overview is available on -/// [Pub/Sub docs](https://developers.google.com/pubsub/overview). -/// -/// To access Pub/Sub, an authenticate HTTP client is required. This client -/// should as a minimum provide access to the scopes `PubSub.Scopes`. -/// -/// The following example shows how to access Pub/Sub using a service account -/// and pull a message from a subscription. 
-/// -/// import 'package:http/http.dart' as http; -/// import 'package:googleapis_auth/auth_io.dart' as auth; -/// import 'package:gcloud/pubsub.dart'; -/// -/// Future createClient() { -/// // Service account credentials retreived from Cloud Console. -/// String creds = -/// r''' -/// { -/// "private_key_id": ..., -/// "private_key": ..., -/// "client_email": ..., -/// "client_id": ..., -/// "type": "service_account" -/// }'''; -/// return auth.clientViaServiceAccount( -/// new auth.ServiceAccountCredentials.fromJson(creds), -/// PubSub.Scopes); -/// } -/// -/// main() { -/// var project = 'my-project'; -/// var client; -/// var pubsub; -/// createClient().then((c) { -/// client = c; -/// pubsub = new PubSub(client, project); -/// return pubsub.lookupSubscription('my-subscription'); -/// }) -/// .then((Subscription subscription) => subscription.pull()) -/// .then((PullEvent event) => print('Message ${event.message.asString}')) -/// .whenComplete(() => client.close()); -/// } -/// -/// When working with topics and subscriptions they are referred to using -/// names. These names can be either relative names or absolute names. -/// -/// An absolute name of a topic starts with `/` and has the form: -/// -/// /topics// -/// -/// When a relative topic name is used, its absolute name is generated by -/// prepending `/topics//`, where `` is the project -/// id passed to the constructor. -/// -/// An absolute name of a subscription starts with `/` and has the form: -/// -/// /subscriptions// -/// -/// When a relative subscription name is used, its absolute name is -/// generated by prepending `/subscriptions//`, where -/// `` is the project id passed to the constructor. -/// -abstract class PubSub { - /// List of required OAuth2 scopes for Pub/Sub operation. - static const Scopes = const [ pubsub.PubsubApi.PubsubScope ]; - - /// Access Pub/Sub using an authenicated client. - /// - /// The [client] is an authentiacted HTTP client. This client must - /// provide access to at least the scopes in `PubSub.Scopes`. - /// - /// The [project] is the name of the Google Cloud project. - /// - /// Returs an object providing access to Pub/Sub. The passed-in [client] will - /// not be closed automatically. The caller is responsible for closing it. - factory PubSub(http.Client client, String project) = _PubSubImpl; - - /// The name of the project. - String get project; - - /// Create a new topic named [name]. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with the newly created topic. - Future createTopic(String name); - - /// Delete topic named [name]. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with `null` when the operation - /// is finished. - Future deleteTopic(String name); - - /// Look up topic named [name]. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with the topic. - Future lookupTopic(String name); - - /// Lists all topics. - /// - /// Returns a `Stream` of topics. - Stream listTopics(); - - /// Start paging through all topics. - /// - /// The maximum number of topics in each page is specified in [pageSize]. - /// - /// Returns a `Future` which completes with a `Page` object holding the - /// first page. Use the `Page` object to move to the next page of topics. - Future> pageTopics({int pageSize: 50}); - - /// Create a new subscription named [name] listening on topic [topic]. 
- /// - /// If [endpoint] is passed this will create a push subscription. - /// - /// Otherwise this will create a pull subscription. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with the newly created subscripton. - Future createSubscription( - String name, String topic, {Uri endpoint}); - - /// Delete subscription named [name]. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with the subscription. - Future deleteSubscription(String name); - - /// Lookup subscription with named [name]. - /// - /// The [name] can be either an absolute name or a relative name. - /// - /// Returns a `Future` which completes with the subscription. - Future lookupSubscription(String name); - - /// List subscriptions. - /// - /// If [query] is passed this will list all subscriptions matching the query. - /// - /// Otherwise this will list all subscriptions. - /// - /// The only supported query string is the name of a topic. If a name of a - /// topic is passed as [query], this will list all subscriptions on that - /// topic. - /// - /// Returns a `Stream` of subscriptions. - Stream listSubscriptions([String query]); - - /// Start paging through subscriptions. - /// - /// If [topic] is passed this will list all subscriptions to that topic. - /// - /// Otherwise this will list all subscriptions. - /// - /// The maximum number of subscriptions in each page is specified in - /// [pageSize] - /// - /// Returns a `Future` which completes with a `Page` object holding the - /// first page. Use the `Page` object to move to the next page of - /// subscriptions. - Future> pageSubscriptions( - {String topic, int pageSize: 50}); -} - -/// A Pub/Sub topic. -/// -/// A topic is used by a publisher to publish (send) messages. -abstract class Topic { - /// The relative name of this topic. - String get name; - - /// The name of the project for this topic. - String get project; - - /// The absolute name of this topic. - String get absoluteName; - - /// Delete this topic. - /// - /// Returns a `Future` which completes with `null` when the operation - /// is finished. - Future delete(); - - /// Publish a message. - /// - /// Returns a `Future` which completes with `null` when the operation - /// is finished. - Future publish(Message message); - - /// Publish a string as a message. - /// - /// The message will get the labels specified in [labels]. The keys in this - /// map must be strings and the values must be either Strings or integers. - /// - /// The [labels] are passed together with the message to the receiver. - /// - /// Returns a `Future` which completes with `null` when the operation - /// is finished. - Future publishString(String message, {Map labels}); - - /// Publish bytes as a message. - /// - /// The message will get the labels specified in [labels]. The keys in this - /// map must be strings and the values must be either Strings or integers. - /// - /// The [labels] are passed together with the message to the receiver. - /// - /// Returns a `Future` which completes with `null` when the operation - /// is finished. - Future publishBytes(List message, {Map labels}); -} - -/// A Pub/Sub subscription -/// -/// A subscription is used to receive messages. A subscriber application -/// create a subscription on a topic to receive messages from it. -/// -/// Subscriptions can be either pull subscriptions or push subscriptions. 
-/// -/// For a pull subscription the receiver calls the `Subscription.pull` -/// method on the subscription object to get the next message. -/// -/// For a push subscription a HTTPS endpoint is configured. This endpoint get -/// POST requests with the messages. -abstract class Subscription { - /// The relative name of this subscription. - String get name; - - /// The name of the project for this subscription. - String get project; - - /// The absolute name of this subscription. - String get absoluteName; - - /// The topic subscribed to. - Topic get topic; - - /// Whether this is a push subscription. - /// - /// A push subscription is configured with an endpoint URI, and messages - /// are automatically sent to this endpoint without needing to call [pull]. - bool get isPush; - - /// Whether this is a pull subscription. - /// - /// A subscription without a configured endpoint URI is a pull subscripton. - /// Messages are not delivered automatically, but must instead be requested - /// using [pull]. - bool get isPull; - - /// The URI for the push endpoint. - /// - /// If this is a pull subscription this is `null`. - Uri get endpoint; - - /// Update the push configuration with a new endpoint. - /// - /// if [endpoint] is `null`, the subscription stops delivering messages - /// automatically, and becomes a pull subscription, if it isn't already. - /// - /// If [endpoint] is not `null`, the subscription will be a push - /// subscription, if it wasn't already, and Pub/Sub will start automatically - /// delivering message to the endpoint URI. - /// - /// Returns a `Future` which completes when the operation completes. - Future updatePushConfiguration(Uri endpoint); - - /// Delete this subscription. - /// - /// Returns a `Future` which completes when the operation completes. - Future delete(); - - - /// Pull a message from the subscription. - /// - /// If [noWait] is true, the method will complete the returned `Future` - /// with `null` if it finds that there are no messages available. - /// - /// If `noWait` is false, the method will wait for a message to become - /// available, and will then complete the `Future` with a `PullEvent` - /// containing the message. - Future pull({bool noWait: true}); -} - -/// The content of a Pub/Sub message. -/// -/// All Pub/Sub messages consist of a body of binary data and has an optional -/// set of labels (key-value pairs) associated with it. -/// -/// A `Message` contains the message body a list of bytes. The message body can -/// be read and written as a String, in which case the string is converted to -/// or from UTF-8 automatically. -abstract class Message { - /// Creates a new message with a String for the body. The String will - /// be UTF-8 encoded to create the actual binary body for the message. - /// - /// Message labels can be passed in the [labels] Map. The values in this - /// map must be either Strings or integers. Integers must be positive - /// 64-bit integers. - factory Message.withString(String message, {Map labels}) = - _MessageImpl.withString; - - /// Creates a new message with a binary body. - /// - /// Message labels can be passed in the [labels] Map. The values in this - /// map must be either Strings or integers. Integers must be positive - /// 64-bit integers. - factory Message.withBytes(List message, {Map labels}) = - _MessageImpl.withBytes; - - /// The message body as a String. - /// - /// The binary body is decoded into a String using an UTF-8 decoder. 
- /// - /// If the body is not UTF-8 encoded use the [asBytes] getter and manually - /// apply the corect decoding. - String get asString; - - /// The message body as bytes. - List get asBytes; - - /// The labels for this message. The values in the Map are either - /// Strings or integers. - /// - /// Values can be 64-bit integers. - Map get labels; -} - -/// A Pub/Sub pull event. -/// -/// Instances of this class are returned when pulling messages with -/// [Subscription.pull]. -abstract class PullEvent { - /// The message content. - Message get message; - - /// Whether the message was truncated. - bool get isTruncated; - - /// Acknowledge reception of this message. - /// - /// Returns a `Future` which completes with `null` when the acknowledge has - /// been processed. - Future acknowledge(); -} - -/// Pub/Sub push event. -/// -/// This class can be used in a HTTP server for decoding messages pushed to -/// an endpoint. -/// -/// When a message is received on a push endpoint use the [PushEvent.fromJson] -/// constructor with the HTTP body to decode the received message. -/// -/// E.g. with a `dart:io` HTTP handler: -/// -/// void pushHandler(HttpRequest request) { -/// // Decode the JSON body. -/// request.transform(UTF8.decoder).join('').then((body) { -/// // Decode the JSON into a push message. -/// var message = new PushMessage.fromJson(body) -/// -/// // Process the message... -/// -/// // Respond with status code 20X to acknowledge the message. -/// response.statusCode = statusCode; -/// response.close(); -/// }); -/// } -//// -abstract class PushEvent { - /// The message content. - Message get message; - - /// The absolute name of the subscription. - String get subscriptionName; - - /// Create a `PushMessage` from JSON received on a Pub/Sub push endpoint. - factory PushEvent.fromJson(String json) = _PushEventImpl.fromJson; -} diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart deleted file mode 100644 index 2ce7b44e..00000000 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ /dev/null @@ -1,503 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -part of gcloud.pubsub; - -class _PubSubImpl implements PubSub { - final http.Client _client; - final String project; - final pubsub.PubsubApi _api; - final String _topicPrefix; - final String _subscriptionPrefix; - - _PubSubImpl(client, project) : - this._client = client, - this.project = project, - _api = new pubsub.PubsubApi(client), - _topicPrefix = '/topics/$project/', - _subscriptionPrefix = '/subscriptions/$project/'; - - - String _fullTopicName(String name) { - if (name.startsWith('/') && !name.startsWith('/topics')) { - throw new ArgumentError("Illegal absolute topic name. Absolute topic " - "name must start with '/topics'"); - } - return name.startsWith('/topics') ? name : '${_topicPrefix}$name'; - } - - String _fullSubscriptionName(name) { - if (name.startsWith('/') && !name.startsWith('/subscriptions')) { - throw new ArgumentError("Illegal absolute topic name. Absolute topic " - "name must start with '/subscriptions'"); - } - return name.startsWith('/subscriptions') ? 
name - : '${_subscriptionPrefix}$name'; - } - - Future _createTopic(String name) { - return _api.topics.create(new pubsub.Topic()..name = name); - } - - Future _deleteTopic(String name) { - return _api.topics.delete(name); - } - - Future _getTopic(String name) { - return _api.topics.get(name); - } - - Future _listTopics( - int pageSize, String nextPageToken) { - var query = 'cloud.googleapis.com/project in (/projects/$project)'; - return _api.topics.list( - query: query, maxResults: pageSize, pageToken: nextPageToken); - } - - Future _createSubscription( - String name, String topic, Uri endpoint) { - var subscription = new pubsub.Subscription() - ..name = name - ..topic = topic; - if (endpoint != null) { - var pushConfig = - new pubsub.PushConfig()..pushEndpoint = endpoint.toString(); - subscription.pushConfig = pushConfig; - } - return _api.subscriptions.create(subscription); - } - - Future _deleteSubscription(String name) { - return _api.subscriptions.delete(_fullSubscriptionName(name)); - } - - Future _getSubscription(String name) { - return _api.subscriptions.get(name); - } - - Future _listSubscriptions( - String topic, int pageSize, String nextPageToken) { - // See https://developers.google.com/pubsub/v1beta1/subscriptions/list for - // the specification of the query format. - var query = topic == null - ? 'cloud.googleapis.com/project in (/projects/$project)' - : 'pubsub.googleapis.com/topic in (/topics/$project/$topic)'; - return _api.subscriptions.list( - query: query, maxResults: pageSize, pageToken: nextPageToken); - } - - Future _modifyPushConfig(String subscription, Uri endpoint) { - var pushConfig = new pubsub.PushConfig() - ..pushEndpoint = endpoint != null ? endpoint.toString() : null; - var request = new pubsub.ModifyPushConfigRequest() - ..subscription = subscription - ..pushConfig = pushConfig; - return _api.subscriptions.modifyPushConfig(request); - } - - Future _publish( - String topic, List message, Map labels) { - var l = null; - if (labels != null) { - l = []; - labels.forEach((key, value) { - if (value is String) { - l.add(new pubsub.Label()..key = key..strValue = value); - } else { - l.add(new pubsub.Label()..key = key..numValue = value.toString()); - } - }); - } - var request = new pubsub.PublishRequest() - ..topic = topic - ..message = (new pubsub.PubsubMessage() - ..dataAsBytes = message - ..label = l); - return _api.topics.publish(request); - } - - Future _pull( - String subscription, bool returnImmediately) { - var request = new pubsub.PullRequest() - ..subscription = subscription - ..returnImmediately = returnImmediately; - return _api.subscriptions.pull(request); - } - - Future _ack(String ackId, String subscription) { - var request = new pubsub.AcknowledgeRequest() - ..ackId = [ ackId ] - ..subscription = subscription; - return _api.subscriptions.acknowledge(request); - } - - void _checkTopicName(name) { - if (name.startsWith('/') && !name.startsWith(_topicPrefix)) { - throw new ArgumentError( - "Illegal topic name. Absolute topic names for project '$project' " - "must start with $_topicPrefix"); - } - if (name.length == _topicPrefix.length) { - throw new ArgumentError( - 'Illegal topic name. Relative part of the name cannot be empty'); - } - } - - void _checkSubscriptionName(name) { - if (name.startsWith('/') && !name.startsWith(_subscriptionPrefix)) { - throw new ArgumentError( - "Illegal subscription name. 
Absolute subscription names for project " - "'$project' must start with $_subscriptionPrefix"); - } - if (name.length == _subscriptionPrefix.length) { - throw new ArgumentError( - 'Illegal subscription name. ' - 'Relative part of the name cannot be empty'); - } - } - - Future createTopic(String name) { - _checkTopicName(name); - return _createTopic(_fullTopicName(name)) - .then((top) => new _TopicImpl(this, top)); - } - - Future deleteTopic(String name) { - _checkTopicName(name); - return _deleteTopic(_fullTopicName(name)); - } - - Future lookupTopic(String name) { - _checkTopicName(name); - return _getTopic(_fullTopicName(name)) - .then((top) => new _TopicImpl(this, top)); - } - - Stream listTopics() { - Future> firstPage(pageSize) { - return _listTopics(pageSize, null) - .then((response) => new _TopicPageImpl(this, pageSize, response)); - } - return new StreamFromPages(firstPage).stream; - } - - Future> pageTopics({int pageSize: 50}) { - return _listTopics(pageSize, null).then((response) { - return new _TopicPageImpl(this, pageSize, response); - }); - } - - Future createSubscription( - String name, String topic, {Uri endpoint}) { - _checkSubscriptionName(name); - _checkTopicName(topic); - return _createSubscription(_fullSubscriptionName(name), - _fullTopicName(topic), - endpoint) - .then((sub) => new _SubscriptionImpl(this, sub)); - } - - Future deleteSubscription(String name) { - _checkSubscriptionName(name); - return _deleteSubscription(_fullSubscriptionName(name)); - } - - Future lookupSubscription(String name) { - _checkSubscriptionName(name); - return _getSubscription(_fullSubscriptionName(name)) - .then((sub) => new _SubscriptionImpl(this, sub)); - } - - Stream listSubscriptions([String query]) { - Future> firstPage(pageSize) { - return _listSubscriptions(query, pageSize, null) - .then((response) => - new _SubscriptionPageImpl(this, query, pageSize, response)); - } - return new StreamFromPages(firstPage).stream; - } - - Future> pageSubscriptions( - {String topic, int pageSize: 50}) { - return _listSubscriptions(topic, pageSize, null).then((response) { - return new _SubscriptionPageImpl(this, topic, pageSize, response); - }); - } -} - -/// Message class for messages constructed through 'new Message()'. It stores -/// the user supplied body as either String or bytes. -class _MessageImpl implements Message { - // The message body, if it is a `String`. In that case, [bytesMessage] is - // null. - final String _stringMessage; - - // The message body, if it is a byte list. In that case, [stringMessage] is - // null. - final List _bytesMessage; - - final Map labels; - - _MessageImpl.withString(this._stringMessage, {this.labels}) - : _bytesMessage = null; - - _MessageImpl.withBytes(this._bytesMessage, {this.labels}) - : _stringMessage = null; - - List get asBytes => - _bytesMessage != null ? _bytesMessage : UTF8.encode(_stringMessage); - - String get asString => - _stringMessage != null ? _stringMessage : UTF8.decode(_bytesMessage); -} - -/// Message received using [Subscription.pull]. -/// -/// Contains the [pubsub.PubsubMessage] received from Pub/Sub, and -/// makes the message body and labels available on request. -/// -/// The labels map is lazily created when first accessed. 
-class _PullMessage implements Message { - final pubsub.PubsubMessage _message; - List _bytes; - String _string; - Map _labels; - - _PullMessage(this._message); - - List get asBytes { - if (_bytes == null) _bytes = _message.dataAsBytes; - return _bytes; - } - - String get asString { - if (_string == null) _string = UTF8.decode(_message.dataAsBytes); - return _string; - } - - Map get labels { - if (_labels == null) { - _labels = {}; - _message.label.forEach((label) { - _labels[label.key] = - label.numValue != null ? label.numValue : label.strValue; - }); - } - return _labels; - } -} - -/// Message received through Pub/Sub push delivery. -/// -/// Stores the message body received from Pub/Sub as the Base64 encoded string -/// from the wire protocol. -/// -/// The labels have been decoded into a Map. -class _PushMessage implements Message { - final String _base64Message; - final Map labels; - - _PushMessage(this._base64Message, this.labels); - - List get asBytes => CryptoUtils.base64StringToBytes(_base64Message); - - String get asString => UTF8.decode(asBytes); -} - -/// Pull event received from Pub/Sub pull delivery. -/// -/// Stores the pull response received from Pub/Sub. -class _PullEventImpl implements PullEvent { - /// Pub/Sub API object. - final _PubSubImpl _api; - /// Low level response received from Pub/Sub. - final pubsub.PullResponse _response; - final Message message; - - _PullEventImpl(this._api, response) - : this._response = response, - message = new _PullMessage(response.pubsubEvent.message); - - bool get isTruncated => _response.pubsubEvent.truncated; - - Future acknowledge() { - return _api._ack(_response.ackId, _response.pubsubEvent.subscription); - } - -} - -/// Push event received from Pub/Sub push delivery. -/// -/// decoded from JSON encoded push HTTP request body. -class _PushEventImpl implements PushEvent { - static const PREFIX = '/subscriptions/'; - final Message _message; - final String _subscriptionName; - - Message get message => _message; - - String get subscriptionName => _subscriptionName; - - _PushEventImpl(this._message, this._subscriptionName); - - factory _PushEventImpl.fromJson(String json) { - Map body = JSON.decode(json); - String data = body['message']['data']; - Map labels = new HashMap(); - body['message']['labels'].forEach((label) { - var key = label['key']; - var value = label['strValue']; - if (value == null) value = label['numValue']; - labels[key] = value; - }); - String subscription = body['subscription']; - // TODO(#1): Remove this when the push event subscription name is prefixed - // with '/subscriptions/'. 
- if (!subscription.startsWith(PREFIX)) { - subscription = PREFIX + subscription; - } - return new _PushEventImpl(new _PushMessage(data, labels), subscription); - } -} - -class _TopicImpl implements Topic { - final _PubSubImpl _api; - final pubsub.Topic _topic; - - _TopicImpl(this._api, this._topic); - - String get name { - assert(_topic.name.startsWith(_api._topicPrefix)); - return _topic.name.substring(_api._topicPrefix.length); - } - - String get project { - assert(_topic.name.startsWith(_api._topicPrefix)); - return _api.project; - } - - String get absoluteName => _topic.name; - - Future publish(Message message) { - return _api._publish(_topic.name, message.asBytes, message.labels); - } - - Future delete() => _api._deleteTopic(_topic.name); - - Future publishString(String message, {Map labels}) { - return _api._publish(_topic.name, UTF8.encode(message), labels); - } - - Future publishBytes(List message, {Map labels}) { - return _api._publish(_topic.name, message, labels); - } -} - -class _SubscriptionImpl implements Subscription { - final _PubSubImpl _api; - final pubsub.Subscription _subscription; - - _SubscriptionImpl(this._api, this._subscription); - - String get name { - assert(_subscription.name.startsWith(_api._subscriptionPrefix)); - return _subscription.name.substring(_api._subscriptionPrefix.length); - } - - String get project { - assert(_subscription.name.startsWith(_api._subscriptionPrefix)); - return _api.project; - } - - String get absoluteName => _subscription.name; - - Topic get topic { - var topic = new pubsub.Topic()..name = _subscription.topic; - return new _TopicImpl(_api, topic); - } - - Future delete() => _api._deleteSubscription(_subscription.name); - - Future pull({bool noWait: true}) { - return _api._pull(_subscription.name, noWait) - .then((response) { - return new _PullEventImpl(_api, response); - }).catchError((e) => null, - test: (e) => e is pubsub.DetailedApiRequestError && - e.status == 400); - } - - Uri get endpoint => null; - - bool get isPull => endpoint == null; - - bool get isPush => endpoint != null; - - Future updatePushConfiguration(Uri endpoint) { - return _api._modifyPushConfig(_subscription.name, endpoint); - } -} - -class _TopicPageImpl implements Page { - final _PubSubImpl _api; - final int _pageSize; - final String _nextPageToken; - final List items; - - _TopicPageImpl(this._api, - this._pageSize, - pubsub.ListTopicsResponse response) - : items = new List(response.topic.length), - _nextPageToken = response.nextPageToken { - for (int i = 0; i < response.topic.length; i++) { - items[i] = new _TopicImpl(_api, response.topic[i]); - } - } - - bool get isLast => _nextPageToken == null; - - Future> next({int pageSize}) { - if (isLast) return new Future.value(null); - if (pageSize == null) pageSize = this._pageSize; - - return _api._listTopics(pageSize, _nextPageToken).then((response) { - return new _TopicPageImpl(_api, pageSize, response); - }); - } -} - -class _SubscriptionPageImpl implements Page { - final _PubSubImpl _api; - final String _topic; - final int _pageSize; - final String _nextPageToken; - final List items; - - _SubscriptionPageImpl(this._api, - this._topic, - this._pageSize, - pubsub.ListSubscriptionsResponse response) - : items = new List(response.subscription != null - ? 
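The classes above cover the whole publish, pull and push surface of this Pub/Sub API. The sketch below is purely illustrative and only shows how client code strings those pieces together: the authenticated `client`, the project id and the topic/subscription names are assumptions, not part of the patch, and the push handler name is hypothetical.

    // Illustrative only: publish a message and consume it again through a
    // pull subscription, using the Pub/Sub surface shown above.
    import 'dart:async';
    import 'dart:convert';
    import 'dart:io';

    import 'package:gcloud/pubsub.dart';
    import 'package:http/http.dart' as http;

    Future pullRoundTrip(http.Client client, String project) {
      var pubsub = new PubSub(client, project);
      return pubsub.createTopic('example-topic').then((Topic topic) {
        return pubsub
            .createSubscription('example-sub', 'example-topic')
            .then((Subscription subscription) {
          return topic
              .publishString('Hello, world!', labels: {'count': 1})
              .then((_) => subscription.pull(noWait: false))
              .then((PullEvent event) {
            // pull() can still complete with null if nothing could be pulled.
            if (event == null) return null;
            print(event.message.asString);
            return event.acknowledge();
          });
        });
      });
    }

    // A push endpoint built on PushEvent.fromJson, in the spirit of the doc
    // comment above; `pushHandler` is a hypothetical name, and any 2xx
    // status acknowledges the delivery.
    void pushHandler(HttpRequest request) {
      request.transform(UTF8.decoder).join('').then((String body) {
        var event = new PushEvent.fromJson(body);
        print('${event.subscriptionName}: ${event.message.asString}');
        request.response
          ..statusCode = HttpStatus.OK
          ..close();
      });
    }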
response.subscription.length - : 0), - _nextPageToken = response.nextPageToken{ - if (response.subscription != null) { - for (int i = 0; i < response.subscription.length; i++) { - items[i] = new _SubscriptionImpl(_api, response.subscription[i]); - } - } - } - - bool get isLast => _nextPageToken == null; - - Future> next({int pageSize}) { - if (_nextPageToken == null) return new Future.value(null); - if (pageSize == null) pageSize = this._pageSize; - - return _api._listSubscriptions( - _topic, pageSize, _nextPageToken).then((response) { - return new _SubscriptionPageImpl(_api, _topic, pageSize, response); - }); - } -} \ No newline at end of file diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart deleted file mode 100644 index 95c53d64..00000000 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ /dev/null @@ -1,1044 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'dart:async'; -import 'dart:convert'; - -import 'package:crypto/crypto.dart' as crypto; -import 'package:http/http.dart' as http; -import 'package:unittest/unittest.dart'; - -import 'package:gcloud/pubsub.dart'; - -import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; - -import '../common.dart'; - -const String ROOT_PATH = '/pubsub/v1beta1/'; - -http.Client mockClient() => new MockClient(ROOT_PATH); - -main() { - group('api', () { - var badTopicNames = [ - '/', '/topics', '/topics/$PROJECT', '/topics/$PROJECT/', - '/topics/${PROJECT}x', '/topics/${PROJECT}x/']; - - var badSubscriptionNames = [ - '/', '/subscriptions', '/subscriptions/$PROJECT', - '/subscriptions/$PROJECT/', '/subscriptions/${PROJECT}x', - '/subscriptions/${PROJECT}x/']; - - group('topic', () { - var name = 'test-topic'; - var absoluteName = '/topics/$PROJECT/test-topic'; - - test('create', () { - var mock = mockClient(); - mock.register('POST', 'topics', expectAsync((request) { - var requestTopic = - new pubsub.Topic.fromJson(JSON.decode(request.body)); - expect(requestTopic.name, absoluteName); - return mock.respond(new pubsub.Topic()..name = absoluteName); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.createTopic(name).then(expectAsync((topic) { - expect(topic.name, name); - expect(topic.project, PROJECT); - expect(topic.absoluteName, absoluteName); - return api.createTopic(absoluteName).then(expectAsync((topic) { - expect(topic.name, name); - expect(topic.absoluteName, absoluteName); - })); - })); - }); - - test('create-error', () { - var mock = mockClient(); - var api = new PubSub(mock, PROJECT); - badTopicNames.forEach((name) { - expect(() => api.createTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { - expect(() => api.createTopic(name), throwsArgumentError); - }); - }); - - test('delete', () { - var mock = mockClient(); - mock.register( - 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); - expect(request.body.length, 0); - return mock.respondEmpty(); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.deleteTopic(name).then(expectAsync((result) { - expect(result, isNull); - return api.deleteTopic(absoluteName).then(expectAsync((topic) { - expect(result, isNull); - })); - })); - }); - - test('delete-error', () { - var mock = mockClient(); - var api = new PubSub(mock, 
PROJECT); - badTopicNames.forEach((name) { - expect(() => api.deleteTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { - expect(() => api.deleteTopic(name), throwsArgumentError); - }); - }); - - test('lookup', () { - var mock = mockClient(); - mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); - expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { - expect(topic.name, name); - expect(topic.project, PROJECT); - expect(topic.absoluteName, absoluteName); - return api.lookupTopic(absoluteName).then(expectAsync((topic) { - expect(topic.name, name); - expect(topic.absoluteName, absoluteName); - })); - })); - }); - - test('lookup-error', () { - var mock = mockClient(); - var api = new PubSub(mock, PROJECT); - badTopicNames.forEach((name) { - expect(() => api.lookupTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { - expect(() => api.lookupTopic(name), throwsArgumentError); - }); - }); - - group('query', () { - var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; - var defaultPageSize = 50; - - addTopics(pubsub.ListTopicsResponse response, int first, int count) { - response.topic = []; - for (int i = 0; i < count; i++) { - response.topic.add(new pubsub.Topic()..name = 'topic-${first + i}'); - } - } - - // Mock that expect/generates [n] topics in pages of page size - // [pageSize]. - registerQueryMock(mock, n, pageSize, [totalCalls]) { - var totalPages = (n + pageSize - 1) ~/ pageSize; - // No items still generate one request. - if (totalPages == 0) totalPages = 1; - // Can pass in total calls if this mock is overwritten before all - // expected pages are done, e.g. when testing errors. 
- if (totalCalls == null) { - totalCalls = totalPages; - } - var pageCount = 0; - mock.register('GET', 'topics', expectAsync((request) { - pageCount++; - expect(request.url.queryParameters['query'], query); - expect(request.url.queryParameters['maxResults'], '$pageSize'); - expect(request.body.length, 0); - if (pageCount > 1) { - expect(request.url.queryParameters['pageToken'], 'next-page'); - } - - var response = new pubsub.ListTopicsResponse(); - var first = (pageCount - 1) * pageSize + 1; - if (pageCount < totalPages) { - response.nextPageToken = 'next-page'; - addTopics(response, first, pageSize); - } else { - addTopics(response, first, n - (totalPages - 1) * pageSize); - } - return mock.respond(response); - }, count: totalCalls)); - } - - group('list', () { - Future q(count) { - var mock = mockClient(); - registerQueryMock(mock, count, 50); - - var api = new PubSub(mock, PROJECT); - return api.listTopics().listen( - expectAsync((_) => null, count: count)).asFuture(); - } - - test('simple', () { - return q(0) - .then((_) => q(1)) - .then((_) => q(1)) - .then((_) => q(49)) - .then((_) => q(50)) - .then((_) => q(51)) - .then((_) => q(99)) - .then((_) => q(100)) - .then((_) => q(101)) - .then((_) => q(170)); - }); - - test('immediate-pause-resume', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50); - - var api = new PubSub(mock, PROJECT); - api.listTopics().listen( - expectAsync(((_) => null), count: 70), - onDone: expectAsync(() => null)) - ..pause() - ..resume() - ..pause() - ..resume(); - }); - - test('pause-resume', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50); - - var api = new PubSub(mock, PROJECT); - var count = 0; - var subscription; - subscription = api.listTopics().listen( - expectAsync(((_) { - subscription..pause()..resume()..pause(); - if ((count % 2) == 0) { - subscription.resume(); - } else { - scheduleMicrotask(() => subscription.resume()); - } - return null; - }), count: 70), - onDone: expectAsync(() => null)) - ..pause(); - scheduleMicrotask(() => subscription.resume()); - }); - - test('immediate-cancel', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50, 1); - - var api = new PubSub(mock, PROJECT); - api.listTopics().listen( - (_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); - }); - - test('cancel', () { - var mock = mockClient(); - registerQueryMock(mock, 170, 50, 1); - - var api = new PubSub(mock, PROJECT); - var subscription; - subscription = api.listTopics().listen( - expectAsync((_) => subscription.cancel()), - onDone: () => throw 'Unexpected'); - }); - - test('error', () { - runTest(bool withPause) { - // Test error on first GET request. - var mock = mockClient(); - mock.register('GET', 'topics', expectAsync((request) { - return mock.respondError(500); - })); - var api = new PubSub(mock, PROJECT); - var subscription; - subscription = api.listTopics().listen( - (_) => throw 'Unexpected', - onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); - } - } - - runTest(false); - runTest(true); - }); - - test('error-2', () { - // Test error on second GET request. 
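Because listing is built on `StreamFromPages`, the result of `listTopics()` is an ordinary Dart `Stream`, which is exactly what the pause/resume/cancel/error tests above exercise against the mock. Against a real backend the consumption looks the same; a minimal sketch, assuming an authenticated `client`:

    // Minimal sketch: consuming listTopics() as a Stream. Pausing, resuming
    // and cancelling the subscription behave as for any other Stream.
    import 'package:gcloud/pubsub.dart';
    import 'package:http/http.dart' as http;

    void printTopics(http.Client client, String project) {
      var pubsub = new PubSub(client, project);
      pubsub.listTopics().listen((Topic topic) {
        print(topic.absoluteName);
      },
          onError: (e) => print('Listing topics failed: $e'),
          onDone: () => print('Done listing topics.'));
    }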
- void runTest(bool withPause) { - var mock = mockClient(); - registerQueryMock(mock, 51, 50, 1); - - var api = new PubSub(mock, PROJECT); - - int count = 0; - var subscription; - subscription = api.listTopics().listen( - expectAsync(((_) { - count++; - if (count == 50) { - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); - } - mock.clear(); - mock.register('GET', 'topics', expectAsync((request) { - return mock.respondError(500); - })); - } - return null; - }), count: 50), - onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); - } - - runTest(false); - runTest(true); - }); - }); - - group('page', () { - test('empty', () { - var mock = mockClient(); - registerQueryMock(mock, 0, 50); - - var api = new PubSub(mock, PROJECT); - return api.pageTopics().then(expectAsync((page) { - expect(page.items.length, 0); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - - mock.clear(); - registerQueryMock(mock, 0, 20); - return api.pageTopics(pageSize: 20).then(expectAsync((page) { - expect(page.items.length, 0); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); - })); - }); - - test('single', () { - var mock = mockClient(); - registerQueryMock(mock, 10, 50); - - var api = new PubSub(mock, PROJECT); - return api.pageTopics().then(expectAsync((page) { - expect(page.items.length, 10); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - - mock.clear(); - registerQueryMock(mock, 20, 20); - return api.pageTopics(pageSize: 20).then(expectAsync((page) { - expect(page.items.length, 20); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); - })); - }); - - test('multiple', () { - runTest(n, pageSize) { - var totalPages = (n + pageSize - 1) ~/ pageSize; - var pageCount = 0; - - var completer = new Completer(); - var mock = mockClient(); - registerQueryMock(mock, n, pageSize); - - handlePage(page) { - pageCount++; - expect(page.isLast, pageCount == totalPages); - expect(page.items.length, - page.isLast ? 
n - (totalPages - 1) * pageSize - : pageSize ); - page.next().then(expectAsync((page) { - if (page != null) { - handlePage(page); - } else { - expect(pageCount, totalPages); - completer.complete(); - } - })); - } - - var api = new PubSub(mock, PROJECT); - api.pageTopics(pageSize: pageSize).then(expectAsync(handlePage)); - - return completer.future; - } - - return runTest(70, 50) - .then((_) => runTest(99, 1)) - .then((_) => runTest(99, 50)) - .then((_) => runTest(99, 98)) - .then((_) => runTest(99, 99)) - .then((_) => runTest(99, 100)) - .then((_) => runTest(100, 1)) - .then((_) => runTest(100, 50)) - .then((_) => runTest(100, 100)) - .then((_) => runTest(101, 50)); - }); - }); - }); - }); - - group('subscription', () { - var name = 'test-subscription'; - var absoluteName = '/subscriptions/$PROJECT/test-subscription'; - var topicName = 'test-topic'; - var absoluteTopicName = '/topics/$PROJECT/test-topic'; - - test('create', () { - var mock = mockClient(); - mock.register('POST', 'subscriptions', expectAsync((request) { - var requestSubscription = - new pubsub.Subscription.fromJson(JSON.decode(request.body)); - expect(requestSubscription.name, absoluteName); - return mock.respond(new pubsub.Subscription()..name = absoluteName); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.createSubscription(name, topicName) - .then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.absoluteName, absoluteName); - return api.createSubscription(absoluteName, absoluteTopicName) - .then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.project, PROJECT); - expect(subscription.absoluteName, absoluteName); - })); - })); - }); - - test('create-error', () { - var mock = mockClient(); - var api = new PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { - expect(() => api.createSubscription(name, 'test-topic'), - throwsArgumentError); - }); - badTopicNames.forEach((name) { - expect(() => api.createSubscription('test-subscription', name), - throwsArgumentError); - }); - }); - - test('delete', () { - var mock = mockClient(); - mock.register( - 'DELETE', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); - expect(request.body.length, 0); - return mock.respondEmpty(); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.deleteSubscription(name).then(expectAsync((result) { - expect(result, isNull); - return api.deleteSubscription(absoluteName).then(expectAsync((topic) { - expect(result, isNull); - })); - })); - }); - - test('delete-error', () { - var mock = mockClient(); - var api = new PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { - expect(() => api.deleteSubscription(name), throwsArgumentError); - }); - badTopicNames.forEach((name) { - expect(() => api.deleteSubscription(name), throwsArgumentError); - }); - }); - - test('lookup', () { - var mock = mockClient(); - mock.register( - 'GET', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); - expect(request.body.length, 0); - return mock.respond(new pubsub.Subscription()..name = absoluteName); - }, count: 2)); - - var api = new PubSub(mock, PROJECT); - return api.lookupSubscription(name).then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.absoluteName, absoluteName); - return api.lookupSubscription(absoluteName) - 
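The paging contract these tests pin down is the one callers program against: `items` holds the current results, `isLast` marks the final page, and `next()` completes with `null` once the last page has been handed out. A sketch of draining all pages, again assuming an authenticated `client`:

    // Sketch: walking pageTopics() page by page, following the Page
    // contract exercised by the tests above.
    import 'dart:async';

    import 'package:gcloud/pubsub.dart';
    import 'package:http/http.dart' as http;

    Future printTopicsPaged(http.Client client, String project) {
      var pubsub = new PubSub(client, project);

      Future handlePage(page) {
        page.items.forEach((topic) => print(topic.name));
        if (page.isLast) return new Future.value();
        return page.next().then(handlePage);
      }

      return pubsub.pageTopics(pageSize: 50).then(handlePage);
    }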
.then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.project, PROJECT); - expect(subscription.absoluteName, absoluteName); - })); - })); - }); - - test('lookup-error', () { - var mock = mockClient(); - var api = new PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { - expect(() => api.lookupSubscription(name), throwsArgumentError); - }); - badTopicNames.forEach((name) { - expect(() => api.lookupSubscription(name), throwsArgumentError); - }); - }); - - group('query', () { - var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; - var topicQuery = - 'pubsub.googleapis.com/topic in (/topics/$PROJECT/topic)'; - var defaultPageSize = 50; - - addSubscriptions( - pubsub.ListSubscriptionsResponse response, int first, int count) { - response.subscription = []; - for (int i = 0; i < count; i++) { - response.subscription.add( - new pubsub.Subscription()..name = 'subscription-${first + i}'); - } - } - - - // Mock that expect/generates [n] subscriptions in pages of page size - // [pageSize]. - registerQueryMock(mock, n, pageSize, {String topic, int totalCalls}) { - var totalPages = (n + pageSize - 1) ~/ pageSize; - // No items still generate one request. - if (totalPages == 0) totalPages = 1; - // Can pass in total calls if this mock is overwritten before all - // expected pages are done, e.g. when testing errors. - if (totalCalls == null) { - totalCalls = totalPages; - } - var pageCount = 0; - mock.register('GET', 'subscriptions', expectAsync((request) { - pageCount++; - expect(request.url.queryParameters['query'], - topic == null ? query : topicQuery); - expect(request.url.queryParameters['maxResults'], '$pageSize'); - expect(request.body.length, 0); - if (pageCount > 1) { - expect(request.url.queryParameters['pageToken'], 'next-page'); - } - - var response = new pubsub.ListSubscriptionsResponse(); - var first = (pageCount - 1) * pageSize + 1; - if (pageCount < totalPages) { - response.nextPageToken = 'next-page'; - addSubscriptions(response, first, pageSize); - } else { - addSubscriptions( - response, first, n - (totalPages - 1) * pageSize); - } - return mock.respond(response); - }, count: totalCalls)); - } - - group('list', () { - Future q(topic, count) { - var mock = mockClient(); - registerQueryMock(mock, count, 50, topic: topic); - - var api = new PubSub(mock, PROJECT); - return api.listSubscriptions(topic).listen( - expectAsync((_) => null, count: count)).asFuture(); - } - - test('simple', () { - return q(null, 0) - .then((_) => q('topic', 0)) - .then((_) => q(null, 1)) - .then((_) => q('topic', 1)) - .then((_) => q(null, 10)) - .then((_) => q('topic', 10)) - .then((_) => q(null, 49)) - .then((_) => q('topic', 49)) - .then((_) => q(null, 50)) - .then((_) => q('topic', 50)) - .then((_) => q(null, 51)) - .then((_) => q('topic', 51)) - .then((_) => q(null, 99)) - .then((_) => q('topic', 99)) - .then((_) => q(null, 100)) - .then((_) => q('topic', 100)) - .then((_) => q(null, 101)) - .then((_) => q('topic', 101)) - .then((_) => q(null, 170)) - .then((_) => q('topic', 170)); - }); - - test('immediate-pause-resume', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50); - - var api = new PubSub(mock, PROJECT); - api.listSubscriptions().listen( - expectAsync(((_) => null), count: 70), - onDone: expectAsync(() => null)) - ..pause() - ..resume() - ..pause() - ..resume(); - }); - - test('pause-resume', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50); - - var api = new PubSub(mock, PROJECT); - var count = 0; - var 
subscription; - subscription = api.listSubscriptions().listen( - expectAsync(((_) { - subscription..pause()..resume()..pause(); - if ((count % 2) == 0) { - subscription.resume(); - } else { - scheduleMicrotask(() => subscription.resume()); - } - return null; - }), count: 70), - onDone: expectAsync(() => null)) - ..pause(); - scheduleMicrotask(() => subscription.resume()); - }); - - test('immediate-cancel', () { - var mock = mockClient(); - registerQueryMock(mock, 70, 50, totalCalls: 1); - - var api = new PubSub(mock, PROJECT); - api.listSubscriptions().listen( - (_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); - }); - - test('cancel', () { - var mock = mockClient(); - registerQueryMock(mock, 170, 50, totalCalls: 1); - - var api = new PubSub(mock, PROJECT); - var subscription; - subscription = api.listSubscriptions().listen( - expectAsync((_) => subscription.cancel()), - onDone: () => throw 'Unexpected'); - }); - - test('error', () { - runTest(bool withPause) { - // Test error on first GET request. - var mock = mockClient(); - mock.register('GET', 'subscriptions', expectAsync((request) { - return mock.respondError(500); - })); - var api = new PubSub(mock, PROJECT); - var subscription; - subscription = api.listSubscriptions().listen( - (_) => throw 'Unexpected', - onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); - } - } - - runTest(false); - runTest(true); - }); - - test('error-2', () { - runTest(bool withPause) { - // Test error on second GET request. - var mock = mockClient(); - registerQueryMock(mock, 51, 50, totalCalls: 1); - - var api = new PubSub(mock, PROJECT); - - int count = 0; - var subscription; - subscription = api.listSubscriptions().listen( - expectAsync(((_) { - count++; - if (count == 50) { - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); - } - mock.clear(); - mock.register( - 'GET', 'subscriptions', expectAsync((request) { - return mock.respondError(500); - })); - } - return null; - }), count: 50), - onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); - } - - runTest(false); - runTest(true); - }); - }); - - group('page', () { - emptyTest(String topic) { - var mock = mockClient(); - registerQueryMock(mock, 0, 50, topic: topic); - - var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions(topic: topic).then(expectAsync((page) { - expect(page.items.length, 0); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - - mock.clear(); - registerQueryMock(mock, 0, 20, topic: topic); - return api.pageSubscriptions(topic: topic, pageSize: 20) - .then(expectAsync((page) { - expect(page.items.length, 0); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); - })); - } - - test('empty', () { - emptyTest(null); - emptyTest('topic'); - }); - - singleTest(String topic) { - var mock = mockClient(); - registerQueryMock(mock, 10, 50, topic: topic); - - var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions(topic: topic).then(expectAsync((page) { - expect(page.items.length, 10); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - - mock.clear(); - registerQueryMock(mock, 20, 20, topic: topic); - return api.pageSubscriptions(topic: topic, pageSize: 20) - .then(expectAsync((page) { - expect(page.items.length, 20); - 
expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); - })); - } - - test('single', () { - singleTest(null); - singleTest('topic'); - }); - - multipleTest(n, pageSize, topic) { - var totalPages = (n + pageSize - 1) ~/ pageSize; - var pageCount = 0; - - var completer = new Completer(); - var mock = mockClient(); - registerQueryMock(mock, n, pageSize, topic: topic); - - handlingPage(page) { - pageCount++; - expect(page.isLast, pageCount == totalPages); - expect(page.items.length, - page.isLast ? n - (totalPages - 1) * pageSize - : pageSize ); - page.next().then((page) { - if (page != null) { - handlingPage(page); - } else { - expect(pageCount, totalPages); - completer.complete(); - } - }); - } - - var api = new PubSub(mock, PROJECT); - api.pageSubscriptions(topic: topic, pageSize: pageSize) - .then(handlingPage); - - return completer.future; - } - - test('multiple', () { - return multipleTest(70, 50, null) - .then((_) => multipleTest(99, 1, null)) - .then((_) => multipleTest(99, 50, null)) - .then((_) => multipleTest(99, 98, null)) - .then((_) => multipleTest(99, 99, null)) - .then((_) => multipleTest(99, 100, null)) - .then((_) => multipleTest(100, 1, null)) - .then((_) => multipleTest(100, 50, null)) - .then((_) => multipleTest(100, 100, null)) - .then((_) => multipleTest(101, 50, null)) - .then((_) => multipleTest(70, 50, 'topic')) - .then((_) => multipleTest(99, 1, 'topic')) - .then((_) => multipleTest(99, 50, 'topic')) - .then((_) => multipleTest(99, 98, 'topic')) - .then((_) => multipleTest(99, 99, 'topic')) - .then((_) => multipleTest(99, 100, 'topic')) - .then((_) => multipleTest(100, 1, 'topic')) - .then((_) => multipleTest(100, 50, 'topic')) - .then((_) => multipleTest(100, 100, 'topic')) - .then((_) => multipleTest(101, 50, 'topic')); - }); - }); - }); - }); - }); - - group('topic', () { - var name = 'test-topic'; - var absoluteName = '/topics/$PROJECT/test-topic'; - var message = 'Hello, world!'; - var messageBytes = UTF8.encode(message); - var messageBase64 = crypto.CryptoUtils.bytesToBase64(messageBytes); - var labels = {'a': 1, 'b': 'text'}; - - registerLookup(mock) { - mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); - expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); - })); - } - - registerPublish(mock, count, fn) { - mock.register('POST', 'topics/publish', expectAsync((request) { - var publishRequest = - new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); - return fn(publishRequest); - }, count: count)); - } - - test('publish', () { - var mock = mockClient(); - registerLookup(mock); - - var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { - mock.clear(); - registerPublish(mock, 4, ((request) { - expect(request.topic, absoluteName); - expect(request.message.data, messageBase64); - expect(request.message.label, isNull); - return mock.respondEmpty(); - })); - - return topic.publishString(message).then(expectAsync((result) { - expect(result, isNull); - return topic.publishBytes(messageBytes).then(expectAsync((result) { - expect(result, isNull); - return topic.publish( - new Message.withString(message)).then(expectAsync((result) { - expect(result, isNull); - return topic.publish( - new Message.withBytes( - messageBytes)).then(expectAsync((result) { - expect(result, isNull); - })); - })); - })); - })); - })); - }); - - test('publish-with-labels', () { - var mock = 
mockClient(); - registerLookup(mock); - - var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { - mock.clear(); - registerPublish(mock, 4, ((request) { - expect(request.topic, absoluteName); - expect(request.message.data, messageBase64); - expect(request.message.label, isNotNull); - expect(request.message.label.length, labels.length); - request.message.label.forEach((label) { - expect(labels.containsKey(label.key), isTrue); - if (label.numValue != null) { - expect(label.strValue, isNull); - expect(labels[label.key], int.parse(label.numValue)); - } else { - expect(label.strValue, isNotNull); - expect(labels[label.key], label.strValue); - } - }); - return mock.respondEmpty(); - })); - - return topic.publishString(message, labels: labels) - .then(expectAsync((result) { - expect(result, isNull); - return topic.publishBytes(messageBytes, labels: labels) - .then(expectAsync((result) { - expect(result, isNull); - return topic.publish( - new Message.withString(message, labels: labels)) - .then(expectAsync((result) { - expect(result, isNull); - return topic.publish( - new Message.withBytes(messageBytes, labels: labels)) - .then(expectAsync((result) { - expect(result, isNull); - })); - })); - })); - })); - })); - }); - - test('delete', () { - var mock = mockClient(); - mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); - expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); - })); - - var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { - expect(topic.name, name); - expect(topic.absoluteName, absoluteName); - - mock.register( - 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); - expect(request.body.length, 0); - return mock.respondEmpty(); - })); - - return topic.delete().then(expectAsync((result) { - expect(result, isNull); - })); - })); - }); - }); - - group('subscription', () { - var name = 'test-subscription'; - var absoluteName = '/subscriptions/$PROJECT/test-subscription'; - var topicName = 'test-topic'; - var absoluteTopicName = '/topics/$PROJECT/test-topic'; - - test('delete', () { - var mock = mockClient(); - mock.register( - 'GET', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); - expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); - })); - - var api = new PubSub(mock, PROJECT); - return api.lookupSubscription(name).then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.absoluteName, absoluteName); - - mock.register( - 'DELETE', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); - expect(request.body.length, 0); - return mock.respondEmpty(); - })); - - return subscription.delete().then(expectAsync((result) { - expect(result, isNull); - })); - })); - }); - }); - - group('push', () { - var relativeSubscriptionName = 'sgjesse-managed-vm/test-push-subscription'; - var absoluteSubscriptionName = '/subscriptions/$relativeSubscriptionName'; - - test('event', () { - var requestBody = -''' -{ - "message": { - "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", - "labels": [ - { - "key":"messageNo", - "numValue":30 - }, - { - "key":"test", - "strValue":"hello" - } - 
] - }, - "subscription":"$absoluteSubscriptionName" -} -'''; - var event = new PushEvent.fromJson(requestBody); - expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.labels['messageNo'], 30); - expect(event.message.labels['test'], 'hello'); - expect(event.subscriptionName, absoluteSubscriptionName); - }); - - test('event-short-subscription-name', () { - var requestBody = - ''' -{ - "message": { - "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", - "labels": [ - { - "key":"messageNo", - "numValue":30 - }, - { - "key":"test", - "strValue":"hello" - } - ] - }, - "subscription":"$relativeSubscriptionName" -} -'''; - var event = new PushEvent.fromJson(requestBody); - expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.labels['messageNo'], 30); - expect(event.message.labels['test'], 'hello'); - expect(event.subscriptionName, absoluteSubscriptionName); - }); - }); -} From 2e40c1fa3c694d501904778619c9adb46275ade8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 13:31:18 +0100 Subject: [PATCH 030/239] Add AclScope cmparison again Was lost while resolving a merge conflict. TBR=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//686653004 --- pkgs/gcloud/lib/storage.dart | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4c763fae..3295782a 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -261,6 +261,12 @@ abstract class AclScope { const AclScope._(this._type, this._id); + int get hashCode => _jenkinsHash([_type, _id]); + + bool operator==(Object other) { + return other is AclScope && _type == other._type && _id == other._id; + } + String toString() => 'AclScope($_storageEntity)'; String get _storageEntity; From a0ee4d461f73bf510bae225495751e698170dd4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 31 Oct 2014 14:16:12 +0100 Subject: [PATCH 031/239] Cache the calculated hash code R=lrn@google.com BUG= Review URL: https://codereview.chromium.org//692053002 --- pkgs/gcloud/lib/storage.dart | 42 +++++++++++++++------- pkgs/gcloud/test/storage/storage_test.dart | 18 +++++----- 2 files changed, 38 insertions(+), 22 deletions(-) diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 3295782a..5aadeb63 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -85,6 +85,7 @@ int _jenkinsHash(List e) { /// The access controls are described by [AclEntry] objects. class Acl { final List _entries; + int _cachedHashCode; /// The entries in the ACL. List get entries => new UnmodifiableListView(_entries); @@ -156,7 +157,11 @@ class Acl { return _entries.map((entry) => entry._toObjectAccessControl()).toList(); } - int get hashCode => _jenkinsHash(_entries); + int get hashCode { + return _cachedHashCode != null + ? _cachedHashCode + : _cachedHashCode = _jenkinsHash(_entries); + } bool operator==(Object other) { if (other is Acl) { @@ -181,8 +186,9 @@ class Acl { class AclEntry { final AclScope scope; final AclPermission permission; + int _cachedHashCode; - const AclEntry(this.scope, this.permission); + AclEntry(this.scope, this.permission); storage.BucketAccessControl _toBucketAccessControl() { var acl = new storage.BucketAccessControl(); @@ -198,7 +204,11 @@ class AclEntry { return acl; } - int get hashCode => _jenkinsHash([scope, permission]); + int get hashCode { + return _cachedHashCode != null + ? 
_cachedHashCode + : _cachedHashCode = _jenkinsHash([scope, permission]); + } bool operator==(Object other) { return other is AclEntry && @@ -222,6 +232,8 @@ class AclEntry { /// /// See https://cloud.google.com/storage/docs/accesscontrol for more details. abstract class AclScope { + int _cachedHashCode; + /// ACL type for scope representing a Google Storage id. static const int _TYPE_STORAGE_ID = 0; @@ -254,14 +266,18 @@ abstract class AclScope { final int _type; /// ACL scope for all authenticated users. - static const allAuthenticated = const AllAuthenticatedScope(); + static AllAuthenticatedScope allAuthenticated = new AllAuthenticatedScope(); /// ACL scope for all users. - static const allUsers = const AllUsersScope(); + static AllUsersScope allUsers = new AllUsersScope(); - const AclScope._(this._type, this._id); + AclScope._(this._type, this._id); - int get hashCode => _jenkinsHash([_type, _id]); + int get hashCode { + return _cachedHashCode != null + ? _cachedHashCode + : _cachedHashCode = _jenkinsHash([_type, _id]); + } bool operator==(Object other) { return other is AclScope && _type == other._type && _id == other._id; @@ -277,7 +293,7 @@ abstract class AclScope { /// The [storageId] is a string of 64 hexadecimal digits that identifies a /// specific Google account holder or a specific Google group. class StorageIdScope extends AclScope { - const StorageIdScope(String storageId) + StorageIdScope(String storageId) : super._(AclScope._TYPE_STORAGE_ID, storageId); /// Google Storage ID. @@ -288,7 +304,7 @@ class StorageIdScope extends AclScope { /// An ACL scope for an entity identified by an individual email address. class AccountScope extends AclScope { - const AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email); + AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email); /// Email address. String get email => _id; @@ -298,7 +314,7 @@ class AccountScope extends AclScope { /// An ACL scope for an entity identified by an Google Groups email. class GroupScope extends AclScope { - const GroupScope(String group): super._(AclScope._TYPE_GROUP, group); + GroupScope(String group): super._(AclScope._TYPE_GROUP, group); /// Group name. String get group => _id; @@ -308,7 +324,7 @@ class GroupScope extends AclScope { /// An ACL scope for an entity identified by a domain name. class DomainScope extends AclScope { - const DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain); + DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain); /// Domain name. String get domain => _id; @@ -341,7 +357,7 @@ class OpaqueScope extends AclScope { /// ACL scope for a all authenticated users. class AllAuthenticatedScope extends AclScope { - const AllAuthenticatedScope() + AllAuthenticatedScope() : super._(AclScope._TYPE_ALL_AUTHENTICATED, null); String get _storageEntity => 'allAuthenticatedUsers'; @@ -349,7 +365,7 @@ class AllAuthenticatedScope extends AclScope { /// ACL scope for a all users. 
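Together the two changes above (the restored comparison operators and the now-cached hash codes) make scopes and entries behave as plain value objects, which is what the ACL comparison tests below rely on. A small illustration; the account and group names are placeholders:

    // Illustration: AclScope/AclEntry value equality and hashing after the
    // two patches above. The names used here are placeholders.
    import 'package:gcloud/storage.dart';

    void aclValueSemantics() {
      var read = new AclEntry(
          new AccountScope('user@example.com'), AclPermission.READ);
      var sameRead = new AclEntry(
          new AccountScope('user@example.com'), AclPermission.READ);
      assert(read == sameRead);                   // Compared by value.
      assert(read.hashCode == sameRead.hashCode); // Hash computed once, cached.

      var entries = new Set()
        ..add(read)
        ..add(sameRead) // Deduplicated thanks to == and hashCode.
        ..add(new AclEntry(new GroupScope('dart'), AclPermission.WRITE))
        ..add(new AclEntry(AclScope.allUsers, AclPermission.READ));
      assert(entries.length == 3);
    }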
class AllUsersScope extends AclScope { - const AllUsersScope(): super._(AclScope._TYPE_ALL_USERS, null); + AllUsersScope(): super._(AclScope._TYPE_ALL_USERS, null); String get _storageEntity => 'allUsers'; } diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index a27fb1e3..184a6596 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -995,15 +995,15 @@ main() { }); group('acl', () { - const id = const StorageIdScope('1234567890'); - const user = const AccountScope('sgjesse@google.com'); - const group = const GroupScope('dart'); - const domain = const DomainScope('dartlang.org'); - - const userRead = const AclEntry(user, AclPermission.READ); - const groupWrite = const AclEntry(group, AclPermission.WRITE); - const domainFullControl = - const AclEntry(domain, AclPermission.FULL_CONTROL); + var id = new StorageIdScope('1234567890'); + var user = new AccountScope('sgjesse@google.com'); + var group = new GroupScope('dart'); + var domain = new DomainScope('dartlang.org'); + + var userRead = new AclEntry(user, AclPermission.READ); + var groupWrite = new AclEntry(group, AclPermission.WRITE); + var domainFullControl = + new AclEntry(domain, AclPermission.FULL_CONTROL); test('compare-scope', () { expect(id, new StorageIdScope('1234567890')); From e3b8bdcce52677bcb93d538d4269f2bcef1c22bb Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 31 Oct 2014 14:39:43 +0100 Subject: [PATCH 032/239] Workaround for possible bug in Apiary Datastore Server R=sgjesse@google.com Review URL: https://codereview.chromium.org//694713003 --- pkgs/gcloud/lib/src/datastore_impl.dart | 33 +++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 1638d341..36a91dee 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -20,6 +20,10 @@ class TransactionImpl implements datastore.Transaction { } class DatastoreImpl implements datastore.Datastore { + static const List Scopes = const [ + api.DatastoreApi.DatastoreScope + ]; + final api.DatastoreApi _api; final String _project; @@ -605,6 +609,10 @@ class QueryPageImpl implements Page { '(${request.query.limit}) was.'); } + + // FIXME: TODO: Big hack! + // It looks like Apiary/Atlas is currently broken. + /* if (limit != null && returnedEntities.length < batchLimit && response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT') { @@ -612,6 +620,7 @@ class QueryPageImpl implements Page { 'Server returned response with less entities then the limit was, ' 'but signals there are more results after the limit.'); } + */ // In case a limit was specified, we need to subtraction the number of // entities we already got. @@ -621,8 +630,28 @@ class QueryPageImpl implements Page { remainingEntities = limit - returnedEntities.length; } - bool isLast = ((limit != null && remainingEntities == 0) || - response.batch.moreResults == 'NO_MORE_RESULTS'); + // If the server signals there are more entities and we either have no + // limit or our limit has not been reached, we set `moreBatches` to + // `true`. 
+ bool moreBatches = + (remainingEntities == null || remainingEntities > 0) && + response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT'; + + bool gotAll = limit != null && remainingEntities == 0; + bool noMore = response.batch.moreResults == 'NO_MORE_RESULTS'; + bool isLast = gotAll || noMore; + + // As a sanity check, we assert that `moreBatches XOR isLast`. + assert (isLast != moreBatches); + + // FIXME: TODO: Big hack! + // It looks like Apiary/Atlas is currently broken. + if (moreBatches && returnedEntities.length == 0) { + print('Warning: Api to Google Cloud Datastore returned bogus response. ' + 'Trying a workaround.'); + isLast = true; + moreBatches = false; + } if (!isLast && response.batch.endCursor == null) { throw new datastore.DatastoreError( From fa78dc5c7b3b54c0185649b32f9b18dc59bf535a Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 3 Nov 2014 13:12:31 +0100 Subject: [PATCH 033/239] Enable end2end tests for datastore/db R=sgjesse@google.com Review URL: https://codereview.chromium.org//698563002 --- pkgs/gcloud/.status | 14 ++- pkgs/gcloud/lib/src/datastore_impl.dart | 5 +- pkgs/gcloud/lib/storage.dart | 3 +- pkgs/gcloud/test/common.dart | 1 - pkgs/gcloud/test/common_e2e.dart | 109 ++++++++++++++++++ ...ore_test.dart => datastore_test_impl.dart} | 49 ++++++-- .../e2e/{db_test.dart => db_test_impl.dart} | 32 +++-- ...del_test.dart => metamodel_test_impl.dart} | 5 +- pkgs/gcloud/test/db_all_e2e_test.dart | 45 ++++++++ pkgs/gcloud/test/storage/e2e_test.dart | 99 ++-------------- pkgs/gcloud/test/storage/storage_test.dart | 1 + 11 files changed, 248 insertions(+), 115 deletions(-) create mode 100644 pkgs/gcloud/test/common_e2e.dart rename pkgs/gcloud/test/datastore/e2e/{datastore_test.dart => datastore_test_impl.dart} (96%) rename pkgs/gcloud/test/db/e2e/{db_test.dart => db_test_impl.dart} (95%) rename pkgs/gcloud/test/db/e2e/{metamodel_test.dart => metamodel_test_impl.dart} (95%) create mode 100644 pkgs/gcloud/test/db_all_e2e_test.dart diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index fd22c860..da4c517a 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -2,15 +2,19 @@ */*/packages/*: Skip */*/*/packages/*: Skip -build/test/datastore/e2e/*: Skip -build/test/db/e2e/*: Skip - -test/datastore/e2e/*: Skip -test/db/e2e/*: Skip +# This test is slow because +# - eventual consistency forces us to put in sleep()s +# - it does e2e testing +# - it combines several tests to avoid concurrent tests touching the same data +build/test/db_all_e2e_test: Slow, Pass +test/db_all_e2e_test: Slow, Pass [ $browser ] build/test/storage/e2e_test: Skip test/storage/e2e_test: Skip +build/test/db_all_e2e_test: Skip +test/db_all_e2e_test: Skip + [ $compiler == dart2js ] *: Skip diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 36a91dee..aaca2a38 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -20,8 +20,9 @@ class TransactionImpl implements datastore.Transaction { } class DatastoreImpl implements datastore.Datastore { - static const List Scopes = const [ - api.DatastoreApi.DatastoreScope + static const List SCOPES = const [ + api.DatastoreApi.DatastoreScope, + api.DatastoreApi.UserinfoEmailScope, ]; final api.DatastoreApi _api; diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 5aadeb63..9fd1a344 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -471,7 +471,8 @@ abstract class BucketInfo { /// Access to Cloud 
Storage abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. - static const Scopes = const [storage.StorageApi.DevstorageFullControlScope]; + static const List SCOPES = + const [storage.StorageApi.DevstorageFullControlScope]; /// Initializes access to cloud storage. factory Storage(http.Client client, String project) = _StorageImpl; diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index e71ee3e2..acac02aa 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -12,7 +12,6 @@ import 'package:http_parser/http_parser.dart' as http_parser; import 'package:mime/mime.dart' as mime; import 'package:unittest/unittest.dart'; -const PROJECT = 'test-project'; const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; const RESPONSE_HEADERS = const { diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart new file mode 100644 index 00000000..976eee54 --- /dev/null +++ b/pkgs/gcloud/test/common_e2e.dart @@ -0,0 +1,109 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.test.common_e2e; + +import 'dart:async'; +import 'dart:io'; + +import 'package:unittest/unittest.dart'; +import 'package:googleapis_auth/auth_io.dart' as auth; +import 'package:http/http.dart' as http; + +import 'common.dart'; + +const PROJECT = 'test-project'; + +// Enviroment variables for specifying the cloud project to use and the +// location of the service account key for that project. +const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; +const String SERVICE_KEY_LOCATION_ENV = 'GCLOUD_E2E_TEST_KEY'; + +// Default project and service key location used when running on the package +// bot. +const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; +const String DEFAULT_KEY_LOCATION = + 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; + +bool onBot() { + // When running on the package-bot the current user is chrome-bot. + var envName; + if (Platform.isWindows) { + envName = 'USERNAME'; + } else { + envName = 'USER'; + } + return Platform.environment[envName] == 'chrome-bot'; +} + +// Get the service key from the specified location. +Future serviceKeyJson(String serviceKeyLocation) { + if (!serviceKeyLocation.startsWith('gs://')) { + throw new Exception('Service key location must start with gs://'); + } + var future; + if (onBot()) { + future = Process.run( + 'python', ['third_party/gsutil/gsutil', 'cat', serviceKeyLocation], + runInShell: true); + } else { + var gsutil = Platform.isWindows ? 'gsutil.cmd' : 'gsutil'; + future = Process.run(gsutil, ['cat', serviceKeyLocation]); + } + return future.then((result) { + if (result.exitCode != 0) { + throw new Exception('Failed to run gsutil, ${result.stderr}'); + } + return result.stdout; + }); +} + +typedef Future AuthCallback(String project, http.Client client); + +Future withAuthClient(List scopes, + AuthCallback callback, + {bool trace: false}) { + String project = Platform.environment[PROJECT_ENV]; + String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; + + if (!onBot() && (project == null || serviceKeyLocation == null)) { + throw new StateError( + 'Envoronment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' + 'required when not running on the package bot'); + } + + project = project != null ? 
project : DEFAULT_PROJECT; + serviceKeyLocation = + serviceKeyLocation != null ? serviceKeyLocation : DEFAULT_KEY_LOCATION; + + return serviceKeyJson(serviceKeyLocation).then((keyJson) { + var creds = new auth.ServiceAccountCredentials.fromJson(keyJson); + return auth.clientViaServiceAccount(creds, scopes).then((client) { + if (trace) client = new TraceClient(client); + return callback(project, client).whenComplete(() => client.close()); + }); + }); +} + +Future runE2EUnittest(Function callback) { + var config = new E2EConfiguration(); + + unittestConfiguration = config; + callback(); + + return config.done; +} + +class E2EConfiguration extends SimpleConfiguration { + final Completer _completer = new Completer(); + + Future get done => _completer.future; + + onDone(success) { + new Future.sync(() { + super.onDone(success); + }).then((_) => _completer.complete(_)) + .catchError((error, stack) => _completer.completeError(error, stack)); + } +} diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart similarity index 96% rename from pkgs/gcloud/test/datastore/e2e/datastore_test.dart rename to pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 410582b4..ef550662 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -30,12 +30,23 @@ library datastore_test; import 'dart:async'; import 'package:gcloud/datastore.dart'; +import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import 'package:gcloud/common.dart'; import 'package:unittest/unittest.dart'; import '../error_matchers.dart'; import 'utils.dart'; +import '../../common_e2e.dart'; + +// Note: +// Non-ancestor queries (i.e. queries not lookups) result in index scans. +// The index tables are updated in a "eventually consistent" way. +// +// So this can make tests flaky, the index updates take longer than the +// following constant. +const INDEX_UPDATE_DELAY = const Duration(seconds: 10); + Future sleep(Duration duration) { var completer = new Completer(); new Timer(duration, completer.complete); @@ -717,14 +728,6 @@ runTests(Datastore datastore) { var indexedEntity = sorted.where(indexFilterMatches).toList(); expect(indexedEntity.length, equals(1)); - // Note: - // Non-ancestor queries (i.e. queries not lookups) result in index scans. - // The index tables are updated in a "eventually consistent" way. - // - // So this can make tests flaky, the index updates take longer than the - // following constant. - var INDEX_UPDATE_DELAY = const Duration(seconds: 10); - var filters = [ new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), @@ -1011,3 +1014,33 @@ runTests(Datastore datastore) { }); }); } + +Future cleanupDB(Datastore db) { + // cleanup() will call itself again as long as the DB is not clean. 
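`withAuthClient` hides the service-account key lookup and the `googleapis_auth` flow behind a single callback, so the end-to-end suites only need to pick their scopes and build their API objects. Roughly, a datastore-backed suite can wrap it like this; the relative import path and the wrapper name are illustrative, not taken from the patch:

    // Rough sketch of building a db.DatastoreDB inside withAuthClient. The
    // import path and helper name are illustrative only.
    import 'dart:async';

    import 'package:gcloud/db.dart' as db;
    import 'package:gcloud/src/datastore_impl.dart' as datastore_impl;

    import 'common_e2e.dart';

    Future withDatastoreDB(Future callback(db.DatastoreDB store)) {
      return withAuthClient(datastore_impl.DatastoreImpl.SCOPES,
          (String project, httpClient) {
        var datastore =
            new datastore_impl.DatastoreImpl(httpClient, 's~$project');
        return callback(new db.DatastoreDB(datastore));
      });
    }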
+ cleanup() { + var q = new Query(limit: 500); + return consumePages((_) => db.query(q)).then((List entities) { + entities = entities.where((entity) { + return !entity.key.elements[0].kind.contains('__'); + }).toList(); + + if (entities.length == 0) return null; + + print('[cleanupDB]: Removing left-over ${entities.length} entities'); + var deletes = entities.map((e) => e.key).toList(); + return db.commit(deletes: deletes).then((_) => cleanup()); + }); + } + return cleanup(); +} + +main() { + var scopes = datastore_impl.DatastoreImpl.SCOPES; + + withAuthClient(scopes, (String project, httpClient) { + var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); + return cleanupDB(datastore).then((_) { + return runE2EUnittest(() => runTests(datastore)); + }); + }); +} diff --git a/pkgs/gcloud/test/db/e2e/db_test.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart similarity index 95% rename from pkgs/gcloud/test/db/e2e/db_test.dart rename to pkgs/gcloud/test/db/e2e/db_test_impl.dart index 490027bd..32abf45a 100644 --- a/pkgs/gcloud/test/db/e2e/db_test.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -47,8 +47,19 @@ library db_test; import 'dart:async'; import 'package:unittest/unittest.dart'; - import 'package:gcloud/db.dart' as db; +import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; + +import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; +import '../../common_e2e.dart'; + +// Note: +// Non-ancestor queries (i.e. queries not lookups) result in index scans. +// The index tables are updated in a "eventually consistent" way. +// +// So this can make tests flaky, if the index updates take longer than the +// following constant. +const INDEX_UPDATE_DELAY = const Duration(seconds: 10); @db.Kind() class Person extends db.Model { @@ -449,14 +460,6 @@ runTests(db.DatastoreDB store) { var barUsers = users.where( (User u) => u.languages.contains('bar')).toList(); - // Note: - // Non-ancestor queries (i.e. queries not lookups) result in index scans. - // The index tables are updated in a "eventually consistent" way. - // - // So this can make tests flaky, if the index updates take longer than the - // following constant. 
- var INDEX_UPDATE_DELAY = const Duration(seconds: 5); - var allInserts = [] ..addAll(users) ..addAll(expandoPersons); @@ -610,3 +613,14 @@ runTests(db.DatastoreDB store) { }); }); } + +main() { + var scopes = datastore_impl.DatastoreImpl.SCOPES; + + withAuthClient(scopes, (String project, httpClient) { + var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); + return datastore_test.cleanupDB(datastore).then((_) { + return runE2EUnittest(() => runTests(new db.DatastoreDB(datastore))); + }); + }); +} diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart similarity index 95% rename from pkgs/gcloud/test/db/e2e/metamodel_test.dart rename to pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 77eb6c99..14c7e9ce 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -59,7 +59,8 @@ runTests(datastore, db.DatastoreDB store) { return datastore.commit(inserts: entities).then((_) { return sleep(const Duration(seconds: 10)).then((_) { var namespaceQuery = store.query(Namespace); - return namespaceQuery.run().then((List namespaces) { + return namespaceQuery.run().toList() + .then((List namespaces) { expect(namespaces.length, 3); expect(namespaces, contains(cond((ns) => ns.name == null))); expect(namespaces, @@ -71,7 +72,7 @@ runTests(datastore, db.DatastoreDB store) { for (var namespace in namespaces) { var partition = store.newPartition(namespace.name); var kindQuery = store.query(Kind, partition: partition); - futures.add(kindQuery.run().then((List kinds) { + futures.add(kindQuery.run().toList().then((List kinds) { expect(kinds.length, greaterThanOrEqualTo(2)); if (namespace.name == null) { expect(kinds, contains(cond((k) => k.name == 'NullKind'))); diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart new file mode 100644 index 00000000..3d231e18 --- /dev/null +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -0,0 +1,45 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library gcloud.test.db_all_test; + +import 'dart:async'; + +import 'package:gcloud/db.dart' as db; +import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; +import 'package:unittest/unittest.dart'; + +import 'db/e2e/db_test_impl.dart' as db_test; +import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; +import 'datastore/e2e/datastore_test_impl.dart' as datastore_test; + + +import 'common_e2e.dart'; + +main() { + var scopes = datastore_impl.DatastoreImpl.SCOPES; + + withAuthClient(scopes, (String project, httpClient) { + var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); + var datastoreDB = new db.DatastoreDB(datastore); + + return datastore_test.cleanupDB(datastore).then((_) { + return runE2EUnittest(() { + datastore_test.runTests(datastore); + + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); + }); + + db_test.runTests(datastoreDB); + + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); + }); + + db_metamodel_test.runTests(datastore, datastoreDB); + }); + }); + }); +} diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 3cda6da6..f6c52ecf 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -5,82 +5,12 @@ library gcloud.storage; import 'dart:async'; -import 'dart:io'; import 'package:gcloud/storage.dart'; import 'package:googleapis/common/common.dart' as common; -import 'package:googleapis_auth/auth_io.dart' as auth; import 'package:unittest/unittest.dart'; -import '../common.dart'; - -// Enviroment variables for specifying the cloud project to use and the -// location of the service account key for that project. -const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; -const String SERVICE_KEY_LOCATION_ENV = 'GCLOUD_E2E_TEST_KEY'; - -// Default project and service key location used when running on the package -// bot. -const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; -const String DEFAULT_KEY_LOCATION = - 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; - -bool onBot() { - // When running on the package-bot the current user is chrome-bot. - var envName; - if (Platform.isWindows) { - envName = 'USERNAME'; - } else { - envName = 'USER'; - } - return Platform.environment[envName] == 'chrome-bot'; -} - -// Get the service key from the specified location. -Future serviceKeyJson(String serviceKeyLocation) { - if (!serviceKeyLocation.startsWith('gs://')) { - throw new Exception('Service key location must start with gs://'); - } - var future; - if (onBot()) { - future = Process.run( - 'python', ['third_party/gsutil/gsutil', 'cat', serviceKeyLocation], - runInShell: true); - } else { - var gsutil = Platform.isWindows ? 'gsutil.cmd' : 'gsutil'; - future = Process.run(gsutil, ['cat', serviceKeyLocation]); - } - return future.then((result) { - if (result.exitCode != 0) { - throw new Exception('Failed to run gsutil, ${result.stderr}'); - } - return result.stdout; - }); -} - -Future connect({bool trace: false}) { - String project = Platform.environment[PROJECT_ENV]; - String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; - - if (!onBot() && (project == null || serviceKeyLocation == null)) { - throw new StateError( - 'Envoronment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' - 'required when not running on the package bot'); - } - - project = project != null ? 
project : DEFAULT_PROJECT; - serviceKeyLocation = - serviceKeyLocation != null ? serviceKeyLocation : DEFAULT_KEY_LOCATION; - - return serviceKeyJson(serviceKeyLocation).then((keyJson) { - var creds = new auth.ServiceAccountCredentials.fromJson(keyJson); - return auth.clientViaServiceAccount(creds, Storage.Scopes) - .then((client) { - if (trace) client = new TraceClient(client); - return new Storage(client, project); - }); - }); -} +import '../common_e2e.dart'; String generateBucketName() { var id = new DateTime.now().millisecondsSinceEpoch; @@ -318,25 +248,20 @@ runTests(Storage storage, Bucket testBucket) { }); } -class E2EConfiguration extends SimpleConfiguration { - Storage storage; - final String testBucketName; - E2EConfiguration(this.storage, this.testBucketName): super(); +main() { + withAuthClient(Storage.SCOPES, (String project, httpClient) { + var testBucket = generateBucketName(); - onDone(success) { - storage.deleteBucket(testBucketName) - .whenComplete(() => super.onDone(success)); - } -} + // Share the same storage connection for all tests. + var storage = new Storage(httpClient, project); -main() { - // Share the same storage connection for all tests. - connect(trace: false).then((Storage storage) { - var bucketName = generateBucketName(); - unittestConfiguration = new E2EConfiguration(storage, bucketName); // Create a shared bucket for all object tests. - storage.createBucket(bucketName).then((result) { - runTests(storage, storage.bucket(bucketName)); + return storage.createBucket(testBucket).then((_) { + return runE2EUnittest(() { + runTests(storage, storage.bucket(testBucket)); + }).whenComplete(() { + storage.deleteBucket(testBucket); + }); }); }); } diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 184a6596..5d5068ac 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -16,6 +16,7 @@ import 'package:googleapis/storage/v1.dart' as storage; import 'package:googleapis/common/common.dart' as common; import '../common.dart'; +import '../common_e2e.dart'; const String ROOT_PATH = '/storage/v1/'; From c6064c1a753bf99888404750111497af1f90b5d7 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 3 Nov 2014 14:01:47 +0100 Subject: [PATCH 034/239] Fix metamodel.dart: Use model's static member R=sgjesse@google.com Review URL: https://codereview.chromium.org//696263002 --- pkgs/gcloud/lib/db/metamodel.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index 9571066c..81ff5b76 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -12,7 +12,7 @@ class Namespace extends db.ExpandoModel { String get name { // The default namespace will be reported with id 1. 
- if (id == NamespaceDescription.EmptyNamespaceId) return null; + if (id == Namespace.EmptyNamespaceId) return null; return id; } } From 39d8c902c18f90fc722e18f67597b51c21de6799 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 3 Nov 2014 14:04:01 +0100 Subject: [PATCH 035/239] Prepare for publishing v0.1.0 R=sgjesse@google.com Review URL: https://codereview.chromium.org//697983002 --- pkgs/gcloud/pubspec.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ad7f62fd..f76e2e16 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,6 +1,8 @@ name: gcloud -version: 0.0.1-dev +version: 0.1.0 +author: Dart Team description: Dart gcloud APIs +homepage: http://www.dartlang.org environment: sdk: '>=1.5.0 <2.0.0' dependencies: @@ -10,7 +12,7 @@ dependencies: http: '>=0.11.0 <0.12.0' dev_dependencies: http_parser: '>=0.0.2+5 <0.1.0' - googleapis_auth: any + googleapis_auth: '>=0.1.1 <0.2.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: From 4ce20b42ed7e6cd80a5c7ad3f75222a6e67aa508 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 3 Nov 2014 15:09:32 +0100 Subject: [PATCH 036/239] Increase delay when after eventually consistent operations in datastore/storage Bucket deletion as well as datastore queries are relying on eventually consistent actions being propagated. This CL increases timeouts. Furthermore: Disable storage_test on browsers, since it uses dart:io transitively. BUG= R=sgjesse@google.com Review URL: https://codereview.chromium.org//695533003 --- pkgs/gcloud/.status | 4 ++++ pkgs/gcloud/test/common_e2e.dart | 20 +++++++++++++++++++ .../datastore/e2e/datastore_test_impl.dart | 8 -------- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 8 -------- pkgs/gcloud/test/storage/e2e_test.dart | 6 +++++- 5 files changed, 29 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index da4c517a..89f3c69c 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -16,5 +16,9 @@ test/storage/e2e_test: Skip build/test/db_all_e2e_test: Skip test/db_all_e2e_test: Skip +# Imports common_e2e.dart, which uses dart:io +build/test/storage/storage_test: Skip +test/storage/storage_test: Skip + [ $compiler == dart2js ] *: Skip diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 976eee54..daabd595 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -26,6 +26,26 @@ const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; const String DEFAULT_KEY_LOCATION = 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; +// Used for db/datastore e2e tests: +// +// Non-ancestor queries (i.e. queries not lookups) result in index scans. +// The index tables are updated in a "eventually consistent" way. +// +// So this can make tests flaky, if the index updates take longer than the +// following constant. +const INDEX_UPDATE_DELAY = const Duration(seconds: 20); + +// Used for storage e2e tests: +// +// List operations on buckets are eventually consistent. Bucket deletion is +// also dependent on list operations to ensure the bucket is empty before +// deletion. +// +// So this can make tests flaky. The following delay is introduced as an +// attempt to account for that. +const STORAGE_LIST_DELAY = const Duration(seconds: 5); + + bool onBot() { // When running on the package-bot the current user is chrome-bot. 
var envName; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index ef550662..685840f3 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -39,14 +39,6 @@ import 'utils.dart'; import '../../common_e2e.dart'; -// Note: -// Non-ancestor queries (i.e. queries not lookups) result in index scans. -// The index tables are updated in a "eventually consistent" way. -// -// So this can make tests flaky, the index updates take longer than the -// following constant. -const INDEX_UPDATE_DELAY = const Duration(seconds: 10); - Future sleep(Duration duration) { var completer = new Completer(); new Timer(duration, completer.complete); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 32abf45a..529b64f9 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -53,14 +53,6 @@ import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; import '../../common_e2e.dart'; -// Note: -// Non-ancestor queries (i.e. queries not lookups) result in index scans. -// The index tables are updated in a "eventually consistent" way. -// -// So this can make tests flaky, if the index updates take longer than the -// following constant. -const INDEX_UPDATE_DELAY = const Duration(seconds: 10); - @db.Kind() class Person extends db.Model { @db.StringProperty() diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index f6c52ecf..fabbf48d 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -260,7 +260,11 @@ main() { return runE2EUnittest(() { runTests(storage, storage.bucket(testBucket)); }).whenComplete(() { - storage.deleteBucket(testBucket); + // Deleting a bucket relies on eventually consistent behaviour, hence + // the delay in attempt to prevent test flakiness. 
+ return new Future.delayed(STORAGE_LIST_DELAY, () { + return storage.deleteBucket(testBucket); + }); }); }); }); From ebc0e86264ed9307b870cb7e632544b8c7151514 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 6 Nov 2014 14:57:54 +0100 Subject: [PATCH 037/239] Widen pubspec version constraints for googleapis{,_auth,_beta} R=sgjesse@google.com Review URL: https://codereview.chromium.org//705073002 --- pkgs/gcloud/pubspec.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index f76e2e16..e8c30a78 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -7,12 +7,12 @@ environment: sdk: '>=1.5.0 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.4.0' - googleapis_beta: '>=0.3.0 <0.5.0' + googleapis: '>=0.2.0 <0.5.0' + googleapis_beta: '>=0.3.0 <0.6.0' http: '>=0.11.0 <0.12.0' dev_dependencies: http_parser: '>=0.0.2+5 <0.1.0' - googleapis_auth: '>=0.1.1 <0.2.0' + googleapis_auth: '>=0.1.1 <0.3.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: From 1925660186a5b9d4a47f3f84a73d739114e23c89 Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Thu, 6 Nov 2014 16:13:58 +0100 Subject: [PATCH 038/239] Update models.dart --- pkgs/gcloud/lib/src/db/models.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index d27938c3..0419b286 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -23,7 +23,7 @@ class Key { } if (id != null && id is! String && id is! int) { throw new ArgumentError( - 'The id argument must not be an integer or a String.'); + 'The id argument must be an integer or a String.'); } } From 59c05ab1cffb872131c2da8cf8baba4a4c25bd02 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 7 Nov 2014 01:23:21 +0100 Subject: [PATCH 039/239] pkg/gcloud: v0.1.1 remove unused dependencies ready for v0.1.1 release R=kustermann@google.com Review URL: https://codereview.chromium.org//707983002 --- pkgs/gcloud/CHANGELOG.md | 9 +++++++++ pkgs/gcloud/lib/src/datastore_impl.dart | 2 -- pkgs/gcloud/pubspec.yaml | 6 +++--- 3 files changed, 12 insertions(+), 5 deletions(-) create mode 100644 pkgs/gcloud/CHANGELOG.md diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md new file mode 100644 index 00000000..ee1c8f21 --- /dev/null +++ b/pkgs/gcloud/CHANGELOG.md @@ -0,0 +1,9 @@ +## 0.1.0 + +* Increased version constraint on googleapis{,_auth,_beta}. + +* Removed unused imports. + +## 0.1.0 + +* First release. 
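As an aside to the models.dart change in patch 038 above, the following is a standalone sketch of the corrected id guard; the checkId helper and the example values are hypothetical and only illustrate which id values the check accepts — it is not code from these patches.

// Hypothetical helper (not part of the package): it mirrors the guard in
// lib/src/db/models.dart to show which id values the corrected check accepts.
void checkId(Object id) {
  if (id != null && id is! String && id is! int) {
    throw new ArgumentError(
        'The id argument must be an integer or a String.');
  }
}

main() {
  checkId(42);       // ok: integer id
  checkId('alice');  // ok: string id
  checkId(null);     // ok: the guard allows a null id
  // checkId(3.14);  // would throw an ArgumentError
}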
diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index aaca2a38..c79920f4 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -5,8 +5,6 @@ library gcloud.datastore_impl; import 'dart:async'; -import 'dart:convert'; -import 'dart:math'; import 'package:http/http.dart' as http; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e8c30a78..1f454fc3 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,8 +1,8 @@ name: gcloud -version: 0.1.0 +version: 0.1.1 author: Dart Team description: Dart gcloud APIs -homepage: http://www.dartlang.org +homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.5.0 <2.0.0' dependencies: @@ -11,8 +11,8 @@ dependencies: googleapis_beta: '>=0.3.0 <0.6.0' http: '>=0.11.0 <0.12.0' dev_dependencies: - http_parser: '>=0.0.2+5 <0.1.0' googleapis_auth: '>=0.1.1 <0.3.0' + http_parser: '>=0.0.2+5 <0.1.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: From b81f407b2ebaf572bbcced9689de6e49e5bfea01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 7 Nov 2014 08:30:49 +0100 Subject: [PATCH 040/239] Fix version number in CHANGELOG TBR=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//703403003 --- pkgs/gcloud/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index ee1c8f21..4a0c1c77 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.1.0 +## 0.1.1 * Increased version constraint on googleapis{,_auth,_beta}. From cb4eddeaa62a47b7fe9572c13889b3dbbd081fda Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 14 Nov 2014 13:05:57 +0100 Subject: [PATCH 041/239] Getting rid of INDEX_UPDATE_DELAY sleeps db/datastore tests R=sgjesse@google.com, wibling@google.com Review URL: https://codereview.chromium.org//717153003 --- pkgs/gcloud/test/common_e2e.dart | 9 -- .../datastore/e2e/datastore_test_impl.dart | 89 ++++++++++++++++--- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 42 ++++++++- 3 files changed, 119 insertions(+), 21 deletions(-) diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index daabd595..4df65f65 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -26,15 +26,6 @@ const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; const String DEFAULT_KEY_LOCATION = 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; -// Used for db/datastore e2e tests: -// -// Non-ancestor queries (i.e. queries not lookups) result in index scans. -// The index tables are updated in a "eventually consistent" way. -// -// So this can make tests flaky, if the index updates take longer than the -// following constant. -const INDEX_UPDATE_DELAY = const Duration(seconds: 20); - // Used for storage e2e tests: // // List operations on buckets are eventually consistent. 
Bucket deletion is diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 685840f3..010eb4c6 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -745,7 +745,7 @@ runTests(Datastore datastore) { test('query', () { return insert(stringNamedEntities, []).then((keys) { - return sleep(INDEX_UPDATE_DELAY).then((_) { + return waitUntilEntitiesReady(datastore, stringNamedKeys).then((_) { var tests = [ // EntityKind query () => testQueryAndCompare( @@ -838,7 +838,7 @@ runTests(Datastore datastore) { () => delete(stringNamedKeys, transactional: true), // Wait until the entity deletes are reflected in the indices. - () => sleep(INDEX_UPDATE_DELAY), + () => waitUntilEntitiesGone(datastore, stringNamedKeys), // Make sure queries don't return results () => testQueryAndCompare( @@ -879,8 +879,10 @@ runTests(Datastore datastore) { return datastore.commit(inserts: [entity, entity2]).then((_) { var futures = [ + // FIXME/TODO: Ancestor queries should be strongly consistent. + // We should not need to wait for them. () { - return sleep(INDEX_UPDATE_DELAY); + return waitUntilEntitiesReady(datastore, [subSubKey, subSubKey2]); }, // Test that lookup only returns inserted entities. () { @@ -1008,22 +1010,89 @@ runTests(Datastore datastore) { } Future cleanupDB(Datastore db) { - // cleanup() will call itself again as long as the DB is not clean. - cleanup() { - var q = new Query(limit: 500); + Future> getNamespaces() { + var q = new Query(kind: '__namespace__'); return consumePages((_) => db.query(q)).then((List entities) { - entities = entities.where((entity) { - return !entity.key.elements[0].kind.contains('__'); + return entities.map((Entity e) { + var id = e.key.elements.last.id; + if (id == 1) return null; + return id; }).toList(); + }); + } + Future> getKinds(String namespace) { + var partition = new Partition(namespace); + var q = new Query(kind: '__kind__'); + return consumePages((_) => db.query(q, partition: partition)) + .then((List entities) { + return entities + .map((Entity e) => e.key.elements.last.id) + .where((String kind) => !kind.contains('__')) + .toList(); + }); + } + + // cleanup() will call itself again as long as the DB is not clean. 
+ cleanup(String namespace, String kind) { + var partition = new Partition(namespace); + var q = new Query(kind: kind, limit: 500); + return consumePages((_) => db.query(q, partition: partition)) + .then((List entities) { if (entities.length == 0) return null; print('[cleanupDB]: Removing left-over ${entities.length} entities'); var deletes = entities.map((e) => e.key).toList(); - return db.commit(deletes: deletes).then((_) => cleanup()); + return db.commit(deletes: deletes).then((_) => cleanup(namespace, kind)); + }); + } + + return getNamespaces().then((List namespaces) { + return Future.forEach(namespaces, (String namespace) { + return getKinds(namespace).then((List kinds) { + return Future.forEach(kinds, (String kind) { + return cleanup(namespace, kind); + }); + }); + }); + }); +} + +Future waitUntilEntitiesReady(Datastore db, List keys) { + return waitUntilEntitiesHelper(db, keys, true); +} + +Future waitUntilEntitiesGone(Datastore db, List keys) { + return waitUntilEntitiesHelper(db, keys, false); +} + +Future waitUntilEntitiesHelper(Datastore db, List keys, bool positive) { + var keysByKind = {}; + for (var key in keys) { + keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); + } + + Future waitForKeys(String kind, List keys) { + var q = new Query(kind: kind); + return consumePages((_) => db.query(q)).then((entities) { + for (var key in keys) { + bool found = false; + for (var entity in entities) { + if (key == entity.key) found = true; + } + if (positive) { + if (!found) return waitForKeys(kind, keys); + } else { + if (found) return waitForKeys(kind, keys); + } + } + return null; }); } - return cleanup(); + + return Future.forEach(keysByKind.keys.toList(), (String kind) { + return waitForKeys(kind, keysByKind[kind]); + }); } main() { diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 529b64f9..ed1663d5 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -457,7 +457,7 @@ runTests(db.DatastoreDB store) { ..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { - return sleep(INDEX_UPDATE_DELAY).then((_) { + return waitUntilEntitiesReady(store, allKeys).then((_) { var tests = [ // Queries for [Person] return no results, we only have [User] // objects. @@ -589,7 +589,7 @@ runTests(db.DatastoreDB store) { () => store.commit(deletes: allKeys), // Wait until the entity deletes are reflected in the indices. 
- () => sleep(INDEX_UPDATE_DELAY), + () => waitUntilEntitiesGone(store, allKeys), // Make sure queries don't return results () => store.lookup(allKeys).then((List models) { @@ -606,6 +606,44 @@ runTests(db.DatastoreDB store) { }); } +Future waitUntilEntitiesReady(db.DatastoreDB mdb, List keys) { + return waitUntilEntitiesHelper(mdb, keys, true); +} + +Future waitUntilEntitiesGone(db.DatastoreDB mdb, List keys) { + return waitUntilEntitiesHelper(mdb, keys, false); +} + +Future waitUntilEntitiesHelper(db.DatastoreDB mdb, + List keys, + bool positive) { + var keysByKind = {}; + for (var key in keys) { + keysByKind.putIfAbsent(key.type, () => []).add(key); + } + + Future waitForKeys(Type kind, List keys) { + return mdb.query(kind).run().toList().then((List models) { + for (var key in keys) { + bool found = false; + for (var model in models) { + if (key == model.key) found = true; + } + if (positive) { + if (!found) return waitForKeys(kind, keys); + } else { + if (found) return waitForKeys(kind, keys); + } + } + return null; + }); + } + + return Future.forEach(keysByKind.keys.toList(), (Type kind) { + return waitForKeys(kind, keysByKind[kind]); + }); +} + main() { var scopes = datastore_impl.DatastoreImpl.SCOPES; From 1411b3ade392b275376c3914b1252b08dde84d30 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 19 Nov 2014 14:38:48 +0100 Subject: [PATCH 042/239] Add context library to package:gcloud and registerX()/get X functions to the APIs. R=lrn@google.com, sgjesse@google.com Review URL: https://codereview.chromium.org//736463002 --- pkgs/gcloud/lib/datastore.dart | 23 ++ pkgs/gcloud/lib/db.dart | 24 +- pkgs/gcloud/lib/http.dart | 37 +++ pkgs/gcloud/lib/service_scope.dart | 273 +++++++++++++++++++++++ pkgs/gcloud/lib/src/storage_impl.dart | 32 +-- pkgs/gcloud/lib/storage.dart | 44 +++- pkgs/gcloud/test/service_scope_test.dart | 221 ++++++++++++++++++ 7 files changed, 627 insertions(+), 27 deletions(-) create mode 100644 pkgs/gcloud/lib/http.dart create mode 100644 pkgs/gcloud/lib/service_scope.dart create mode 100644 pkgs/gcloud/test/service_scope_test.dart diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 6303ec21..3b54d747 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -12,6 +12,29 @@ library gcloud.datastore; import 'dart:async'; import 'common.dart' show Page; +import 'service_scope.dart' as ss; + +const Symbol _datastoreKey = #_gcloud.datastore; + +/// Access the [Datastore] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerDatastoreService] within the current (or a parent) service scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +Datastore get datastoreService => ss.lookup(_datastoreKey); + +/// Registers the [Datastore] object within the current service scope. +/// +/// The provided `datastore` object will be avilable via the top-level +/// `datastore` getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. 
+void registerDatastoreService(Datastore datastore) { + ss.register(_datastoreKey, datastore); +} class ApplicationError implements Exception { final String message; diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 76f3e1fb..cb3610c0 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -7,12 +7,34 @@ library gcloud.db; import 'dart:async'; import 'dart:collection'; import 'dart:mirrors' as mirrors; -import 'datastore.dart' as datastore; import 'common.dart' show Page, StreamFromPages; +import 'service_scope.dart' as ss; +import 'datastore.dart' as datastore; part 'src/db/annotations.dart'; part 'src/db/db.dart'; part 'src/db/models.dart'; part 'src/db/model_db.dart'; part 'src/db/model_db_impl.dart'; + +const Symbol _dbKey = #_gcloud.db; + +/// Access the [DatastoreDB] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerDbService] within the current (or a parent) service scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +DatastoreDB get dbService => ss.lookup(_dbKey); + +/// Registers the [DatastoreDB] object within the current service scope. +/// +/// The provided `db` object will be avilable via the top-level `db` getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. +void registerDbService(DatastoreDB db) { + ss.register(_dbKey, db); +} diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart new file mode 100644 index 00000000..dd33ac8b --- /dev/null +++ b/pkgs/gcloud/lib/http.dart @@ -0,0 +1,37 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// Provides access to an authenticated HTTP client which can be used to access +/// Google APIs. +library gcloud.http; + +import 'package:http/http.dart' as http; + +import 'service_scope.dart' as ss; + +const Symbol _authenticatedClientKey = #_gcloud.http; + +/// Access the [http.Client] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerAuthClientService] within the current (or a parent) service +/// scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +http.Client get authClientService => ss.lookup(_authenticatedClientKey); + +/// Registers the [http.Client] object within the current service scope. +/// +/// The provided `client` object will be avilable via the top-level +/// `authenticatedHttp` getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. +void registerAuthClientService(http.Client client, {bool close: true}) { + ss.register(_authenticatedClientKey, client); + if (close) { + ss.registerScopeExitCallback(() => client.close()); + } +} diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart new file mode 100644 index 00000000..986af1d3 --- /dev/null +++ b/pkgs/gcloud/lib/service_scope.dart @@ -0,0 +1,273 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. 
Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// This library enables one to create a service scope in which code can run. +/// +/// A service scope is an environment in which code runs. The environment is a +/// [Zone] with added functionality. Code can be run inside a new service scope +/// by using the `fork(callback)` method. This will call `callback` inside a new +/// service scope and will keep the scope alive until the Future returned by the +/// callback completes. At this point the service scope ends. +/// +/// Code running inside a new service scope can +/// +/// - register objects (e.g. a database connection pool or a logging service) +/// - look up previously registered objects +/// - register on-scope-exit handlers +/// +/// Service scopes can be nested. All registered values from the parent service +/// scope are still accessible as long as they have not been overridden. The +/// callback passed to `fork()` is responsible for not completing it's returned +/// Future until all nested service scopes have ended. +/// +/// The on-scope-exit callbacks will be called when the service scope ends. The +/// callbacks are run in reverse registration order and are guaranteed to be +/// executed. During a scope exit callback the active service scope cannot +/// be modified anymore and `lookup()`s will only return values which were +/// registered before the registration of the on-scope-exit callback. +/// +/// One use-case of this is making services available to a server application. +/// The server application will run inside a service scope which will have all +/// necessary services registered. +/// Once the server app shuts down, the registered on-scope-exit callbacks will +/// automatically be invoked and the process will shut down cleanly. +/// +/// Here is an example use case: +/// +/// import 'dart:async'; +/// import 'package:gcloud/service_scope.dart' as scope; +/// +/// class DBPool { ... } +/// +/// DBPool get dbService => scope.lookup(#dbpool); +/// +/// Future runApp() { +/// // The application can use the registered objects (here the +/// // dbService). It does not need to pass it around, but can use a +/// // global getter. +/// return dbService.query( ... ).listen(print).asFuture(); +/// } +/// +/// main() { +/// // Creates a new service scope and runs the given closure inside it. +/// ss.fork(() { +/// // We create a new database pool with a 10 active connections and +/// // add it to the current service scope with key `#dbpool`. +/// // In addition we insert a on-scope-exit callback which will be +/// // called once the application is done. +/// var pool = new DBPool(connections: 10); +/// scope.register(#dbpool, pool, onScopeExit: () => pool.close()); +/// return runApp(); +/// }).then((_) { +/// print('Server application shut down cleanly'); +/// }); +/// } +library gcloud.service_scope; + +import 'dart:async'; + +/// The Symbol used as index in the zone map for the service scope object. +const Symbol _ServiceScopeKey = #_gcloud.service_scope; + +/// An empty service scope. +/// +/// New service scope can be created by calling [fork] on the empty +/// service scope. +final _ServiceScope _emptyServiceScope = new _ServiceScope(); + +/// Returns the current [_ServiceScope] object. +_ServiceScope get _serviceScope => Zone.current[_ServiceScopeKey]; + +/// Start a new zone with a new service scope and run [func] inside it. 
+/// +/// The function [func] must return a `Future` and the service scope will end +/// when this future completes. +/// +/// If an uncaught error occurs and [onError] is given, it will be called. The +/// `onError` parameter can take the same values as `Zone.current.fork`. +Future fork(Future func(), {Function onError}) { + var currentServiceScope = _serviceScope; + if (currentServiceScope == null) { + currentServiceScope = _emptyServiceScope; + } + return currentServiceScope._fork(func, onError: onError); +} + +/// Register a new [object] into the current service scope using the given +/// [key]. +/// +/// If [onScopeExit] is provided, it will be called when the service scope ends. +/// +/// The registered on-scope-exit functions are executed in reverse registration +/// order. +void register(Object key, Object value, {onScopeExit()}) { + var serviceScope = _serviceScope; + if (serviceScope == null) { + throw new StateError('Not running inside a service scope zone.'); + } + serviceScope.register(key, value, onScopeExit: onScopeExit); +} + +/// Register a [onScopeExitCallback] to be invoked when this service scope ends. +/// +/// The registered on-scope-exit functions are executed in reverse registration +/// order. +Object registerScopeExitCallback(onScopeExitCallback()) { + var serviceScope = _serviceScope; + if (serviceScope == null) { + throw new StateError('Not running inside a service scope zone.'); + } + return serviceScope.registerOnScopeExitCallback(onScopeExitCallback); +} + +/// Look up an item by it's key in the currently active service scope. +/// +/// Returns `null` if there is no entry with the given key. +Object lookup(Object key) { + var serviceScope = _serviceScope; + if (serviceScope == null) { + throw new StateError('Not running inside a service scope zone.'); + } + return serviceScope.lookup(key); +} + +/// Represents a global service scope of values stored via zones. +class _ServiceScope { + /// A mapping of keys to values stored inside the service scope. + final Map _key2Values = new Map(); + + /// A set which indicates whether an object was copied from it's parent. + final Set _parentCopies = new Set(); + + /// On-Scope-Exit functions which will be called in reverse insertion order. + final List<_RegisteredEntry> _registeredEntries = []; + + bool _cleaningUp = false; + bool _destroyed = false; + + /// Looks up an object by it's service scope key - returns `null` if not + /// found. + Object lookup(Object serviceScope) { + _ensureNotInDestroyingState(); + var entry = _key2Values[serviceScope]; + return entry != null ? entry.value : null; + } + + /// Inserts a new item to the service scope using [serviceScopeKey]. + /// + /// Optionally calls a [onScopeExit] function once this service scope ends. + void register(Object serviceScopeKey, Object value, {onScopeExit()}) { + _ensureNotInCleaningState(); + _ensureNotInDestroyingState(); + + bool isParentCopy = _parentCopies.contains(serviceScopeKey); + if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) { + throw new ArgumentError( + 'Servie scope already contains key $serviceScopeKey.'); + } + + var entry = new _RegisteredEntry(serviceScopeKey, value, onScopeExit); + + _key2Values[serviceScopeKey] = entry; + if (isParentCopy) _parentCopies.remove(serviceScopeKey); + + _registeredEntries.add(entry); + } + + /// Inserts a new on-scope-exit function to be called once this service scope + /// ends. 
+ void registerOnScopeExitCallback(onScopeExitCallback()) { + _ensureNotInCleaningState(); + _ensureNotInDestroyingState(); + + if (onScopeExitCallback != null) { + _registeredEntries.add( + new _RegisteredEntry(null, null, onScopeExitCallback)); + } + } + + /// Start a new zone with a forked service scope. + Future _fork(Future func(), {Function onError}) { + _ensureNotInCleaningState(); + _ensureNotInDestroyingState(); + + var serviceScope = _copy(); + var map = { _ServiceScopeKey: serviceScope }; + return runZoned(() { + var f = func(); + if (f is! Future) { + throw new ArgumentError('Forking a service scope zone requires the ' + 'callback function to return a future.'); + } + return f.whenComplete(serviceScope._runScopeExitHandlers); + }, zoneValues: map, onError: onError); + } + + void _ensureNotInDestroyingState() { + if (_destroyed) { + throw new StateError( + 'The service scope has already been exited. It is therefore ' + 'forbidden to use this service scope anymore. ' + 'Please make sure that your code waits for all asynchronous tasks ' + 'before the closure passed to fork() completes.'); + } + } + + void _ensureNotInCleaningState() { + if (_cleaningUp) { + throw new StateError( + 'The service scope is in the process of cleaning up. It is therefore ' + 'forbidden to make any modifications to the current service scope. ' + 'Please make sure that your code waits for all asynchronous tasks ' + 'before the closure passed to fork() completes.'); + } + } + + /// Copies all service scope entries to a new service scope, but not their + /// on-scope-exit handlers. + _ServiceScope _copy() { + var serviceScopeCopy = new _ServiceScope(); + serviceScopeCopy._key2Values.addAll(_key2Values); + serviceScopeCopy._parentCopies.addAll(_key2Values.keys); + return serviceScopeCopy; + } + + /// Runs all on-scope-exit functions in [_ServiceScope]. + Future _runScopeExitHandlers() { + _cleaningUp = true; + var errors = []; + + // We are running all on-scope-exit functions in reverse registration order. + // Even if one fails, we continue cleaning up and report then the list of + // errors (if there were any). + return Future.forEach(_registeredEntries.reversed, + (_RegisteredEntry registeredEntry) { + if (registeredEntry.key != null) { + _key2Values.remove(registeredEntry.key); + } + if (registeredEntry.scopeExitCallback != null) { + return new Future.sync(registeredEntry.scopeExitCallback) + .catchError((e, s) => errors.add(e)); + } else { + return new Future.value(); + } + }).then((_) { + _cleaningUp = true; + _destroyed = true; + if (errors.length > 0) { + throw new Exception( + 'The following errors occured while running scope exit handlers' + ': $errors'); + } + }); + } +} + +class _RegisteredEntry { + final Object key; + final Object value; + final Function scopeExitCallback; + + _RegisteredEntry(this.key, this.value, this.scopeExitCallback); +} diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index a37e4ed6..21a9de92 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -35,14 +35,14 @@ class _AbsoluteName { /// Storage API implementation providing access to buckets. 
class _StorageImpl implements Storage { final String project; - final storage.StorageApi _api; + final storage_api.StorageApi _api; _StorageImpl(client, this.project) - : _api = new storage.StorageApi(client); + : _api = new storage_api.StorageApi(client); Future createBucket(String bucketName, {PredefinedAcl predefinedAcl, Acl acl}) { - var bucket = new storage.Bucket()..name = bucketName; + var bucket = new storage_api.Bucket()..name = bucketName; var predefinedName = predefinedAcl != null ? predefinedAcl._name : null; if (acl != null) { bucket.acl = acl._toBucketAccessControlList(); @@ -100,7 +100,7 @@ class _StorageImpl implements Storage { .then((_) => null); } - Future _listBuckets(int pageSize, String nextPageToken) { + Future _listBuckets(int pageSize, String nextPageToken) { return _api.buckets.list( project, maxResults: pageSize, @@ -109,7 +109,7 @@ class _StorageImpl implements Storage { } class _BucketInfoImpl implements BucketInfo { - storage.Bucket _bucket; + final storage_api.Bucket _bucket; _BucketInfoImpl(this._bucket); @@ -126,7 +126,7 @@ class _BucketInfoImpl implements BucketInfo { /// Bucket API implementation providing access to objects. class _BucketImpl implements Bucket { - final storage.StorageApi _api; + final storage_api.StorageApi _api; PredefinedAcl _defaultPredefinedObjectAcl; Acl _defaultObjectAcl; final String bucketName; @@ -145,7 +145,7 @@ class _BucketImpl implements Bucket { String objectName, {int length, ObjectMetadata metadata, Acl acl, PredefinedAcl predefinedAcl, String contentType}) { - storage.Object object; + storage_api.Object object; if (metadata == null) { metadata = new _ObjectMetadata(acl: acl, contentType: contentType); } else { @@ -243,7 +243,7 @@ class _BucketImpl implements Bucket { return _api.objects.update(object, bucketName, objectName); } - Future _listObjects( + Future _listObjects( String bucketName, String prefix, String delimiter, int pageSize, String nextPageToken) { return _api.objects.list( @@ -261,7 +261,7 @@ class _BucketPageImpl implements Page { final String _nextPageToken; final List items; - _BucketPageImpl(this._storage, this._pageSize, storage.Buckets response) + _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) : items = new List(response.items != null ? response.items.length : 0), _nextPageToken = response.nextPageToken { for (int i = 0; i < items.length; i++) { @@ -290,7 +290,7 @@ class _ObjectPageImpl implements Page { _ObjectPageImpl( this._bucket, this._prefix, this._pageSize, - storage.Objects response) + storage_api.Objects response) : items = new List( (response.items != null ? response.items.length : 0) + (response.prefixes != null ? 
response.prefixes.length : 0)), @@ -335,12 +335,12 @@ class _ObjectGenerationImpl implements ObjectGeneration { } class _ObjectInfoImpl implements ObjectInfo { - final storage.Object _object; + final storage_api.Object _object; final ObjectMetadata _metadata; Uri _downloadLink; ObjectGeneration _generation; - _ObjectInfoImpl(storage.Object object) : + _ObjectInfoImpl(storage_api.Object object) : _object = object, _metadata = new _ObjectMetadata._(object); String get name => _object.name; @@ -379,7 +379,7 @@ class _ObjectInfoImpl implements ObjectInfo { } class _ObjectMetadata implements ObjectMetadata { - final storage.Object _object; + final storage_api.Object _object; Acl _cachedAcl; ObjectGeneration _cachedGeneration; Map _cachedCustom; @@ -391,7 +391,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentDisposition, String contentLanguage, Map custom}) - : _object = new storage.Object() { + : _object = new storage_api.Object() { _object.acl = acl != null ? acl._toObjectAccessControlList() : null; _object.contentType = contentType; _object.contentEncoding = contentEncoding; @@ -462,10 +462,10 @@ class _ObjectMetadata implements ObjectMetadata { /// media upload (multipart mime) or resumable media upload. class _MediaUploadStreamSink implements StreamSink> { static const int _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH = 1024 * 1024; - final storage.StorageApi _api; + final storage_api.StorageApi _api; final String _bucketName; final String _objectName; - final storage.Object _object; + final storage_api.Object _object; final String _predefinedAcl; final int _length; final int _maxNormalUploadLength; diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 9fd1a344..4b744704 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -54,14 +54,38 @@ import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; import 'package:http/http.dart' as http; import 'package:crypto/crypto.dart' as crypto; -import 'package:googleapis/storage/v1.dart' as storage; +import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:googleapis/common/common.dart' as common; +import 'service_scope.dart' as ss; + import 'common.dart'; export 'common.dart'; part 'src/storage_impl.dart'; +const Symbol _storageKey = #_gcloud.storage; + +/// Access the [Storage] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerStorageService] within the current (or a parent) service scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +Storage get storageService => ss.lookup(_storageKey); + +/// Registers the [storage] object within the current service scope. +/// +/// The provided `storage` object will be avilable via the top-level `storage` +/// getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. +void registerStorageService(Storage storage) { + ss.register(_storageKey, storage); +} + int _jenkinsHash(List e) { const _HASH_MASK = 0x3fffffff; int hash = 0; @@ -93,7 +117,7 @@ class Acl { /// Create a new ACL with a list of ACL entries. Acl(Iterable entries) : _entries = new List.from(entries); - Acl._fromBucketAcl(storage.Bucket bucket) + Acl._fromBucketAcl(storage_api.Bucket bucket) : _entries = new List(bucket.acl == null ? 
0 : bucket.acl.length) { if (bucket.acl != null) { for (int i = 0; i < bucket.acl.length; i++) { @@ -103,7 +127,7 @@ class Acl { } } - Acl._fromObjectAcl(storage.Object object) + Acl._fromObjectAcl(storage_api.Object object) : _entries = new List(object.acl == null ? 0 : object.acl.length) { if (object.acl != null) { for (int i = 0; i < object.acl.length; i++) { @@ -149,11 +173,11 @@ class Acl { "Server returned a unsupported permission role '$role'"); } - List _toBucketAccessControlList() { + List _toBucketAccessControlList() { return _entries.map((entry) => entry._toBucketAccessControl()).toList(); } - List _toObjectAccessControlList() { + List _toObjectAccessControlList() { return _entries.map((entry) => entry._toObjectAccessControl()).toList(); } @@ -190,15 +214,15 @@ class AclEntry { AclEntry(this.scope, this.permission); - storage.BucketAccessControl _toBucketAccessControl() { - var acl = new storage.BucketAccessControl(); + storage_api.BucketAccessControl _toBucketAccessControl() { + var acl = new storage_api.BucketAccessControl(); acl.entity = scope._storageEntity; acl.role = permission._storageBucketRole; return acl; } - storage.ObjectAccessControl _toObjectAccessControl() { - var acl = new storage.ObjectAccessControl(); + storage_api.ObjectAccessControl _toObjectAccessControl() { + var acl = new storage_api.ObjectAccessControl(); acl.entity = scope._storageEntity; acl.role = permission._storageObjectRole; return acl; @@ -472,7 +496,7 @@ abstract class BucketInfo { abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. static const List SCOPES = - const [storage.StorageApi.DevstorageFullControlScope]; + const [storage_api.StorageApi.DevstorageFullControlScope]; /// Initializes access to cloud storage. factory Storage(http.Client client, String project) = _StorageImpl; diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart new file mode 100644 index 00000000..afaa3be3 --- /dev/null +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -0,0 +1,221 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.test.service_scope_test; + +import 'dart:async'; + +import 'package:gcloud/service_scope.dart' as ss; +import 'package:unittest/unittest.dart'; + +main() { + test('no-service-scope', () { + expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); + expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); + expect(() => ss.lookup(1), throwsA(isStateError)); + + var c = new Completer.sync(); + ss.fork(expectAsync(() { + c.complete(); + return new Future.value(); + })); + + // Assert that after fork()ing we still don't have a service scope outside + // of the zone created by the fork()ing. + c.future.then(expectAsync((_) { + expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); + expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); + expect(() => ss.lookup(1), throwsA(isStateError)); + })); + }); + + test('non-existent-key', () { + return ss.fork(expectAsync(() { + expect(ss.lookup(1), isNull); + return new Future.value(); + })); + }); + + test('fork-callback-returns-non-future', () { + // The closure passed to fork() must return a future. 
+ expect(() => ss.fork(expectAsync(() => null)), + throwsA(isArgumentError)); + }); + + test('error-on-double-insert', () { + // Ensure that inserting twice with the same key results in an error. + return ss.fork(expectAsync(() => new Future.sync(() { + ss.register(1, 'firstValue'); + expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError)); + }))); + }); + + test('only-cleanup', () { + return ss.fork(expectAsync(() => new Future.sync(() { + ss.registerScopeExitCallback(expectAsync(() {})); + }))); + }); + + test('correct-insertion-and-cleanup-order', () { + // Ensure cleanup functions are called in the reverse order of inserting + // their entries. + int insertions = 0; + return ss.fork(expectAsync(() => new Future.value(() { + int NUM = 10; + + for (int i = 0; i < NUM; i++) { + var key = i; + + insertions++; + ss.register(key, 'value$i'); + ss.registerScopeExitCallback(expectAsync(() { + expect(insertions, equals(i + 1)); + insertions--; + })); + + for (int j = 0; j <= NUM; j++) { + if (j <= i) { + expect(ss.lookup(key), 'value$i'); + } else { + expect(ss.lookup(key), isNull); + } + } + } + }))); + }); + + test('onion-cleanup', () { + // Ensures that a cleanup method can look up things registered before it. + return ss.fork(expectAsync(() { + ss.registerScopeExitCallback(expectAsync(() { + expect(ss.lookup(1), isNull); + expect(ss.lookup(2), isNull); + })); + ss.register(1, 'value1'); + ss.registerScopeExitCallback(expectAsync(() { + expect(ss.lookup(1), equals('value1')); + expect(ss.lookup(2), isNull); + })); + ss.register(2, 'value2', onScopeExit: expectAsync(() { + expect(ss.lookup(1), equals('value1')); + expect(ss.lookup(2), isNull); + })); + ss.registerScopeExitCallback(expectAsync(() { + expect(ss.lookup(1), 'value1'); + expect(ss.lookup(2), 'value2'); + })); + return new Future.value(); + })); + }); + + test('correct-insertion-and-cleanup-order--errors', () { + // Ensure that all cleanup functions will be called - even if some of them + // result in an error. + // Ensure the fork() error message contains all error messages from the + // failed cleanup() calls. + int insertions = 0; + return ss.fork(() => new Future.sync(() { + for (int i = 0; i < 10; i++) { + insertions++; + ss.register(i, 'value$i'); + ss.registerScopeExitCallback(() { + expect(insertions, equals(i + 1)); + insertions--; + if (i.isEven) throw 'xx${i}yy'; + }); + } + })).catchError(expectAsync((e, _) { + for (int i = 0; i < 10; i++) { + expect('$e'.contains('xx${i}yy'), equals(i.isEven)); + } + })); + }); + + test('service-scope-destroyed-after-callback-completes', () { + // Ensure that once the closure passed to fork() completes, the service + // scope is destroyed. + return ss.fork(expectAsync(() => new Future.sync(() { + var key = 1; + ss.register(key, 'firstValue'); + ss.registerScopeExitCallback(Zone.current.bindCallback(() { + // Spawn an async task which will be run after the cleanups to ensure + // the service scope got destroyed. + Timer.run(expectAsync(() { + expect(() => ss.lookup(key), throwsA(isStateError)); + expect(() => ss.register(2, 'value'), throwsA(isStateError)); + expect(() => ss.registerScopeExitCallback(() {}), + throwsA(isStateError)); + })); + })); + expect(ss.lookup(key), equals('firstValue')); + }))); + }); + + test('override-parent-value', () { + // Ensure that once the closure passed to fork() completes, the service + // scope is destroyed. 
+ return ss.fork(expectAsync(() => new Future.sync(() { + var key = 1; + ss.register(key, 'firstValue'); + expect(ss.lookup(key), equals('firstValue')); + + return ss.fork(expectAsync(() => new Future.sync(() { + ss.register(key, 'secondValue'); + expect(ss.lookup(key), equals('secondValue')); + }))); + }))); + }); + + test('fork-onError-handler', () { + // Ensure that once the closure passed to fork() completes, the service + // scope is destroyed. + ss.fork(expectAsync(() { + Timer.run(() => throw new StateError('foobar')); + return new Future.value(); + }), onError: expectAsync((error, _) { + expect(error, isStateError); + })); + }); + + test('nested-fork-and-insert', () { + // Ensure that independently fork()ed serice scopes can insert keys + // independently and they cannot see each others values but can see parent + // service scope values. + var rootKey = 1; + var subKey = 2; + var subKey1 = 3; + var subKey2 = 4; + + return ss.fork(expectAsync(() { + int cleanupFork1 = 0; + int cleanupFork2 = 0; + + ss.register(rootKey, 'root'); + ss.registerScopeExitCallback(expectAsync(() { + expect(cleanupFork1, equals(2)); + expect(cleanupFork2, equals(2)); + })); + expect(ss.lookup(rootKey), equals('root')); + + Future spawnChild(ownSubKey, otherSubKey, int i, cleanup) { + return ss.fork(expectAsync(() => new Future.sync(() { + ss.register(subKey, 'fork$i'); + ss.registerScopeExitCallback(cleanup); + ss.register(ownSubKey, 'sub$i'); + ss.registerScopeExitCallback(cleanup); + + expect(ss.lookup(rootKey), equals('root')); + expect(ss.lookup(subKey), equals('fork$i')); + expect(ss.lookup(ownSubKey), equals('sub$i')); + expect(ss.lookup(otherSubKey), isNull); + }))); + } + + return Future.wait([ + spawnChild(subKey1, subKey2, 1, () => cleanupFork1++), + spawnChild(subKey2, subKey1, 2, () => cleanupFork2++), + ]); + })); + }); +} From 90a3029b1866a7c6372b82e45a291d7ae07440f7 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 21 Nov 2014 16:44:50 +0100 Subject: [PATCH 043/239] Disable datastore/storage e2e tests inside build/ R=sgjesse@google.com Review URL: https://codereview.chromium.org//747893003 --- pkgs/gcloud/.status | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index 89f3c69c..13d431e4 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -2,11 +2,15 @@ */*/packages/*: Skip */*/*/packages/*: Skip +# We do not run the e2e tests inside the build/ directory in order to prevent +# ./tools/test.py from running several e2e in parallel. 
+build/test/db_all_e2e_test: Skip +build/test/storage/e2e_test: Skip + # This test is slow because # - eventual consistency forces us to put in sleep()s # - it does e2e testing # - it combines several tests to avoid concurrent tests touching the same data -build/test/db_all_e2e_test: Slow, Pass test/db_all_e2e_test: Slow, Pass [ $browser ] From cd333c47440558bd4978d6da7d20c21b472dd2c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 24 Nov 2014 09:48:53 +0100 Subject: [PATCH 044/239] Add documentation on supported filter strings R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//757553002 --- pkgs/gcloud/lib/src/db/db.dart | 9 ++++++++- pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index fababe09..07c5efb7 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -136,7 +136,14 @@ class Query { * Adds a filter to this [Query]. * * [filterString] has form "name OP" where 'name' is a fieldName of the - * model and OP is an operator (e.g. "name >="). + * model and OP is an operator. The following operators are supported: + * + * * '<' (less than) + * * '<=' (less than or equal) + * * '>' (greater than) + * * '>=' (greater than or equal) + * * '=' (equal) + * * 'IN' (in - `comparisonObject` must be a list) * * [comparisonObject] is the object for comparison. */ diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 1f454fc3..17f18db0 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.1 +version: 0.1.2-dev author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 4f1b55f027906b51d097032ed9fa8257049a9456 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 28 Nov 2014 14:46:32 +0100 Subject: [PATCH 045/239] Prepare pubspec/CHANGELOG for version 0.1.2 R=sgjesse@google.com, wibling@google.com Review URL: https://codereview.chromium.org//766663007 --- pkgs/gcloud/CHANGELOG.md | 7 +++++++ pkgs/gcloud/lib/datastore.dart | 1 + pkgs/gcloud/lib/db.dart | 1 + pkgs/gcloud/lib/http.dart | 1 + pkgs/gcloud/lib/service_scope.dart | 9 +++++++++ pkgs/gcloud/lib/storage.dart | 1 + pkgs/gcloud/pubspec.yaml | 2 +- 7 files changed, 21 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 4a0c1c77..f7bd4092 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,10 @@ +## 0.1.2 + +* Introduced `package:gcloud/service_scope.dart` library. +* Added global getters for getting gcloud services from the current service +scope. +* Added an `package:gcloud/http.dart` library using service scopes. + ## 0.1.1 * Increased version constraint on googleapis{,_auth,_beta}. diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 3b54d747..3f0dcf9c 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -22,6 +22,7 @@ const Symbol _datastoreKey = #_gcloud.datastore; /// [registerDatastoreService] within the current (or a parent) service scope. /// /// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. Datastore get datastoreService => ss.lookup(_datastoreKey); /// Registers the [Datastore] object within the current service scope. 
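Stepping back from the individual hunks, a minimal sketch of how the service-scope getters documented here are intended to be used (assuming the `ss.fork`/register pattern exercised by the service_scope tests earlier in this series; `handleRequest` is a hypothetical application callback, and the exact signature of `registerDatastoreService` is inferred from its documentation, not shown in this patch):

    import 'dart:async';

    import 'package:gcloud/datastore.dart';
    import 'package:gcloud/service_scope.dart' as ss;

    // Hypothetical request handler standing in for real application code.
    Future handleRequest() => new Future.value('handled');

    Future serveRequest(Datastore datastore) {
      // Run the handler inside its own service scope.
      return ss.fork(() {
        // Make the datastore instance available to everything below this frame.
        registerDatastoreService(datastore);
        // From here on, the `datastoreService` getter resolves to the
        // registered instance through a service-scope lookup.
        return handleRequest();
      });
    }
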
diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index cb3610c0..7c5feb2e 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -26,6 +26,7 @@ const Symbol _dbKey = #_gcloud.db; /// [registerDbService] within the current (or a parent) service scope. /// /// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. DatastoreDB get dbService => ss.lookup(_dbKey); /// Registers the [DatastoreDB] object within the current service scope. diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index dd33ac8b..b6f0c6f5 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -19,6 +19,7 @@ const Symbol _authenticatedClientKey = #_gcloud.http; /// scope. /// /// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. http.Client get authClientService => ss.lookup(_authenticatedClientKey); /// Registers the [http.Client] object within the current service scope. diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 986af1d3..856365c7 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -63,6 +63,15 @@ /// print('Server application shut down cleanly'); /// }); /// } +/// +/// As an example, the `package:appengine/appengine.dart` package runs request +/// handlers inside a service scope, which has most `package:gcloud` services +/// registered. +/// +/// The core application code can then be independent of `package:appengine` +/// and instead depend only on the services needed (e.g. +/// `package:gcloud/storage.dart`) by using getters in the service library (e.g. +/// the `storageService`) which are implemented with service scope lookups. library gcloud.service_scope; import 'dart:async'; diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4b744704..35f7a85d 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -72,6 +72,7 @@ const Symbol _storageKey = #_gcloud.storage; /// [registerStorageService] within the current (or a parent) service scope. /// /// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. Storage get storageService => ss.lookup(_storageKey); /// Registers the [storage] object within the current service scope. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 17f18db0..e7a64d45 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.2-dev +version: 0.1.2 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From daee10bbab6a7d26cad3126e4213bedcbaf8d1ed Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 3 Dec 2014 16:36:38 +0100 Subject: [PATCH 046/239] Widen package:googleapis constraint in pubspec.yaml R=wibling@google.com Review URL: https://codereview.chromium.org//757833003 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index f7bd4092..52b1cccb 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.1.3 + +* Widen package:googleapis dependency constraint in pubspec.yaml. 
+ ## 0.1.2 * Introduced `package:gcloud/service_scope.dart` library. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e7a64d45..6fe282bf 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.2 +version: 0.1.3 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,7 +7,7 @@ environment: sdk: '>=1.5.0 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.5.0' + googleapis: '>=0.2.0 <0.6.0' googleapis_beta: '>=0.3.0 <0.6.0' http: '>=0.11.0 <0.12.0' dev_dependencies: From db437e24534a05985cf2dd2a45264e57fa9e2653 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 3 Dec 2014 16:38:31 +0100 Subject: [PATCH 047/239] [datastore] Bugfix in ListProperty(), correctly handle list values of length 1 BUG=https://github.com/dart-lang/gcloud/issues/17 R=sgjesse@google.com Review URL: https://codereview.chromium.org//774213002 --- pkgs/gcloud/CHANGELOG.md | 2 + pkgs/gcloud/lib/src/db/annotations.dart | 4 +- pkgs/gcloud/test/db/properties_test.dart | 53 ++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 52b1cccb..9cd75d97 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,6 +1,8 @@ ## 0.1.3 * Widen package:googleapis dependency constraint in pubspec.yaml. +* Bugfix in `package:appengine/db.dart`: Correctly handle ListProperties +of length 1. ## 0.1.2 diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index b1f0f6b5..e158ad53 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -258,14 +258,14 @@ class ListProperty extends Property { if (value == null) return null; List list = value; if (list.length == 0) return null; - if (list.length == 1) return list[0]; + if (list.length == 1) return subProperty.encodeValue(db, list[0]); return list.map( (value) => subProperty.encodeValue(db, value)).toList(); } Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return []; - if (value is! List) return [value]; + if (value is! 
List) return [subProperty.decodePrimitiveValue(db, value)]; return (value as List) .map((entry) => subProperty.decodePrimitiveValue(db, entry)) .toList(); diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index f20c2509..d0ae074f 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -111,6 +111,7 @@ main() { expect(prop.validate(null, null), isFalse); expect(prop.validate(null, []), isTrue); + expect(prop.validate(null, [true]), isTrue); expect(prop.validate(null, [true, false]), isTrue); expect(prop.validate(null, [true, false, 1]), isFalse); expect(prop.encodeValue(null, []), equals(null)); @@ -123,6 +124,28 @@ main() { equals([true, false])); }); + test('composed_list_property', () { + var prop = const ListProperty(const CustomProperty()); + + var c1 = new Custom()..customValue = 'c1'; + var c2 = new Custom()..customValue = 'c2'; + + expect(prop.validate(null, null), isFalse); + expect(prop.validate(null, []), isTrue); + expect(prop.validate(null, [c1]), isTrue); + expect(prop.validate(null, [c1, c2]), isTrue); + expect(prop.validate(null, [c1, c2, 1]), isFalse); + expect(prop.encodeValue(null, []), equals(null)); + expect(prop.encodeValue(null, [c1]), equals(c1.customValue)); + expect(prop.encodeValue(null, [c1, c2]), + equals([c1.customValue, c2.customValue])); + expect(prop.decodePrimitiveValue(null, null), equals([])); + expect(prop.decodePrimitiveValue(null, []), equals([])); + expect(prop.decodePrimitiveValue(null, c1.customValue), equals([c1])); + expect(prop.decodePrimitiveValue(null, [c1.customValue, c2.customValue]), + equals([c1, c2])); + }); + test('modelkey_property', () { var datastoreKey = new datastore.Key( [new datastore.KeyElement('MyKind', 42)], @@ -146,6 +169,36 @@ main() { }); } +class Custom { + String customValue; + + int get hashCode => customValue.hashCode; + + bool operator==(other) { + return other is Custom && other.customValue == customValue; + } +} + +class CustomProperty extends StringProperty { + const CustomProperty( + {String propertyName: null, bool required: false, bool indexed: true}); + + bool validate(ModelDB db, Object value) { + if (required && value == null) return false; + return value == null || value is Custom; + } + + Object decodePrimitiveValue(ModelDB db, Object value) { + if (value == null) return null; + return new Custom()..customValue = value; + } + + Object encodeValue(ModelDB db, Object value) { + if (value == null) return null; + return (value as Custom).customValue; + } +} + class KeyMock implements Key { datastore.Key _datastoreKey; From ed8bd0e864dfbbdf1007253ce5f87ff50cb0f6a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Thu, 4 Dec 2014 17:31:00 +0100 Subject: [PATCH 048/239] Change the service scope keys keys to non-private symbols dart2js does not currently support private symbols, so generating dartdoc fials with private symbols. 
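Concretely, the shape of the change is a one-line switch of each symbol literal (mirroring the datastore.dart hunk below; the other libraries are updated the same way):

    // Previously: const Symbol _datastoreKey = #_gcloud.datastore;
    // The symbol literal is now non-private, while the constant itself stays
    // library-private because of the leading underscore in its name.
    const Symbol _datastoreKey = #gcloud.datastore;
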
R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//782553002 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/datastore.dart | 2 +- pkgs/gcloud/lib/db.dart | 2 +- pkgs/gcloud/lib/http.dart | 2 +- pkgs/gcloud/lib/service_scope.dart | 2 +- pkgs/gcloud/lib/storage.dart | 2 +- pkgs/gcloud/pubspec.yaml | 2 +- 7 files changed, 10 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9cd75d97..d90dff00 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.1.3+1 + +* Change the service scope keys keys to non-private symbols. + ## 0.1.3 * Widen package:googleapis dependency constraint in pubspec.yaml. diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 3f0dcf9c..a65ec47a 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -14,7 +14,7 @@ import 'dart:async'; import 'common.dart' show Page; import 'service_scope.dart' as ss; -const Symbol _datastoreKey = #_gcloud.datastore; +const Symbol _datastoreKey = #gcloud.datastore; /// Access the [Datastore] object available in the current service scope. /// diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 7c5feb2e..fc95ef1e 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -18,7 +18,7 @@ part 'src/db/models.dart'; part 'src/db/model_db.dart'; part 'src/db/model_db_impl.dart'; -const Symbol _dbKey = #_gcloud.db; +const Symbol _dbKey = #gcloud.db; /// Access the [DatastoreDB] object available in the current service scope. /// diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index b6f0c6f5..07950933 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -10,7 +10,7 @@ import 'package:http/http.dart' as http; import 'service_scope.dart' as ss; -const Symbol _authenticatedClientKey = #_gcloud.http; +const Symbol _authenticatedClientKey = #gcloud.http; /// Access the [http.Client] object available in the current service scope. /// diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 856365c7..8d4641ee 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -77,7 +77,7 @@ library gcloud.service_scope; import 'dart:async'; /// The Symbol used as index in the zone map for the service scope object. -const Symbol _ServiceScopeKey = #_gcloud.service_scope; +const Symbol _ServiceScopeKey = #gcloud.service_scope; /// An empty service scope. /// diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 35f7a85d..f7e0d855 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -64,7 +64,7 @@ export 'common.dart'; part 'src/storage_impl.dart'; -const Symbol _storageKey = #_gcloud.storage; +const Symbol _storageKey = #gcloud.storage; /// Access the [Storage] object available in the current service scope. 
/// diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 6fe282bf..82e2d6c0 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.3 +version: 0.1.3+1 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 3a87199f095cd8727720b235072c1c320094b95a Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 19 Dec 2014 08:55:28 +0100 Subject: [PATCH 049/239] First attempt to deflake package waterfall builders R=sgjesse@google.com, wibling@google.com Review URL: https://codereview.chromium.org//813033002 --- .../datastore/e2e/datastore_test_impl.dart | 81 +++++++++---------- pkgs/gcloud/test/datastore/e2e/utils.dart | 23 +++--- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 45 +++++------ pkgs/gcloud/test/db_all_e2e_test.dart | 28 ++++--- 4 files changed, 89 insertions(+), 88 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 010eb4c6..3ad1795f 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -49,7 +49,9 @@ Future> consumePages(FirstPageProvider provider) { return new StreamFromPages(provider).stream.toList(); } -runTests(Datastore datastore) { +runTests(Datastore datastore, String namespace) { + Partition partition = new Partition(namespace); + Future withTransaction(Function f, {bool xg: false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } @@ -203,11 +205,12 @@ runTests(Datastore datastore) { return test(null); } - var unnamedEntities1 = buildEntities(42, 43); - var unnamedEntities5 = buildEntities(1, 6); - var unnamedEntities20 = buildEntities(6, 26); + var unnamedEntities1 = buildEntities(42, 43, partition: partition); + var unnamedEntities5 = buildEntities(1, 6, partition: partition); + var unnamedEntities20 = buildEntities(6, 26, partition: partition); var named20000 = buildEntities( - 1000, 21001, idFunction: (i) => 'named_${i}_of_10000'); + 1000, 21001, idFunction: (i) => 'named_${i}_of_10000', + partition: partition); test('insert', () { return testInsert(unnamedEntities5, transactional: false).then((keys) { @@ -285,7 +288,7 @@ runTests(Datastore datastore) { } } - var keys = buildKeys(1, 4); + var keys = buildKeys(1, 4, partition: partition); return datastore.allocateIds(keys).then((List completedKeys) { compareResult(keys, completedKeys); // TODO: Make sure we can insert these keys @@ -337,10 +340,11 @@ runTests(Datastore datastore) { return test(null); } - var unnamedEntities1 = buildEntities(42, 43); - var unnamedEntities5 = buildEntities(1, 6); - var unnamedEntities20 = buildEntities(6, 26); - var entitiesWithAllPropertyTypes = buildEntityWithAllProperties(1, 6); + var unnamedEntities1 = buildEntities(42, 43, partition: partition); + var unnamedEntities5 = buildEntities(1, 6, partition: partition); + var unnamedEntities20 = buildEntities(6, 26, partition: partition); + var entitiesWithAllPropertyTypes = + buildEntityWithAllProperties(1, 6, partition: partition); test('lookup', () { return insert([], unnamedEntities20, transactional: false).then((keys) { @@ -399,9 +403,9 @@ runTests(Datastore datastore) { return test(null); } - var unnamedEntities1 = buildEntities(42, 43); - var unnamedEntities5 = buildEntities(1, 6); - var unnamedEntities99 = buildEntities(6, 106); + var unnamedEntities1 = buildEntities(42, 43, partition: partition); + var unnamedEntities5 = 
buildEntities(1, 6, partition: partition); + var unnamedEntities99 = buildEntities(6, 106, partition: partition); test('delete', () { return insert([], unnamedEntities99, transactional: false).then((keys) { @@ -462,8 +466,10 @@ runTests(Datastore datastore) { }, xg: xg); } - var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); - var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); + var namedEntities1 = + buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + var namedEntities5 = + buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); @@ -494,9 +500,12 @@ runTests(Datastore datastore) { } } - var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); - var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); - var namedEntities20 = buildEntities(6, 26, idFunction: (i) => "i$i"); + var namedEntities1 = + buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + var namedEntities5 = + buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); + var namedEntities20 = + buildEntities(6, 26, idFunction: (i) => "i$i", partition: partition); var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); @@ -574,8 +583,10 @@ runTests(Datastore datastore) { }); } - var namedEntities1 = buildEntities(42, 43, idFunction: (i) => "i$i"); - var namedEntities5 = buildEntities(1, 6, idFunction: (i) => "i$i"); + var namedEntities1 = + buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + var namedEntities5 = + buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); test('conflicting_transaction', () { expect(testConflictingTransaction(namedEntities1), @@ -685,7 +696,8 @@ runTests(Datastore datastore) { const TEST_QUERY_KIND = 'TestQueryKind'; var stringNamedEntities = buildEntities( - 1, 6, idFunction: (i) => 'str$i', kind: TEST_QUERY_KIND); + 1, 6, idFunction: (i) => 'str$i', kind: TEST_QUERY_KIND, + partition: partition); var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList(); var QUERY_KEY = TEST_PROPERTY_KEY_PREFIX; @@ -1009,18 +1021,7 @@ runTests(Datastore datastore) { }); } -Future cleanupDB(Datastore db) { - Future> getNamespaces() { - var q = new Query(kind: '__namespace__'); - return consumePages((_) => db.query(q)).then((List entities) { - return entities.map((Entity e) { - var id = e.key.elements.last.id; - if (id == 1) return null; - return id; - }).toList(); - }); - } - +Future cleanupDB(Datastore db, String namespace) { Future> getKinds(String namespace) { var partition = new Partition(namespace); var q = new Query(kind: '__kind__'); @@ -1047,13 +1048,9 @@ Future cleanupDB(Datastore db) { }); } - return getNamespaces().then((List namespaces) { - return Future.forEach(namespaces, (String namespace) { - return getKinds(namespace).then((List kinds) { - return Future.forEach(kinds, (String kind) { - return cleanup(namespace, kind); - }); - }); + return getKinds(namespace).then((List kinds) { + return Future.forEach(kinds, (String kind) { + return cleanup(namespace, kind); }); }); } @@ -1100,8 +1097,8 @@ main() { withAuthClient(scopes, (String project, httpClient) { var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); - return cleanupDB(datastore).then((_) { - return runE2EUnittest(() => runTests(datastore)); + 
return cleanupDB(datastore, null).then((_) { + return runE2EUnittest(() => runTests(datastore, null)); }); }); } diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 32fc4911..2a1ea1c1 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -18,10 +18,9 @@ const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; final TEST_BLOB_INDEXED_VALUE = new BlobValue([0xaa, 0xaa, 0xff, 0xff]); - -buildKey(int i, {Function idFunction, String kind : TEST_KIND}) { - return new Key( - [new KeyElement(kind, idFunction == null ? null : idFunction(i))]); +buildKey(int i, {Function idFunction, String kind : TEST_KIND, Partition p}) { + var path = [new KeyElement(kind, idFunction == null ? null : idFunction(i))]; + return new Key(path, partition: p); } Map buildProperties(int i) { @@ -40,20 +39,22 @@ Map buildProperties(int i) { } List buildKeys( - int from, int to, {Function idFunction, String kind : TEST_KIND}) { + int from, int to, {Function idFunction, String kind : TEST_KIND, + Partition partition}) { var keys = []; for (var i = from; i < to; i++) { - keys.add(buildKey(i, idFunction: idFunction, kind: kind)); + keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); } return keys; } List buildEntities( - int from, int to, {Function idFunction, String kind : TEST_KIND}) { + int from, int to, {Function idFunction, String kind : TEST_KIND, + Partition partition}) { var entities = []; var unIndexedProperties = new Set(); for (var i = from; i < to; i++) { - var key = buildKey(i, idFunction: idFunction, kind: kind); + var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition); var properties = buildProperties(i); unIndexedProperties.add(TEST_UNINDEXED_PROPERTY); entities.add( @@ -63,7 +64,7 @@ List buildEntities( } List buildEntityWithAllProperties( - int from, int to, {String kind : TEST_KIND}) { + int from, int to, {String kind : TEST_KIND, Partition partition}) { var us42 = const Duration(microseconds: 42); var unIndexed = new Set.from(['blobProperty']); @@ -89,7 +90,9 @@ List buildEntityWithAllProperties( var entities = []; for (var i = from; i < to; i++) { - var key = buildKey(i, idFunction: (i) => 'allprop$i', kind: kind); + var key = buildKey( + i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); + var unIndexedCopy = new Set.from(unIndexed); var properties = buildProperties(i); entities.add(new Entity(key, properties, unIndexedProperties: unIndexed)); } diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index ed1663d5..eb46f9e3 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -144,7 +144,9 @@ Future sleep(Duration duration) { return completer.future; } -runTests(db.DatastoreDB store) { +runTests(db.DatastoreDB store, String namespace) { + var partition = store.newPartition(namespace); + void compareModels(List expectedModels, List models, {bool anyOrder: false}) { @@ -214,7 +216,7 @@ runTests(db.DatastoreDB store) { group('e2e_db', () { group('insert_lookup_delete', () { test('persons', () { - var root = store.emptyKey; + var root = partition.emptyKey; var persons = []; for (var i = 1; i <= 10; i++) { persons.add(new Person() @@ -227,7 +229,7 @@ runTests(db.DatastoreDB store) { return testInsertLookupDelete(persons); }); test('users', () { - var root = store.emptyKey; + var root = partition.emptyKey; var users 
= []; for (var i = 1; i <= 10; i++) { users.add(new User() @@ -240,7 +242,7 @@ runTests(db.DatastoreDB store) { return testInsertLookupDelete(users); }); test('expando_insert', () { - var root = store.emptyKey; + var root = partition.emptyKey; var expandoPersons = []; for (var i = 1; i <= 10; i++) { var expandoPerson = new ExpandoPerson() @@ -256,7 +258,7 @@ runTests(db.DatastoreDB store) { return testInsertLookupDelete(expandoPersons); }); test('transactional_insert', () { - var root = store.emptyKey; + var root = partition.emptyKey; var models = []; models.add(new Person() @@ -281,7 +283,7 @@ runTests(db.DatastoreDB store) { }); test('parent_key', () { - var root = store.emptyKey; + var root = partition.emptyKey; var users = []; for (var i = 333; i <= 334; i++) { users.add(new User() @@ -310,7 +312,7 @@ runTests(db.DatastoreDB store) { }); test('auto_ids', () { - var root = store.emptyKey; + var root = partition.emptyKey; var persons = []; persons.add(new Person() ..id = 42 @@ -322,21 +324,17 @@ runTests(db.DatastoreDB store) { ..parentKey = root ..age = 81 ..name = 'user81'); - // Auto id person without parentKey - persons.add(new Person() - ..age = 82 - ..name = 'user82'); // Auto id person with non-root parentKey var fatherKey = persons.first.parentKey; persons.add(new Person() ..parentKey = fatherKey - ..age = 83 - ..name = 'user83'); + ..age = 82 + ..name = 'user82'); persons.add(new Person() ..id = 43 ..parentKey = root - ..age = 84 - ..name = 'user84'); + ..age = 83 + ..name = 'user83'); return store.commit(inserts: persons).then(expectAsync((_) { // At this point, autoIds are allocated and are relfected in the // models (as well as parentKey if it was empty). @@ -357,14 +355,10 @@ runTests(db.DatastoreDB store) { expect(persons[2].id, isNotNull); expect(persons[2].id is int, isTrue); - expect(persons[2].parentKey, equals(root)); - - expect(persons[3].id, isNotNull); - expect(persons[3].id is int, isTrue); - expect(persons[3].parentKey, equals(fatherKey)); + expect(persons[2].parentKey, equals(fatherKey)); - expect(persons[4].id, equals(43)); - expect(persons[4].parentKey, equals(root)); + expect(persons[3].id, equals(43)); + expect(persons[3].parentKey, equals(root)); expect(persons[1].id != persons[2].id, isTrue); // NOTE: We can't make assumptions about the id of persons[3], @@ -389,7 +383,7 @@ runTests(db.DatastoreDB store) { }); test('query', () { - var root = store.emptyKey; + var root = partition.emptyKey; var users = []; for (var i = 1; i <= 10; i++) { var languages = []; @@ -649,8 +643,9 @@ main() { withAuthClient(scopes, (String project, httpClient) { var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); - return datastore_test.cleanupDB(datastore).then((_) { - return runE2EUnittest(() => runTests(new db.DatastoreDB(datastore))); + return datastore_test.cleanupDB(datastore, null).then((_) { + return runE2EUnittest( + () => runTests(new db.DatastoreDB(datastore), null)); }); }); } diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 3d231e18..d38f2d12 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -4,6 +4,7 @@ library gcloud.test.db_all_test; +import 'dart:io'; import 'dart:async'; import 'package:gcloud/db.dart' as db; @@ -20,26 +21,31 @@ import 'common_e2e.dart'; main() { var scopes = datastore_impl.DatastoreImpl.SCOPES; + var now = new DateTime.now().millisecondsSinceEpoch; + String namespace = '${Platform.operatingSystem}${now}'; + 
withAuthClient(scopes, (String project, httpClient) { var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); var datastoreDB = new db.DatastoreDB(datastore); - return datastore_test.cleanupDB(datastore).then((_) { - return runE2EUnittest(() { - datastore_test.runTests(datastore); + return runE2EUnittest(() { + unittestConfiguration.timeout = const Duration(minutes: 1); - test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); - }); + datastore_test.runTests(datastore, namespace); - db_test.runTests(datastoreDB); + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); + }); - test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); - }); + db_test.runTests(datastoreDB, namespace); - db_metamodel_test.runTests(datastore, datastoreDB); + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); }); + + db_metamodel_test.runTests(datastore, datastoreDB); + }).whenComplete(() { + return datastore_test.cleanupDB(datastore, namespace); }); }); } From 34b559afd8df437706aa9eef8bad3a8fd1e5eed9 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 19 Dec 2014 10:33:06 +0100 Subject: [PATCH 050/239] Second attempt to deflake package waterfall builders R=sgjesse@google.com Review URL: https://codereview.chromium.org//818673002 --- .../datastore/e2e/datastore_test_impl.dart | 61 ++++++++++++------- 1 file changed, 39 insertions(+), 22 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 3ad1795f..adeade90 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -598,7 +598,6 @@ runTests(Datastore datastore, String namespace) { throwsA(isTransactionAbortedError)); }); }); - group('query', () { Future testQuery(String kind, {List filters, @@ -611,7 +610,8 @@ runTests(Datastore datastore, String namespace) { var query = new Query( kind: kind, filters: filters, orders: orders, offset: offset, limit: limit); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List entities) { if (transaction != null) { return datastore.commit(transaction: transaction) @@ -757,7 +757,8 @@ runTests(Datastore datastore, String namespace) { test('query', () { return insert(stringNamedEntities, []).then((keys) { - return waitUntilEntitiesReady(datastore, stringNamedKeys).then((_) { + return waitUntilEntitiesReady( + datastore, stringNamedKeys, partition).then((_) { var tests = [ // EntityKind query () => testQueryAndCompare( @@ -850,7 +851,8 @@ runTests(Datastore datastore, String namespace) { () => delete(stringNamedKeys, transactional: true), // Wait until the entity deletes are reflected in the indices. 
- () => waitUntilEntitiesGone(datastore, stringNamedKeys), + () => waitUntilEntitiesGone( + datastore, stringNamedKeys, partition), // Make sure queries don't return results () => testQueryAndCompare( @@ -878,7 +880,8 @@ runTests(Datastore datastore, String namespace) { * + SubSubKind:1 -- This is a real entity of kind SubSubKind * + SubSubKind2:1 -- This is a real entity of kind SubSubKind2 */ - var rootKey = new Key.fromParent('RootKind', 1); + var rootKey = + new Key([new KeyElement('RootKind', 1)], partition: partition); var subKey = new Key.fromParent('SubKind', 1, parent: rootKey); var subSubKey = new Key.fromParent('SubSubKind', 1, parent: subKey); var subSubKey2 = new Key.fromParent('SubSubKind2', 1, parent: subKey); @@ -894,7 +897,8 @@ runTests(Datastore datastore, String namespace) { // FIXME/TODO: Ancestor queries should be strongly consistent. // We should not need to wait for them. () { - return waitUntilEntitiesReady(datastore, [subSubKey, subSubKey2]); + return waitUntilEntitiesReady( + datastore, [subSubKey, subSubKey2], partition); }, // Test that lookup only returns inserted entities. () { @@ -915,7 +919,8 @@ runTests(Datastore datastore, String namespace) { () { var ancestorQuery = new Query(ancestorKey: rootKey, orders: orders); - return consumePages((_) => datastore.query(ancestorQuery)) + return consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); @@ -926,7 +931,8 @@ runTests(Datastore datastore, String namespace) { () { var ancestorQuery = new Query(ancestorKey: subKey, orders: orders); - return consumePages((_) => datastore.query(ancestorQuery)) + return consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); @@ -936,7 +942,8 @@ runTests(Datastore datastore, String namespace) { // - by [subSubKey] () { var ancestorQuery = new Query(ancestorKey: subSubKey); - return consumePages((_) => datastore.query(ancestorQuery)) + return consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -945,7 +952,8 @@ runTests(Datastore datastore, String namespace) { // - by [subSubKey2] () { var ancestorQuery = new Query(ancestorKey: subSubKey2); - return consumePages((_) => datastore.query(ancestorQuery)) + return consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -956,7 +964,8 @@ runTests(Datastore datastore, String namespace) { // - by [rootKey] + 'SubSubKind' () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -965,7 +974,8 @@ runTests(Datastore datastore, String namespace) { // - by [rootKey] + 'SubSubKind2' () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind2'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -974,7 +984,8 @@ runTests(Datastore datastore, 
String namespace) { // - by [subSubKey] + 'SubSubKind' () { var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -984,7 +995,8 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -994,7 +1006,8 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 0); }); @@ -1003,7 +1016,8 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); - return consumePages((_) => datastore.query(query)) + return consumePages( + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 0); }); @@ -1055,15 +1069,18 @@ Future cleanupDB(Datastore db, String namespace) { }); } -Future waitUntilEntitiesReady(Datastore db, List keys) { - return waitUntilEntitiesHelper(db, keys, true); +Future waitUntilEntitiesReady(Datastore db, List keys, Partition p) { + return waitUntilEntitiesHelper(db, keys, true, p); } -Future waitUntilEntitiesGone(Datastore db, List keys) { - return waitUntilEntitiesHelper(db, keys, false); +Future waitUntilEntitiesGone(Datastore db, List keys, Partition p) { + return waitUntilEntitiesHelper(db, keys, false, p); } -Future waitUntilEntitiesHelper(Datastore db, List keys, bool positive) { +Future waitUntilEntitiesHelper(Datastore db, + List keys, + bool positive, + Partition p) { var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); @@ -1071,7 +1088,7 @@ Future waitUntilEntitiesHelper(Datastore db, List keys, bool positive) { Future waitForKeys(String kind, List keys) { var q = new Query(kind: kind); - return consumePages((_) => db.query(q)).then((entities) { + return consumePages((_) => db.query(q, partition: p)).then((entities) { for (var key in keys) { bool found = false; for (var entity in entities) { From 61959e32a7ed32020fed0cf623849826cd1af4ae Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 7 Jan 2015 12:47:19 +0100 Subject: [PATCH 051/239] Third attempt to finally deflake builders on package waterfall R=sgjesse@google.com Review URL: https://codereview.chromium.org//788243009 --- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 48 +++++++++++-------- .../test/db/e2e/metamodel_test_impl.dart | 9 +++- 2 files changed, 35 insertions(+), 22 deletions(-) diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index eb46f9e3..0d25407a 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -451,12 +451,12 @@ runTests(db.DatastoreDB store, String namespace) { ..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { - return 
waitUntilEntitiesReady(store, allKeys).then((_) { + return waitUntilEntitiesReady(store, allKeys, partition).then((_) { var tests = [ // Queries for [Person] return no results, we only have [User] // objects. () { - return store.query(Person).run().toList() + return store.query(Person, partition: partition).run().toList() .then((List models) { compareModels([], models); }); @@ -464,7 +464,7 @@ runTests(db.DatastoreDB store, String namespace) { // All users query () { - return store.query(User).run().toList() + return store.query(User, partition: partition).run().toList() .then((List models) { compareModels(users, models, anyOrder: true); }); @@ -472,7 +472,7 @@ runTests(db.DatastoreDB store, String namespace) { // Sorted query () { - return store.query(User) + return store.query(User, partition: partition) ..order('-name') ..order('nickname') ..run().toList().then((List models) { @@ -481,7 +481,7 @@ runTests(db.DatastoreDB store, String namespace) { }); }, () { - return store.query(User) + return store.query(User, partition: partition) ..order('-name') ..order('-nickname') ..run().toList().then((List models) { @@ -492,7 +492,7 @@ runTests(db.DatastoreDB store, String namespace) { // Sorted query with filter () { - return store.query(User) + return store.query(User, partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('nickname') @@ -502,7 +502,7 @@ runTests(db.DatastoreDB store, String namespace) { }); }, () { - return store.query(User) + return store.query(User, partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('-nickname') @@ -515,7 +515,7 @@ runTests(db.DatastoreDB store, String namespace) { // Filter lists /* FIXME: TODO: FIXME: "IN" not supported in public proto/apiary */ () { - return store.query(User) + return store.query(User, partition: partition) ..filter('languages IN', ['foo']) ..order('name') ..run().toList().then((List models) { @@ -523,7 +523,7 @@ runTests(db.DatastoreDB store, String namespace) { }); }, () { - return store.query(User) + return store.query(User, partition: partition) ..filter('languages IN', ['bar']) ..order('name') ..run().toList().then((List models) { @@ -533,7 +533,7 @@ runTests(db.DatastoreDB store, String namespace) { // Simple limit/offset test. () { - return store.query(User) + return store.query(User, partition: partition) ..order('-name') ..order('nickname') ..offset(3) @@ -547,7 +547,7 @@ runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter on normal property. 
() { - return store.query(ExpandoPerson) + return store.query(ExpandoPerson, partition: partition) ..filter('name =', expandoPersons.last.name) ..run().toList().then((List models) { compareModels([expandoPersons.last], models); @@ -555,7 +555,7 @@ runTests(db.DatastoreDB store, String namespace) { }, // Expando queries: Filter on expanded String property () { - return store.query(ExpandoPerson) + return store.query(ExpandoPerson, partition: partition) ..filter('foo =', expandoPersons.last.foo) ..run().toList().then((List models) { compareModels([expandoPersons.last], models); @@ -563,7 +563,7 @@ runTests(db.DatastoreDB store, String namespace) { }, // Expando queries: Filter on expanded int property () { - return store.query(ExpandoPerson) + return store.query(ExpandoPerson, partition: partition) ..filter('bar =', expandoPersons.last.bar) ..run().toList().then((List models) { compareModels([expandoPersons.last], models); @@ -572,7 +572,7 @@ runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter normal property with different // propertyName (datastore name is 'NN'). () { - return store.query(ExpandoPerson) + return store.query(ExpandoPerson, partition: partition) ..filter('nickname =', expandoPersons.last.nickname) ..run().toList().then((List models) { compareModels([expandoPersons.last], models); @@ -583,7 +583,7 @@ runTests(db.DatastoreDB store, String namespace) { () => store.commit(deletes: allKeys), // Wait until the entity deletes are reflected in the indices. - () => waitUntilEntitiesGone(store, allKeys), + () => waitUntilEntitiesGone(store, allKeys, partition), // Make sure queries don't return results () => store.lookup(allKeys).then((List models) { @@ -600,24 +600,30 @@ runTests(db.DatastoreDB store, String namespace) { }); } -Future waitUntilEntitiesReady(db.DatastoreDB mdb, List keys) { - return waitUntilEntitiesHelper(mdb, keys, true); +Future waitUntilEntitiesReady(db.DatastoreDB mdb, + List keys, + db.Partition partition) { + return waitUntilEntitiesHelper(mdb, keys, true, partition); } -Future waitUntilEntitiesGone(db.DatastoreDB mdb, List keys) { - return waitUntilEntitiesHelper(mdb, keys, false); +Future waitUntilEntitiesGone(db.DatastoreDB mdb, + List keys, + db.Partition partition) { + return waitUntilEntitiesHelper(mdb, keys, false, partition); } Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, - bool positive) { + bool positive, + db.Partition partition) { var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.type, () => []).add(key); } Future waitForKeys(Type kind, List keys) { - return mdb.query(kind).run().toList().then((List models) { + return mdb.query(kind, partition: partition) + .run().toList().then((List models) { for (var key in keys) { bool found = false; for (var model in models) { diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 14c7e9ce..43e50df5 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -52,6 +52,8 @@ runTests(datastore, db.DatastoreDB store) { final cond = predicate; group('e2e_db_metamodel', () { + // NOTE: This test cannot safely be run concurrently, since it's using fixed + // keys (i.e. fixed partition + fixed Ids). 
test('namespaces__insert_lookup_delete', () { var entities = buildEntitiesWithDifferentNamespaces(); var keys = entities.map((e) => e.key).toList(); @@ -61,7 +63,7 @@ runTests(datastore, db.DatastoreDB store) { var namespaceQuery = store.query(Namespace); return namespaceQuery.run().toList() .then((List namespaces) { - expect(namespaces.length, 3); + expect(namespaces.length, greaterThanOrEqualTo(3)); expect(namespaces, contains(cond((ns) => ns.name == null))); expect(namespaces, contains(cond((ns) => ns.name == 'FooNamespace'))); @@ -70,6 +72,11 @@ runTests(datastore, db.DatastoreDB store) { var futures = []; for (var namespace in namespaces) { + if (!(namespace == null || + namespace == 'FooNamespace' || + namespace == 'BarNamespace')) { + continue; + } var partition = store.newPartition(namespace.name); var kindQuery = store.query(Kind, partition: partition); futures.add(kindQuery.run().toList().then((List kinds) { From fb6680ef647e96fef48dae6cdc0fec20534134eb Mon Sep 17 00:00:00 2001 From: Gustav Wibling Date: Thu, 5 Feb 2015 10:34:55 +0100 Subject: [PATCH 052/239] Temporarily remove failing test and remove the timeout which is already defaulting to 2 minutes. R=sgjesse@google.com, kustermann@google.com BUG= Review URL: https://codereview.chromium.org//900623002 --- pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart | 3 +++ pkgs/gcloud/test/db_all_e2e_test.dart | 2 -- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index adeade90..adbf30b6 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -523,11 +523,14 @@ runTests(Datastore datastore, String namespace) { return testEmptyCommit(namedEntities5Keys); }); + /* Disabled until we validate if the server has started to support + * more than 5 concurrent commits to different entity groups. 
test('negative_empty_commit_xg', () { expect(testEmptyCommit( namedEntities20Keys, transactional: true, xg: true), throwsA(isApplicationError)); }); + */ }); group('conflicting_transaction', () { diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index d38f2d12..4fe737c2 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -29,8 +29,6 @@ main() { var datastoreDB = new db.DatastoreDB(datastore); return runE2EUnittest(() { - unittestConfiguration.timeout = const Duration(minutes: 1); - datastore_test.runTests(datastore, namespace); test('sleep-between-test-suites', () { From 87b7b1b0fb756c1e53a226daf1083df97b2f7aa7 Mon Sep 17 00:00:00 2001 From: Nicolas Garnier Date: Sun, 15 Feb 2015 01:56:44 +0100 Subject: [PATCH 053/239] Update pubspec.yaml --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 82e2d6c0..4e50dde5 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,7 @@ environment: dependencies: crypto: '>=0.9.0 <0.10.0' googleapis: '>=0.2.0 <0.6.0' - googleapis_beta: '>=0.3.0 <0.6.0' + googleapis_beta: '>=0.3.0 <0.9.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 37c196e137a38f136b7c9fa589f163e7c8231bdd Mon Sep 17 00:00:00 2001 From: Nicolas Garnier Date: Sun, 15 Feb 2015 01:57:04 +0100 Subject: [PATCH 054/239] Update pubspec.yaml --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 4e50dde5..82e2d6c0 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,7 @@ environment: dependencies: crypto: '>=0.9.0 <0.10.0' googleapis: '>=0.2.0 <0.6.0' - googleapis_beta: '>=0.3.0 <0.9.0' + googleapis_beta: '>=0.3.0 <0.6.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 93f11e0e963990462603d675d5adeff5b45693e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Thu, 19 Feb 2015 09:57:56 +0100 Subject: [PATCH 055/239] Roll googleapis and googleapis_beta dependencies None of the APIs used in the gcloud package have had breaking changes. 
R=kustermann@google.com Review URL: https://codereview.chromium.org//935323002 --- pkgs/gcloud/pubspec.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 82e2d6c0..0fc94536 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.3+1 +version: 0.1.3+2 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.5.0 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.6.0' - googleapis_beta: '>=0.3.0 <0.6.0' + googleapis: '>=0.2.0 <0.7.0' + googleapis_beta: '>=0.3.0 <0.10.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From a52cb64dd94790a3bf88657839e3724757244e38 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 19 Feb 2015 13:36:37 +0100 Subject: [PATCH 056/239] Add DatastoreDB.replace({defaultPartition}) R=sgjesse@google.com Review URL: https://codereview.chromium.org//937133002 --- pkgs/gcloud/CHANGELOG.md | 9 ++++++ pkgs/gcloud/lib/src/db/db.dart | 7 +++-- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/db/db_test.dart | 50 ++++++++++++++++++++++++++++++++ 4 files changed, 64 insertions(+), 4 deletions(-) create mode 100644 pkgs/gcloud/test/db/db_test.dart diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index d90dff00..ecf38adb 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,12 @@ +## 0.1.4 + +* Added optional `defaultPartition` parameter to the constructor of + `DatastoreDB`. + +## 0.1.3+2 + +* Widened googleapis/googleapis_beta constraints in pubspec.yaml. + ## 0.1.3+1 * Change the service scope keys keys to non-private symbols. diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 07c5efb7..8d17c1f3 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -247,9 +247,10 @@ class DatastoreDB { final ModelDB _modelDB; Partition _defaultPartition; - DatastoreDB(this.datastore, {ModelDB modelDB}) - : _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { - _defaultPartition = new Partition(null); + DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) : + _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { + _defaultPartition = + defaultPartition != null ? defaultPartition : new Partition(null); } /** diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 0fc94536..660572a8 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.3+2 +version: 0.1.4 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart new file mode 100644 index 00000000..70320d7a --- /dev/null +++ b/pkgs/gcloud/test/db/db_test.dart @@ -0,0 +1,50 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +library gcloud.db_test; + +import 'package:gcloud/db.dart'; +import 'package:unittest/unittest.dart'; + +@Kind() +class Foobar extends Model {} + +main() { + group('db', () { + test('default-partition', () { + var db = new DatastoreDB(null); + + // Test defaultPartition + expect(db.defaultPartition.namespace, isNull); + + // Test emptyKey + expect(db.emptyKey.partition.namespace, isNull); + + // Test emptyKey.append() + var key = db.emptyKey.append(Foobar, id: 42); + expect(key.parent, db.emptyKey); + expect(key.partition.namespace, isNull); + expect(key.id, 42); + expect(key.type, equals(Foobar)); + }); + + test('non-default-partition', () { + var nsDb = new DatastoreDB( + null, defaultPartition: new Partition('foobar-namespace')); + + // Test defaultPartition + expect(nsDb.defaultPartition.namespace, 'foobar-namespace'); + + // Test emptyKey + expect(nsDb.emptyKey.partition.namespace, 'foobar-namespace'); + + // Test emptyKey.append() + var key = nsDb.emptyKey.append(Foobar, id: 42); + expect(key.parent, nsDb.emptyKey); + expect(key.partition.namespace, 'foobar-namespace'); + expect(key.id, 42); + expect(key.type, equals(Foobar)); + }); + }); +} From df6ac27ae06b447d3a0f42abbd5ecf236a99be9c Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 19 Feb 2015 16:24:50 +0100 Subject: [PATCH 057/239] When running db queries deduce the query-partition automatically from the ancestor key R=sgjesse@google.com Review URL: https://codereview.chromium.org//939133002 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/db/db.dart | 31 +++++++++++++++++++++++++++---- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 32 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index ecf38adb..eba8111b 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.1.4+1 + +* Deduce the query partition automatically from query ancestor key. + ## 0.1.4 * Added optional `defaultPartition` parameter to the constructor of diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 8d17c1f3..bf1c3f15 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -61,6 +61,14 @@ class Transaction { * touch/look at an arbitrary number of rows. */ Query query(Type kind, Key ancestorKey, {Partition partition}) { + // TODO(#25): The `partition` element is redundant and should be removed. + if (partition == null) { + partition = ancestorKey.partition; + } else if (ancestorKey.partition != partition) { + throw new ArgumentError( + 'Ancestor queries must have the same partition in the ancestor key ' + 'as the partition where the query executes in.'); + } _checkSealed(); return new Query(db, kind, @@ -297,10 +305,25 @@ class DatastoreDB { * Build a query for [kind] models. */ Query query(Type kind, {Partition partition, Key ancestorKey}) { - return new Query(this, - kind, - partition: partition, - ancestorKey: ancestorKey); + // TODO(#26): There is only one case where `partition` is not redundant + // Namely if `ancestorKey == null` and `partition != null`. We could + // say we get rid of `partition` and enforce `ancestorKey` to + // be `Partition.emptyKey`? 
+ if (partition == null) { + if (ancestorKey != null) { + partition = ancestorKey.partition; + } else { + partition = defaultPartition; + } + } else if (partition != ancestorKey.partition) { + throw new ArgumentError( + 'Ancestor queries must have the same partition in the ancestor key ' + 'as the partition where the query executes in.'); + } + return new Query(this, + kind, + partition: partition, + ancestorKey: ancestorKey); } /** diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 660572a8..ebaa2dbd 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.4 +version: 0.1.4+1 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 607fdcfa401f1a1bc53fa46573cb86e3933137b4 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 19 Feb 2015 16:43:35 +0100 Subject: [PATCH 058/239] Bugfix in sanity check: Prevent null-dereference Review URL: https://codereview.chromium.org//943663002 --- pkgs/gcloud/lib/src/db/db.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index bf1c3f15..c417d1c2 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -315,7 +315,7 @@ class DatastoreDB { } else { partition = defaultPartition; } - } else if (partition != ancestorKey.partition) { + } else if (ancestorKey != null && partition != ancestorKey.partition) { throw new ArgumentError( 'Ancestor queries must have the same partition in the ancestor key ' 'as the partition where the query executes in.'); From d7903945ba872f817c4dbf3292304d542dd95f55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 13 Mar 2015 08:46:22 +0100 Subject: [PATCH 059/239] Revert "Remove Pub/Sub for now" This reverts commit 245f1e6ec83dede79131870f4daa6dd87065c9a3. As Cloud Pub/Sub is now publicly available it makes sense to add the API for it back. This only reverts the removal. Actual updates to it will be in a separate change R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//1005563002 --- pkgs/gcloud/lib/pubsub.dart | 410 +++++++++ pkgs/gcloud/lib/src/pubsub_impl.dart | 503 +++++++++++ pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 1044 ++++++++++++++++++++++ 4 files changed, 1958 insertions(+), 1 deletion(-) create mode 100644 pkgs/gcloud/lib/pubsub.dart create mode 100644 pkgs/gcloud/lib/src/pubsub_impl.dart create mode 100644 pkgs/gcloud/test/pubsub/pubsub_test.dart diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart new file mode 100644 index 00000000..ae24da11 --- /dev/null +++ b/pkgs/gcloud/lib/pubsub.dart @@ -0,0 +1,410 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +library gcloud.pubsub; + +import 'dart:async'; +import 'dart:collection'; +import 'dart:convert'; +import 'package:crypto/crypto.dart'; +import 'package:http/http.dart' as http; + +import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; + +import 'common.dart'; +export 'common.dart'; + +part 'src/pubsub_impl.dart'; + +/// A Cloud Pub/Sub client. +/// +/// Connects to the Cloud Pub/Sub service and gives access to its operations. +/// +/// Google Cloud Pub/Sub is a reliable, many-to-many, asynchronous messaging +/// service from Google Cloud Platform. 
A detailed overview is available on +/// [Pub/Sub docs](https://developers.google.com/pubsub/overview). +/// +/// To access Pub/Sub, an authenticate HTTP client is required. This client +/// should as a minimum provide access to the scopes `PubSub.Scopes`. +/// +/// The following example shows how to access Pub/Sub using a service account +/// and pull a message from a subscription. +/// +/// import 'package:http/http.dart' as http; +/// import 'package:googleapis_auth/auth_io.dart' as auth; +/// import 'package:gcloud/pubsub.dart'; +/// +/// Future createClient() { +/// // Service account credentials retreived from Cloud Console. +/// String creds = +/// r''' +/// { +/// "private_key_id": ..., +/// "private_key": ..., +/// "client_email": ..., +/// "client_id": ..., +/// "type": "service_account" +/// }'''; +/// return auth.clientViaServiceAccount( +/// new auth.ServiceAccountCredentials.fromJson(creds), +/// PubSub.Scopes); +/// } +/// +/// main() { +/// var project = 'my-project'; +/// var client; +/// var pubsub; +/// createClient().then((c) { +/// client = c; +/// pubsub = new PubSub(client, project); +/// return pubsub.lookupSubscription('my-subscription'); +/// }) +/// .then((Subscription subscription) => subscription.pull()) +/// .then((PullEvent event) => print('Message ${event.message.asString}')) +/// .whenComplete(() => client.close()); +/// } +/// +/// When working with topics and subscriptions they are referred to using +/// names. These names can be either relative names or absolute names. +/// +/// An absolute name of a topic starts with `/` and has the form: +/// +/// /topics// +/// +/// When a relative topic name is used, its absolute name is generated by +/// prepending `/topics//`, where `` is the project +/// id passed to the constructor. +/// +/// An absolute name of a subscription starts with `/` and has the form: +/// +/// /subscriptions// +/// +/// When a relative subscription name is used, its absolute name is +/// generated by prepending `/subscriptions//`, where +/// `` is the project id passed to the constructor. +/// +abstract class PubSub { + /// List of required OAuth2 scopes for Pub/Sub operation. + static const Scopes = const [ pubsub.PubsubApi.PubsubScope ]; + + /// Access Pub/Sub using an authenicated client. + /// + /// The [client] is an authentiacted HTTP client. This client must + /// provide access to at least the scopes in `PubSub.Scopes`. + /// + /// The [project] is the name of the Google Cloud project. + /// + /// Returs an object providing access to Pub/Sub. The passed-in [client] will + /// not be closed automatically. The caller is responsible for closing it. + factory PubSub(http.Client client, String project) = _PubSubImpl; + + /// The name of the project. + String get project; + + /// Create a new topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the newly created topic. + Future createTopic(String name); + + /// Delete topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future deleteTopic(String name); + + /// Look up topic named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the topic. + Future lookupTopic(String name); + + /// Lists all topics. + /// + /// Returns a `Stream` of topics. 
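+  ///
+  /// A minimal usage sketch, assuming `pubsub` is an already constructed
+  /// [PubSub] instance:
+  ///
+  ///     pubsub.listTopics().listen((topic) => print(topic.name));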
+ Stream listTopics(); + + /// Start paging through all topics. + /// + /// The maximum number of topics in each page is specified in [pageSize]. + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page of topics. + Future> pageTopics({int pageSize: 50}); + + /// Create a new subscription named [name] listening on topic [topic]. + /// + /// If [endpoint] is passed this will create a push subscription. + /// + /// Otherwise this will create a pull subscription. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the newly created subscripton. + Future createSubscription( + String name, String topic, {Uri endpoint}); + + /// Delete subscription named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the subscription. + Future deleteSubscription(String name); + + /// Lookup subscription with named [name]. + /// + /// The [name] can be either an absolute name or a relative name. + /// + /// Returns a `Future` which completes with the subscription. + Future lookupSubscription(String name); + + /// List subscriptions. + /// + /// If [query] is passed this will list all subscriptions matching the query. + /// + /// Otherwise this will list all subscriptions. + /// + /// The only supported query string is the name of a topic. If a name of a + /// topic is passed as [query], this will list all subscriptions on that + /// topic. + /// + /// Returns a `Stream` of subscriptions. + Stream listSubscriptions([String query]); + + /// Start paging through subscriptions. + /// + /// If [topic] is passed this will list all subscriptions to that topic. + /// + /// Otherwise this will list all subscriptions. + /// + /// The maximum number of subscriptions in each page is specified in + /// [pageSize] + /// + /// Returns a `Future` which completes with a `Page` object holding the + /// first page. Use the `Page` object to move to the next page of + /// subscriptions. + Future> pageSubscriptions( + {String topic, int pageSize: 50}); +} + +/// A Pub/Sub topic. +/// +/// A topic is used by a publisher to publish (send) messages. +abstract class Topic { + /// The relative name of this topic. + String get name; + + /// The name of the project for this topic. + String get project; + + /// The absolute name of this topic. + String get absoluteName; + + /// Delete this topic. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future delete(); + + /// Publish a message. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publish(Message message); + + /// Publish a string as a message. + /// + /// The message will get the labels specified in [labels]. The keys in this + /// map must be strings and the values must be either Strings or integers. + /// + /// The [labels] are passed together with the message to the receiver. + /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publishString(String message, {Map labels}); + + /// Publish bytes as a message. + /// + /// The message will get the labels specified in [labels]. The keys in this + /// map must be strings and the values must be either Strings or integers. + /// + /// The [labels] are passed together with the message to the receiver. 
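+  ///
+  /// For example (a sketch; `topic` is assumed to come from
+  /// [PubSub.lookupTopic]):
+  ///
+  ///     topic.publishBytes(UTF8.encode('Hello'), labels: {'count': 1});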
+ /// + /// Returns a `Future` which completes with `null` when the operation + /// is finished. + Future publishBytes(List message, {Map labels}); +} + +/// A Pub/Sub subscription +/// +/// A subscription is used to receive messages. A subscriber application +/// create a subscription on a topic to receive messages from it. +/// +/// Subscriptions can be either pull subscriptions or push subscriptions. +/// +/// For a pull subscription the receiver calls the `Subscription.pull` +/// method on the subscription object to get the next message. +/// +/// For a push subscription a HTTPS endpoint is configured. This endpoint get +/// POST requests with the messages. +abstract class Subscription { + /// The relative name of this subscription. + String get name; + + /// The name of the project for this subscription. + String get project; + + /// The absolute name of this subscription. + String get absoluteName; + + /// The topic subscribed to. + Topic get topic; + + /// Whether this is a push subscription. + /// + /// A push subscription is configured with an endpoint URI, and messages + /// are automatically sent to this endpoint without needing to call [pull]. + bool get isPush; + + /// Whether this is a pull subscription. + /// + /// A subscription without a configured endpoint URI is a pull subscripton. + /// Messages are not delivered automatically, but must instead be requested + /// using [pull]. + bool get isPull; + + /// The URI for the push endpoint. + /// + /// If this is a pull subscription this is `null`. + Uri get endpoint; + + /// Update the push configuration with a new endpoint. + /// + /// if [endpoint] is `null`, the subscription stops delivering messages + /// automatically, and becomes a pull subscription, if it isn't already. + /// + /// If [endpoint] is not `null`, the subscription will be a push + /// subscription, if it wasn't already, and Pub/Sub will start automatically + /// delivering message to the endpoint URI. + /// + /// Returns a `Future` which completes when the operation completes. + Future updatePushConfiguration(Uri endpoint); + + /// Delete this subscription. + /// + /// Returns a `Future` which completes when the operation completes. + Future delete(); + + + /// Pull a message from the subscription. + /// + /// If [noWait] is true, the method will complete the returned `Future` + /// with `null` if it finds that there are no messages available. + /// + /// If `noWait` is false, the method will wait for a message to become + /// available, and will then complete the `Future` with a `PullEvent` + /// containing the message. + Future pull({bool noWait: true}); +} + +/// The content of a Pub/Sub message. +/// +/// All Pub/Sub messages consist of a body of binary data and has an optional +/// set of labels (key-value pairs) associated with it. +/// +/// A `Message` contains the message body a list of bytes. The message body can +/// be read and written as a String, in which case the string is converted to +/// or from UTF-8 automatically. +abstract class Message { + /// Creates a new message with a String for the body. The String will + /// be UTF-8 encoded to create the actual binary body for the message. + /// + /// Message labels can be passed in the [labels] Map. The values in this + /// map must be either Strings or integers. Integers must be positive + /// 64-bit integers. + factory Message.withString(String message, {Map labels}) = + _MessageImpl.withString; + + /// Creates a new message with a binary body. 
+ /// + /// Message labels can be passed in the [labels] Map. The values in this + /// map must be either Strings or integers. Integers must be positive + /// 64-bit integers. + factory Message.withBytes(List message, {Map labels}) = + _MessageImpl.withBytes; + + /// The message body as a String. + /// + /// The binary body is decoded into a String using an UTF-8 decoder. + /// + /// If the body is not UTF-8 encoded use the [asBytes] getter and manually + /// apply the corect decoding. + String get asString; + + /// The message body as bytes. + List get asBytes; + + /// The labels for this message. The values in the Map are either + /// Strings or integers. + /// + /// Values can be 64-bit integers. + Map get labels; +} + +/// A Pub/Sub pull event. +/// +/// Instances of this class are returned when pulling messages with +/// [Subscription.pull]. +abstract class PullEvent { + /// The message content. + Message get message; + + /// Whether the message was truncated. + bool get isTruncated; + + /// Acknowledge reception of this message. + /// + /// Returns a `Future` which completes with `null` when the acknowledge has + /// been processed. + Future acknowledge(); +} + +/// Pub/Sub push event. +/// +/// This class can be used in a HTTP server for decoding messages pushed to +/// an endpoint. +/// +/// When a message is received on a push endpoint use the [PushEvent.fromJson] +/// constructor with the HTTP body to decode the received message. +/// +/// E.g. with a `dart:io` HTTP handler: +/// +/// void pushHandler(HttpRequest request) { +/// // Decode the JSON body. +/// request.transform(UTF8.decoder).join('').then((body) { +/// // Decode the JSON into a push message. +/// var message = new PushMessage.fromJson(body) +/// +/// // Process the message... +/// +/// // Respond with status code 20X to acknowledge the message. +/// response.statusCode = statusCode; +/// response.close(); +/// }); +/// } +//// +abstract class PushEvent { + /// The message content. + Message get message; + + /// The absolute name of the subscription. + String get subscriptionName; + + /// Create a `PushMessage` from JSON received on a Pub/Sub push endpoint. + factory PushEvent.fromJson(String json) = _PushEventImpl.fromJson; +} diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart new file mode 100644 index 00000000..2ce7b44e --- /dev/null +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -0,0 +1,503 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.pubsub; + +class _PubSubImpl implements PubSub { + final http.Client _client; + final String project; + final pubsub.PubsubApi _api; + final String _topicPrefix; + final String _subscriptionPrefix; + + _PubSubImpl(client, project) : + this._client = client, + this.project = project, + _api = new pubsub.PubsubApi(client), + _topicPrefix = '/topics/$project/', + _subscriptionPrefix = '/subscriptions/$project/'; + + + String _fullTopicName(String name) { + if (name.startsWith('/') && !name.startsWith('/topics')) { + throw new ArgumentError("Illegal absolute topic name. Absolute topic " + "name must start with '/topics'"); + } + return name.startsWith('/topics') ? 
name : '${_topicPrefix}$name'; + } + + String _fullSubscriptionName(name) { + if (name.startsWith('/') && !name.startsWith('/subscriptions')) { + throw new ArgumentError("Illegal absolute topic name. Absolute topic " + "name must start with '/subscriptions'"); + } + return name.startsWith('/subscriptions') ? name + : '${_subscriptionPrefix}$name'; + } + + Future _createTopic(String name) { + return _api.topics.create(new pubsub.Topic()..name = name); + } + + Future _deleteTopic(String name) { + return _api.topics.delete(name); + } + + Future _getTopic(String name) { + return _api.topics.get(name); + } + + Future _listTopics( + int pageSize, String nextPageToken) { + var query = 'cloud.googleapis.com/project in (/projects/$project)'; + return _api.topics.list( + query: query, maxResults: pageSize, pageToken: nextPageToken); + } + + Future _createSubscription( + String name, String topic, Uri endpoint) { + var subscription = new pubsub.Subscription() + ..name = name + ..topic = topic; + if (endpoint != null) { + var pushConfig = + new pubsub.PushConfig()..pushEndpoint = endpoint.toString(); + subscription.pushConfig = pushConfig; + } + return _api.subscriptions.create(subscription); + } + + Future _deleteSubscription(String name) { + return _api.subscriptions.delete(_fullSubscriptionName(name)); + } + + Future _getSubscription(String name) { + return _api.subscriptions.get(name); + } + + Future _listSubscriptions( + String topic, int pageSize, String nextPageToken) { + // See https://developers.google.com/pubsub/v1beta1/subscriptions/list for + // the specification of the query format. + var query = topic == null + ? 'cloud.googleapis.com/project in (/projects/$project)' + : 'pubsub.googleapis.com/topic in (/topics/$project/$topic)'; + return _api.subscriptions.list( + query: query, maxResults: pageSize, pageToken: nextPageToken); + } + + Future _modifyPushConfig(String subscription, Uri endpoint) { + var pushConfig = new pubsub.PushConfig() + ..pushEndpoint = endpoint != null ? endpoint.toString() : null; + var request = new pubsub.ModifyPushConfigRequest() + ..subscription = subscription + ..pushConfig = pushConfig; + return _api.subscriptions.modifyPushConfig(request); + } + + Future _publish( + String topic, List message, Map labels) { + var l = null; + if (labels != null) { + l = []; + labels.forEach((key, value) { + if (value is String) { + l.add(new pubsub.Label()..key = key..strValue = value); + } else { + l.add(new pubsub.Label()..key = key..numValue = value.toString()); + } + }); + } + var request = new pubsub.PublishRequest() + ..topic = topic + ..message = (new pubsub.PubsubMessage() + ..dataAsBytes = message + ..label = l); + return _api.topics.publish(request); + } + + Future _pull( + String subscription, bool returnImmediately) { + var request = new pubsub.PullRequest() + ..subscription = subscription + ..returnImmediately = returnImmediately; + return _api.subscriptions.pull(request); + } + + Future _ack(String ackId, String subscription) { + var request = new pubsub.AcknowledgeRequest() + ..ackId = [ ackId ] + ..subscription = subscription; + return _api.subscriptions.acknowledge(request); + } + + void _checkTopicName(name) { + if (name.startsWith('/') && !name.startsWith(_topicPrefix)) { + throw new ArgumentError( + "Illegal topic name. Absolute topic names for project '$project' " + "must start with $_topicPrefix"); + } + if (name.length == _topicPrefix.length) { + throw new ArgumentError( + 'Illegal topic name. 
Relative part of the name cannot be empty'); + } + } + + void _checkSubscriptionName(name) { + if (name.startsWith('/') && !name.startsWith(_subscriptionPrefix)) { + throw new ArgumentError( + "Illegal subscription name. Absolute subscription names for project " + "'$project' must start with $_subscriptionPrefix"); + } + if (name.length == _subscriptionPrefix.length) { + throw new ArgumentError( + 'Illegal subscription name. ' + 'Relative part of the name cannot be empty'); + } + } + + Future createTopic(String name) { + _checkTopicName(name); + return _createTopic(_fullTopicName(name)) + .then((top) => new _TopicImpl(this, top)); + } + + Future deleteTopic(String name) { + _checkTopicName(name); + return _deleteTopic(_fullTopicName(name)); + } + + Future lookupTopic(String name) { + _checkTopicName(name); + return _getTopic(_fullTopicName(name)) + .then((top) => new _TopicImpl(this, top)); + } + + Stream listTopics() { + Future> firstPage(pageSize) { + return _listTopics(pageSize, null) + .then((response) => new _TopicPageImpl(this, pageSize, response)); + } + return new StreamFromPages(firstPage).stream; + } + + Future> pageTopics({int pageSize: 50}) { + return _listTopics(pageSize, null).then((response) { + return new _TopicPageImpl(this, pageSize, response); + }); + } + + Future createSubscription( + String name, String topic, {Uri endpoint}) { + _checkSubscriptionName(name); + _checkTopicName(topic); + return _createSubscription(_fullSubscriptionName(name), + _fullTopicName(topic), + endpoint) + .then((sub) => new _SubscriptionImpl(this, sub)); + } + + Future deleteSubscription(String name) { + _checkSubscriptionName(name); + return _deleteSubscription(_fullSubscriptionName(name)); + } + + Future lookupSubscription(String name) { + _checkSubscriptionName(name); + return _getSubscription(_fullSubscriptionName(name)) + .then((sub) => new _SubscriptionImpl(this, sub)); + } + + Stream listSubscriptions([String query]) { + Future> firstPage(pageSize) { + return _listSubscriptions(query, pageSize, null) + .then((response) => + new _SubscriptionPageImpl(this, query, pageSize, response)); + } + return new StreamFromPages(firstPage).stream; + } + + Future> pageSubscriptions( + {String topic, int pageSize: 50}) { + return _listSubscriptions(topic, pageSize, null).then((response) { + return new _SubscriptionPageImpl(this, topic, pageSize, response); + }); + } +} + +/// Message class for messages constructed through 'new Message()'. It stores +/// the user supplied body as either String or bytes. +class _MessageImpl implements Message { + // The message body, if it is a `String`. In that case, [bytesMessage] is + // null. + final String _stringMessage; + + // The message body, if it is a byte list. In that case, [stringMessage] is + // null. + final List _bytesMessage; + + final Map labels; + + _MessageImpl.withString(this._stringMessage, {this.labels}) + : _bytesMessage = null; + + _MessageImpl.withBytes(this._bytesMessage, {this.labels}) + : _stringMessage = null; + + List get asBytes => + _bytesMessage != null ? _bytesMessage : UTF8.encode(_stringMessage); + + String get asString => + _stringMessage != null ? _stringMessage : UTF8.decode(_bytesMessage); +} + +/// Message received using [Subscription.pull]. +/// +/// Contains the [pubsub.PubsubMessage] received from Pub/Sub, and +/// makes the message body and labels available on request. +/// +/// The labels map is lazily created when first accessed. 
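+///
+/// Typical flow (sketch): `Subscription.pull()` completes with a [PullEvent]
+/// whose `message` is an instance of this class; the receiver reads
+/// `asString` or `asBytes` and then calls `acknowledge()` on the event.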
+class _PullMessage implements Message { + final pubsub.PubsubMessage _message; + List _bytes; + String _string; + Map _labels; + + _PullMessage(this._message); + + List get asBytes { + if (_bytes == null) _bytes = _message.dataAsBytes; + return _bytes; + } + + String get asString { + if (_string == null) _string = UTF8.decode(_message.dataAsBytes); + return _string; + } + + Map get labels { + if (_labels == null) { + _labels = {}; + _message.label.forEach((label) { + _labels[label.key] = + label.numValue != null ? label.numValue : label.strValue; + }); + } + return _labels; + } +} + +/// Message received through Pub/Sub push delivery. +/// +/// Stores the message body received from Pub/Sub as the Base64 encoded string +/// from the wire protocol. +/// +/// The labels have been decoded into a Map. +class _PushMessage implements Message { + final String _base64Message; + final Map labels; + + _PushMessage(this._base64Message, this.labels); + + List get asBytes => CryptoUtils.base64StringToBytes(_base64Message); + + String get asString => UTF8.decode(asBytes); +} + +/// Pull event received from Pub/Sub pull delivery. +/// +/// Stores the pull response received from Pub/Sub. +class _PullEventImpl implements PullEvent { + /// Pub/Sub API object. + final _PubSubImpl _api; + /// Low level response received from Pub/Sub. + final pubsub.PullResponse _response; + final Message message; + + _PullEventImpl(this._api, response) + : this._response = response, + message = new _PullMessage(response.pubsubEvent.message); + + bool get isTruncated => _response.pubsubEvent.truncated; + + Future acknowledge() { + return _api._ack(_response.ackId, _response.pubsubEvent.subscription); + } + +} + +/// Push event received from Pub/Sub push delivery. +/// +/// decoded from JSON encoded push HTTP request body. +class _PushEventImpl implements PushEvent { + static const PREFIX = '/subscriptions/'; + final Message _message; + final String _subscriptionName; + + Message get message => _message; + + String get subscriptionName => _subscriptionName; + + _PushEventImpl(this._message, this._subscriptionName); + + factory _PushEventImpl.fromJson(String json) { + Map body = JSON.decode(json); + String data = body['message']['data']; + Map labels = new HashMap(); + body['message']['labels'].forEach((label) { + var key = label['key']; + var value = label['strValue']; + if (value == null) value = label['numValue']; + labels[key] = value; + }); + String subscription = body['subscription']; + // TODO(#1): Remove this when the push event subscription name is prefixed + // with '/subscriptions/'. 
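+    // For illustration (hypothetical names): a short name such as
+    // 'my-project/my-sub' becomes '/subscriptions/my-project/my-sub', while
+    // names already starting with '/subscriptions/' pass through unchanged.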
+ if (!subscription.startsWith(PREFIX)) { + subscription = PREFIX + subscription; + } + return new _PushEventImpl(new _PushMessage(data, labels), subscription); + } +} + +class _TopicImpl implements Topic { + final _PubSubImpl _api; + final pubsub.Topic _topic; + + _TopicImpl(this._api, this._topic); + + String get name { + assert(_topic.name.startsWith(_api._topicPrefix)); + return _topic.name.substring(_api._topicPrefix.length); + } + + String get project { + assert(_topic.name.startsWith(_api._topicPrefix)); + return _api.project; + } + + String get absoluteName => _topic.name; + + Future publish(Message message) { + return _api._publish(_topic.name, message.asBytes, message.labels); + } + + Future delete() => _api._deleteTopic(_topic.name); + + Future publishString(String message, {Map labels}) { + return _api._publish(_topic.name, UTF8.encode(message), labels); + } + + Future publishBytes(List message, {Map labels}) { + return _api._publish(_topic.name, message, labels); + } +} + +class _SubscriptionImpl implements Subscription { + final _PubSubImpl _api; + final pubsub.Subscription _subscription; + + _SubscriptionImpl(this._api, this._subscription); + + String get name { + assert(_subscription.name.startsWith(_api._subscriptionPrefix)); + return _subscription.name.substring(_api._subscriptionPrefix.length); + } + + String get project { + assert(_subscription.name.startsWith(_api._subscriptionPrefix)); + return _api.project; + } + + String get absoluteName => _subscription.name; + + Topic get topic { + var topic = new pubsub.Topic()..name = _subscription.topic; + return new _TopicImpl(_api, topic); + } + + Future delete() => _api._deleteSubscription(_subscription.name); + + Future pull({bool noWait: true}) { + return _api._pull(_subscription.name, noWait) + .then((response) { + return new _PullEventImpl(_api, response); + }).catchError((e) => null, + test: (e) => e is pubsub.DetailedApiRequestError && + e.status == 400); + } + + Uri get endpoint => null; + + bool get isPull => endpoint == null; + + bool get isPush => endpoint != null; + + Future updatePushConfiguration(Uri endpoint) { + return _api._modifyPushConfig(_subscription.name, endpoint); + } +} + +class _TopicPageImpl implements Page { + final _PubSubImpl _api; + final int _pageSize; + final String _nextPageToken; + final List items; + + _TopicPageImpl(this._api, + this._pageSize, + pubsub.ListTopicsResponse response) + : items = new List(response.topic.length), + _nextPageToken = response.nextPageToken { + for (int i = 0; i < response.topic.length; i++) { + items[i] = new _TopicImpl(_api, response.topic[i]); + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (isLast) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _api._listTopics(pageSize, _nextPageToken).then((response) { + return new _TopicPageImpl(_api, pageSize, response); + }); + } +} + +class _SubscriptionPageImpl implements Page { + final _PubSubImpl _api; + final String _topic; + final int _pageSize; + final String _nextPageToken; + final List items; + + _SubscriptionPageImpl(this._api, + this._topic, + this._pageSize, + pubsub.ListSubscriptionsResponse response) + : items = new List(response.subscription != null + ? 
response.subscription.length + : 0), + _nextPageToken = response.nextPageToken{ + if (response.subscription != null) { + for (int i = 0; i < response.subscription.length; i++) { + items[i] = new _SubscriptionImpl(_api, response.subscription[i]); + } + } + } + + bool get isLast => _nextPageToken == null; + + Future> next({int pageSize}) { + if (_nextPageToken == null) return new Future.value(null); + if (pageSize == null) pageSize = this._pageSize; + + return _api._listSubscriptions( + _topic, pageSize, _nextPageToken).then((response) { + return new _SubscriptionPageImpl(_api, _topic, pageSize, response); + }); + } +} \ No newline at end of file diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ebaa2dbd..c8d4f3a0 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.1.4+1 +version: 0.1.5-dev author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart new file mode 100644 index 00000000..95c53d64 --- /dev/null +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -0,0 +1,1044 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:async'; +import 'dart:convert'; + +import 'package:crypto/crypto.dart' as crypto; +import 'package:http/http.dart' as http; +import 'package:unittest/unittest.dart'; + +import 'package:gcloud/pubsub.dart'; + +import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; + +import '../common.dart'; + +const String ROOT_PATH = '/pubsub/v1beta1/'; + +http.Client mockClient() => new MockClient(ROOT_PATH); + +main() { + group('api', () { + var badTopicNames = [ + '/', '/topics', '/topics/$PROJECT', '/topics/$PROJECT/', + '/topics/${PROJECT}x', '/topics/${PROJECT}x/']; + + var badSubscriptionNames = [ + '/', '/subscriptions', '/subscriptions/$PROJECT', + '/subscriptions/$PROJECT/', '/subscriptions/${PROJECT}x', + '/subscriptions/${PROJECT}x/']; + + group('topic', () { + var name = 'test-topic'; + var absoluteName = '/topics/$PROJECT/test-topic'; + + test('create', () { + var mock = mockClient(); + mock.register('POST', 'topics', expectAsync((request) { + var requestTopic = + new pubsub.Topic.fromJson(JSON.decode(request.body)); + expect(requestTopic.name, absoluteName); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.createTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.project, PROJECT); + expect(topic.absoluteName, absoluteName); + return api.createTopic(absoluteName).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.createTopic(name), throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.createTopic(name), throwsArgumentError); + }); + }); + + test('delete', () { + var mock = mockClient(); + mock.register( + 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var 
api = new PubSub(mock, PROJECT); + return api.deleteTopic(name).then(expectAsync((result) { + expect(result, isNull); + return api.deleteTopic(absoluteName).then(expectAsync((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.deleteTopic(name), throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.deleteTopic(name), throwsArgumentError); + }); + }); + + test('lookup', () { + var mock = mockClient(); + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.project, PROJECT); + expect(topic.absoluteName, absoluteName); + return api.lookupTopic(absoluteName).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badTopicNames.forEach((name) { + expect(() => api.lookupTopic(name), throwsArgumentError); + }); + badSubscriptionNames.forEach((name) { + expect(() => api.lookupTopic(name), throwsArgumentError); + }); + }); + + group('query', () { + var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; + var defaultPageSize = 50; + + addTopics(pubsub.ListTopicsResponse response, int first, int count) { + response.topic = []; + for (int i = 0; i < count; i++) { + response.topic.add(new pubsub.Topic()..name = 'topic-${first + i}'); + } + } + + // Mock that expect/generates [n] topics in pages of page size + // [pageSize]. + registerQueryMock(mock, n, pageSize, [totalCalls]) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. 
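+        // Sketch of the arithmetic above: n = 51 topics with pageSize = 50
+        // gives totalPages = 2, i.e. one full page plus a final page holding
+        // the single remaining topic.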
+ if (totalCalls == null) { + totalCalls = totalPages; + } + var pageCount = 0; + mock.register('GET', 'topics', expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['query'], query); + expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = new pubsub.ListTopicsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addTopics(response, first, pageSize); + } else { + addTopics(response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); + } + + group('list', () { + Future q(count) { + var mock = mockClient(); + registerQueryMock(mock, count, 50); + + var api = new PubSub(mock, PROJECT); + return api.listTopics().listen( + expectAsync((_) => null, count: count)).asFuture(); + } + + test('simple', () { + return q(0) + .then((_) => q(1)) + .then((_) => q(1)) + .then((_) => q(49)) + .then((_) => q(50)) + .then((_) => q(51)) + .then((_) => q(99)) + .then((_) => q(100)) + .then((_) => q(101)) + .then((_) => q(170)); + }); + + test('immediate-pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = new PubSub(mock, PROJECT); + api.listTopics().listen( + expectAsync(((_) => null), count: 70), + onDone: expectAsync(() => null)) + ..pause() + ..resume() + ..pause() + ..resume(); + }); + + test('pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = new PubSub(mock, PROJECT); + var count = 0; + var subscription; + subscription = api.listTopics().listen( + expectAsync(((_) { + subscription..pause()..resume()..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return null; + }), count: 70), + onDone: expectAsync(() => null)) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + }); + + test('immediate-cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50, 1); + + var api = new PubSub(mock, PROJECT); + api.listTopics().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); + }); + + test('cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 170, 50, 1); + + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listTopics().listen( + expectAsync((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + runTest(bool withPause) { + // Test error on first GET request. + var mock = mockClient(); + mock.register('GET', 'topics', expectAsync((request) { + return mock.respondError(500); + })); + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listTopics().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + // Test error on second GET request. 
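+          // The mock serves the first page of 50 topics normally and is then
+          // swapped for one that responds with HTTP 500, so the error
+          // surfaces on the request for the second page.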
+ void runTest(bool withPause) { + var mock = mockClient(); + registerQueryMock(mock, 51, 50, 1); + + var api = new PubSub(mock, PROJECT); + + int count = 0; + var subscription; + subscription = api.listTopics().listen( + expectAsync(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register('GET', 'topics', expectAsync((request) { + return mock.respondError(500); + })); + } + return null; + }), count: 50), + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + } + + runTest(false); + runTest(true); + }); + }); + + group('page', () { + test('empty', () { + var mock = mockClient(); + registerQueryMock(mock, 0, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageTopics().then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 0, 20); + return api.pageTopics(pageSize: 20).then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('single', () { + var mock = mockClient(); + registerQueryMock(mock, 10, 50); + + var api = new PubSub(mock, PROJECT); + return api.pageTopics().then(expectAsync((page) { + expect(page.items.length, 10); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 20, 20); + return api.pageTopics(pageSize: 20).then(expectAsync((page) { + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + }); + + test('multiple', () { + runTest(n, pageSize) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = new Completer(); + var mock = mockClient(); + registerQueryMock(mock, n, pageSize); + + handlePage(page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? 
n - (totalPages - 1) * pageSize + : pageSize ); + page.next().then(expectAsync((page) { + if (page != null) { + handlePage(page); + } else { + expect(pageCount, totalPages); + completer.complete(); + } + })); + } + + var api = new PubSub(mock, PROJECT); + api.pageTopics(pageSize: pageSize).then(expectAsync(handlePage)); + + return completer.future; + } + + return runTest(70, 50) + .then((_) => runTest(99, 1)) + .then((_) => runTest(99, 50)) + .then((_) => runTest(99, 98)) + .then((_) => runTest(99, 99)) + .then((_) => runTest(99, 100)) + .then((_) => runTest(100, 1)) + .then((_) => runTest(100, 50)) + .then((_) => runTest(100, 100)) + .then((_) => runTest(101, 50)); + }); + }); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var topicName = 'test-topic'; + var absoluteTopicName = '/topics/$PROJECT/test-topic'; + + test('create', () { + var mock = mockClient(); + mock.register('POST', 'subscriptions', expectAsync((request) { + var requestSubscription = + new pubsub.Subscription.fromJson(JSON.decode(request.body)); + expect(requestSubscription.name, absoluteName); + return mock.respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.createSubscription(name, topicName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api.createSubscription(absoluteName, absoluteTopicName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.project, PROJECT); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('create-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.createSubscription(name, 'test-topic'), + throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.createSubscription('test-subscription', name), + throwsArgumentError); + }); + }); + + test('delete', () { + var mock = mockClient(); + mock.register( + 'DELETE', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.deleteSubscription(name).then(expectAsync((result) { + expect(result, isNull); + return api.deleteSubscription(absoluteName).then(expectAsync((topic) { + expect(result, isNull); + })); + })); + }); + + test('delete-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.deleteSubscription(name), throwsArgumentError); + }); + }); + + test('lookup', () { + var mock = mockClient(); + mock.register( + 'GET', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); + + var api = new PubSub(mock, PROJECT); + return api.lookupSubscription(name).then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api.lookupSubscription(absoluteName) + 
.then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.project, PROJECT); + expect(subscription.absoluteName, absoluteName); + })); + })); + }); + + test('lookup-error', () { + var mock = mockClient(); + var api = new PubSub(mock, PROJECT); + badSubscriptionNames.forEach((name) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + }); + badTopicNames.forEach((name) { + expect(() => api.lookupSubscription(name), throwsArgumentError); + }); + }); + + group('query', () { + var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; + var topicQuery = + 'pubsub.googleapis.com/topic in (/topics/$PROJECT/topic)'; + var defaultPageSize = 50; + + addSubscriptions( + pubsub.ListSubscriptionsResponse response, int first, int count) { + response.subscription = []; + for (int i = 0; i < count; i++) { + response.subscription.add( + new pubsub.Subscription()..name = 'subscription-${first + i}'); + } + } + + + // Mock that expect/generates [n] subscriptions in pages of page size + // [pageSize]. + registerQueryMock(mock, n, pageSize, {String topic, int totalCalls}) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + // No items still generate one request. + if (totalPages == 0) totalPages = 1; + // Can pass in total calls if this mock is overwritten before all + // expected pages are done, e.g. when testing errors. + if (totalCalls == null) { + totalCalls = totalPages; + } + var pageCount = 0; + mock.register('GET', 'subscriptions', expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['query'], + topic == null ? query : topicQuery); + expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } + + var response = new pubsub.ListSubscriptionsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addSubscriptions(response, first, pageSize); + } else { + addSubscriptions( + response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); + } + + group('list', () { + Future q(topic, count) { + var mock = mockClient(); + registerQueryMock(mock, count, 50, topic: topic); + + var api = new PubSub(mock, PROJECT); + return api.listSubscriptions(topic).listen( + expectAsync((_) => null, count: count)).asFuture(); + } + + test('simple', () { + return q(null, 0) + .then((_) => q('topic', 0)) + .then((_) => q(null, 1)) + .then((_) => q('topic', 1)) + .then((_) => q(null, 10)) + .then((_) => q('topic', 10)) + .then((_) => q(null, 49)) + .then((_) => q('topic', 49)) + .then((_) => q(null, 50)) + .then((_) => q('topic', 50)) + .then((_) => q(null, 51)) + .then((_) => q('topic', 51)) + .then((_) => q(null, 99)) + .then((_) => q('topic', 99)) + .then((_) => q(null, 100)) + .then((_) => q('topic', 100)) + .then((_) => q(null, 101)) + .then((_) => q('topic', 101)) + .then((_) => q(null, 170)) + .then((_) => q('topic', 170)); + }); + + test('immediate-pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = new PubSub(mock, PROJECT); + api.listSubscriptions().listen( + expectAsync(((_) => null), count: 70), + onDone: expectAsync(() => null)) + ..pause() + ..resume() + ..pause() + ..resume(); + }); + + test('pause-resume', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50); + + var api = new PubSub(mock, PROJECT); + var count = 0; + var 
subscription; + subscription = api.listSubscriptions().listen( + expectAsync(((_) { + subscription..pause()..resume()..pause(); + if ((count % 2) == 0) { + subscription.resume(); + } else { + scheduleMicrotask(() => subscription.resume()); + } + return null; + }), count: 70), + onDone: expectAsync(() => null)) + ..pause(); + scheduleMicrotask(() => subscription.resume()); + }); + + test('immediate-cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 70, 50, totalCalls: 1); + + var api = new PubSub(mock, PROJECT); + api.listSubscriptions().listen( + (_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + ..cancel(); + }); + + test('cancel', () { + var mock = mockClient(); + registerQueryMock(mock, 170, 50, totalCalls: 1); + + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listSubscriptions().listen( + expectAsync((_) => subscription.cancel()), + onDone: () => throw 'Unexpected'); + }); + + test('error', () { + runTest(bool withPause) { + // Test error on first GET request. + var mock = mockClient(); + mock.register('GET', 'subscriptions', expectAsync((request) { + return mock.respondError(500); + })); + var api = new PubSub(mock, PROJECT); + var subscription; + subscription = api.listSubscriptions().listen( + (_) => throw 'Unexpected', + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + } + + runTest(false); + runTest(true); + }); + + test('error-2', () { + runTest(bool withPause) { + // Test error on second GET request. + var mock = mockClient(); + registerQueryMock(mock, 51, 50, totalCalls: 1); + + var api = new PubSub(mock, PROJECT); + + int count = 0; + var subscription; + subscription = api.listSubscriptions().listen( + expectAsync(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register( + 'GET', 'subscriptions', expectAsync((request) { + return mock.respondError(500); + })); + } + return null; + }), count: 50), + onDone: expectAsync(() => null), + onError: expectAsync( + (e) => e is pubsub.DetailedApiRequestError)); + } + + runTest(false); + runTest(true); + }); + }); + + group('page', () { + emptyTest(String topic) { + var mock = mockClient(); + registerQueryMock(mock, 0, 50, topic: topic); + + var api = new PubSub(mock, PROJECT); + return api.pageSubscriptions(topic: topic).then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 0, 20, topic: topic); + return api.pageSubscriptions(topic: topic, pageSize: 20) + .then(expectAsync((page) { + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + } + + test('empty', () { + emptyTest(null); + emptyTest('topic'); + }); + + singleTest(String topic) { + var mock = mockClient(); + registerQueryMock(mock, 10, 50, topic: topic); + + var api = new PubSub(mock, PROJECT); + return api.pageSubscriptions(topic: topic).then(expectAsync((page) { + expect(page.items.length, 10); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + + mock.clear(); + registerQueryMock(mock, 20, 20, topic: topic); + return api.pageSubscriptions(topic: topic, pageSize: 20) + .then(expectAsync((page) { + expect(page.items.length, 20); + 
expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); + })); + } + + test('single', () { + singleTest(null); + singleTest('topic'); + }); + + multipleTest(n, pageSize, topic) { + var totalPages = (n + pageSize - 1) ~/ pageSize; + var pageCount = 0; + + var completer = new Completer(); + var mock = mockClient(); + registerQueryMock(mock, n, pageSize, topic: topic); + + handlingPage(page) { + pageCount++; + expect(page.isLast, pageCount == totalPages); + expect(page.items.length, + page.isLast ? n - (totalPages - 1) * pageSize + : pageSize ); + page.next().then((page) { + if (page != null) { + handlingPage(page); + } else { + expect(pageCount, totalPages); + completer.complete(); + } + }); + } + + var api = new PubSub(mock, PROJECT); + api.pageSubscriptions(topic: topic, pageSize: pageSize) + .then(handlingPage); + + return completer.future; + } + + test('multiple', () { + return multipleTest(70, 50, null) + .then((_) => multipleTest(99, 1, null)) + .then((_) => multipleTest(99, 50, null)) + .then((_) => multipleTest(99, 98, null)) + .then((_) => multipleTest(99, 99, null)) + .then((_) => multipleTest(99, 100, null)) + .then((_) => multipleTest(100, 1, null)) + .then((_) => multipleTest(100, 50, null)) + .then((_) => multipleTest(100, 100, null)) + .then((_) => multipleTest(101, 50, null)) + .then((_) => multipleTest(70, 50, 'topic')) + .then((_) => multipleTest(99, 1, 'topic')) + .then((_) => multipleTest(99, 50, 'topic')) + .then((_) => multipleTest(99, 98, 'topic')) + .then((_) => multipleTest(99, 99, 'topic')) + .then((_) => multipleTest(99, 100, 'topic')) + .then((_) => multipleTest(100, 1, 'topic')) + .then((_) => multipleTest(100, 50, 'topic')) + .then((_) => multipleTest(100, 100, 'topic')) + .then((_) => multipleTest(101, 50, 'topic')); + }); + }); + }); + }); + }); + + group('topic', () { + var name = 'test-topic'; + var absoluteName = '/topics/$PROJECT/test-topic'; + var message = 'Hello, world!'; + var messageBytes = UTF8.encode(message); + var messageBase64 = crypto.CryptoUtils.bytesToBase64(messageBytes); + var labels = {'a': 1, 'b': 'text'}; + + registerLookup(mock) { + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + } + + registerPublish(mock, count, fn) { + mock.register('POST', 'topics/publish', expectAsync((request) { + var publishRequest = + new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); + return fn(publishRequest); + }, count: count)); + } + + test('publish', () { + var mock = mockClient(); + registerLookup(mock); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + mock.clear(); + registerPublish(mock, 4, ((request) { + expect(request.topic, absoluteName); + expect(request.message.data, messageBase64); + expect(request.message.label, isNull); + return mock.respondEmpty(); + })); + + return topic.publishString(message).then(expectAsync((result) { + expect(result, isNull); + return topic.publishBytes(messageBytes).then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withString(message)).then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withBytes( + messageBytes)).then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); + + test('publish-with-labels', () { + var mock = 
mockClient(); + registerLookup(mock); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + mock.clear(); + registerPublish(mock, 4, ((request) { + expect(request.topic, absoluteName); + expect(request.message.data, messageBase64); + expect(request.message.label, isNotNull); + expect(request.message.label.length, labels.length); + request.message.label.forEach((label) { + expect(labels.containsKey(label.key), isTrue); + if (label.numValue != null) { + expect(label.strValue, isNull); + expect(labels[label.key], int.parse(label.numValue)); + } else { + expect(label.strValue, isNotNull); + expect(labels[label.key], label.strValue); + } + }); + return mock.respondEmpty(); + })); + + return topic.publishString(message, labels: labels) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publishBytes(messageBytes, labels: labels) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withString(message, labels: labels)) + .then(expectAsync((result) { + expect(result, isNull); + return topic.publish( + new Message.withBytes(messageBytes, labels: labels)) + .then(expectAsync((result) { + expect(result, isNull); + })); + })); + })); + })); + })); + }); + + test('delete', () { + var mock = mockClient(); + mock.register( + 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + + var api = new PubSub(mock, PROJECT); + return api.lookupTopic(name).then(expectAsync((topic) { + expect(topic.name, name); + expect(topic.absoluteName, absoluteName); + + mock.register( + 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return topic.delete().then(expectAsync((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('subscription', () { + var name = 'test-subscription'; + var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var topicName = 'test-topic'; + var absoluteTopicName = '/topics/$PROJECT/test-topic'; + + test('delete', () { + var mock = mockClient(); + mock.register( + 'GET', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + })); + + var api = new PubSub(mock, PROJECT); + return api.lookupSubscription(name).then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + + mock.register( + 'DELETE', + new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + expect(request.body.length, 0); + return mock.respondEmpty(); + })); + + return subscription.delete().then(expectAsync((result) { + expect(result, isNull); + })); + })); + }); + }); + + group('push', () { + var relativeSubscriptionName = 'sgjesse-managed-vm/test-push-subscription'; + var absoluteSubscriptionName = '/subscriptions/$relativeSubscriptionName'; + + test('event', () { + var requestBody = +''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":30 + }, + { + "key":"test", + "strValue":"hello" + } + 
] + }, + "subscription":"$absoluteSubscriptionName" +} +'''; + var event = new PushEvent.fromJson(requestBody); + expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.labels['messageNo'], 30); + expect(event.message.labels['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); + + test('event-short-subscription-name', () { + var requestBody = + ''' +{ + "message": { + "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", + "labels": [ + { + "key":"messageNo", + "numValue":30 + }, + { + "key":"test", + "strValue":"hello" + } + ] + }, + "subscription":"$relativeSubscriptionName" +} +'''; + var event = new PushEvent.fromJson(requestBody); + expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.labels['messageNo'], 30); + expect(event.message.labels['test'], 'hello'); + expect(event.subscriptionName, absoluteSubscriptionName); + }); + }); +} From 03a3cd7d27cb6fab7fbb8d790db69b71b06f790a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 13 Mar 2015 09:05:20 +0100 Subject: [PATCH 060/239] Update Pub/Sub API to use the v1beta2 google API Also add the first part of an e2e test. R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//1001953002 --- pkgs/gcloud/lib/db.dart | 3 +- pkgs/gcloud/lib/pubsub.dart | 82 +++++---- pkgs/gcloud/lib/src/pubsub_impl.dart | 158 +++++++----------- pkgs/gcloud/lib/storage.dart | 4 +- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/common.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 59 +++++++ pkgs/gcloud/test/pubsub/pubsub_test.dart | 167 ++++++++----------- 8 files changed, 248 insertions(+), 229 deletions(-) create mode 100644 pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index fc95ef1e..41282038 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -31,7 +31,8 @@ DatastoreDB get dbService => ss.lookup(_dbKey); /// Registers the [DatastoreDB] object within the current service scope. /// -/// The provided `db` object will be avilable via the top-level `db` getter. +/// The provided `db` object will be avilable via the top-level `dbService` +/// getter. /// /// Calling this function outside of a service scope will result in an error. /// Calling this function more than once inside the same service scope is not diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index ae24da11..6cb2fe2c 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -10,13 +10,38 @@ import 'dart:convert'; import 'package:crypto/crypto.dart'; import 'package:http/http.dart' as http; -import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; +import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; + +import 'service_scope.dart' as ss; import 'common.dart'; export 'common.dart'; part 'src/pubsub_impl.dart'; +const Symbol _pubsubKey = #gcloud.pubsub; + +/// Access the [PubSub] object available in the current service scope. +/// +/// The returned object will be the one which was previously registered with +/// [registerPubSubService] within the current (or a parent) service scope. +/// +/// Accessing this getter outside of a service scope will result in an error. +/// See the `package:gcloud/service_scope.dart` library for more information. +PubSub get pubsubService => ss.lookup(_pubsubKey); + +/// Registers the [pubsub] object within the current service scope. 
+/// +/// The provided `pubsub` object will be avilable via the top-level +/// `pubsubService` getter. +/// +/// Calling this function outside of a service scope will result in an error. +/// Calling this function more than once inside the same service scope is not +/// allowed. +void registerPubSubService(PubSub pubsub) { + ss.register(_pubsubKey, pubsub); +} + /// A Cloud Pub/Sub client. /// /// Connects to the Cloud Pub/Sub service and gives access to its operations. @@ -68,25 +93,26 @@ part 'src/pubsub_impl.dart'; /// When working with topics and subscriptions they are referred to using /// names. These names can be either relative names or absolute names. /// -/// An absolute name of a topic starts with `/` and has the form: +/// An absolute name of a topic starts with `projects/` and has the form: /// -/// /topics// +/// projects//topics/ /// /// When a relative topic name is used, its absolute name is generated by -/// prepending `/topics//`, where `` is the project -/// id passed to the constructor. +/// prepending `projects//topics/`, where `` is the +/// project id passed to the constructor. /// -/// An absolute name of a subscription starts with `/` and has the form: +/// An absolute name of a subscription starts with `projects/` and has the +/// form: /// -/// /subscriptions// +/// projects//subscriptions/ /// /// When a relative subscription name is used, its absolute name is -/// generated by prepending `/subscriptions//`, where +/// generated by prepending `projects//subscriptions/`, where /// `` is the project id passed to the constructor. /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. - static const Scopes = const [ pubsub.PubsubApi.PubsubScope ]; + static const SCOPES = const [ pubsub.PubsubApi.PubsubScope ]; /// Access Pub/Sub using an authenicated client. /// @@ -219,25 +245,23 @@ abstract class Topic { /// Publish a string as a message. /// - /// The message will get the labels specified in [labels]. The keys in this - /// map must be strings and the values must be either Strings or integers. + /// The message will get the attributes specified in [attributes]. /// - /// The [labels] are passed together with the message to the receiver. + /// The [attributes] are passed together with the message to the receiver. /// /// Returns a `Future` which completes with `null` when the operation /// is finished. - Future publishString(String message, {Map labels}); + Future publishString(String message, {Map attributes}); /// Publish bytes as a message. /// - /// The message will get the labels specified in [labels]. The keys in this - /// map must be strings and the values must be either Strings or integers. + /// The message will get the attributes specified in [attributes]. /// - /// The [labels] are passed together with the message to the receiver. + /// The [attributes] are passed together with the message to the receiver. /// /// Returns a `Future` which completes with `null` when the operation /// is finished. - Future publishBytes(List message, {Map labels}); + Future publishBytes(List message, {Map attributes}); } /// A Pub/Sub subscription @@ -315,7 +339,7 @@ abstract class Subscription { /// The content of a Pub/Sub message. /// /// All Pub/Sub messages consist of a body of binary data and has an optional -/// set of labels (key-value pairs) associated with it. +/// set of attributes (key-value pairs) associated with it. /// /// A `Message` contains the message body a list of bytes. 
The message body can /// be read and written as a String, in which case the string is converted to @@ -324,18 +348,14 @@ abstract class Message { /// Creates a new message with a String for the body. The String will /// be UTF-8 encoded to create the actual binary body for the message. /// - /// Message labels can be passed in the [labels] Map. The values in this - /// map must be either Strings or integers. Integers must be positive - /// 64-bit integers. - factory Message.withString(String message, {Map labels}) = + /// Message attributes can be passed in the [attributes] map. + factory Message.withString(String message, {Map attributes}) = _MessageImpl.withString; /// Creates a new message with a binary body. /// - /// Message labels can be passed in the [labels] Map. The values in this - /// map must be either Strings or integers. Integers must be positive - /// 64-bit integers. - factory Message.withBytes(List message, {Map labels}) = + /// Message attributes can be passed in the [attributes] Map. + factory Message.withBytes(List message, {Map attributes}) = _MessageImpl.withBytes; /// The message body as a String. @@ -349,11 +369,8 @@ abstract class Message { /// The message body as bytes. List get asBytes; - /// The labels for this message. The values in the Map are either - /// Strings or integers. - /// - /// Values can be 64-bit integers. - Map get labels; + /// The attributes for this message. + Map get attributes; } /// A Pub/Sub pull event. @@ -364,9 +381,6 @@ abstract class PullEvent { /// The message content. Message get message; - /// Whether the message was truncated. - bool get isTruncated; - /// Acknowledge reception of this message. /// /// Returns a `Future` which completes with `null` when the acknowledge has diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 2ce7b44e..98e8e562 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -15,44 +15,36 @@ class _PubSubImpl implements PubSub { this._client = client, this.project = project, _api = new pubsub.PubsubApi(client), - _topicPrefix = '/topics/$project/', - _subscriptionPrefix = '/subscriptions/$project/'; + _topicPrefix = 'projects/$project/topics/', + _subscriptionPrefix = 'projects/$project/subscriptions/'; String _fullTopicName(String name) { - if (name.startsWith('/') && !name.startsWith('/topics')) { - throw new ArgumentError("Illegal absolute topic name. Absolute topic " - "name must start with '/topics'"); - } - return name.startsWith('/topics') ? name : '${_topicPrefix}$name'; + return name.startsWith('projects/') ? name : '${_topicPrefix}$name'; } String _fullSubscriptionName(name) { - if (name.startsWith('/') && !name.startsWith('/subscriptions')) { - throw new ArgumentError("Illegal absolute topic name. Absolute topic " - "name must start with '/subscriptions'"); - } - return name.startsWith('/subscriptions') ? name - : '${_subscriptionPrefix}$name'; + return name.startsWith('projects/') ? name + : '${_subscriptionPrefix}$name'; } Future _createTopic(String name) { - return _api.topics.create(new pubsub.Topic()..name = name); + return _api.projects.topics.create(new pubsub.Topic()..name = name, name); } Future _deleteTopic(String name) { - return _api.topics.delete(name); + // The Pub/Sub delete API returns an instance of Empty. 
+ return _api.projects.topics.delete(name).then((_) => null); } Future _getTopic(String name) { - return _api.topics.get(name); + return _api.projects.topics.get(name); } Future _listTopics( int pageSize, String nextPageToken) { - var query = 'cloud.googleapis.com/project in (/projects/$project)'; - return _api.topics.list( - query: query, maxResults: pageSize, pageToken: nextPageToken); + return _api.projects.topics.list( + 'projects/$project', pageSize: pageSize, pageToken: nextPageToken); } Future _createSubscription( @@ -65,95 +57,79 @@ class _PubSubImpl implements PubSub { new pubsub.PushConfig()..pushEndpoint = endpoint.toString(); subscription.pushConfig = pushConfig; } - return _api.subscriptions.create(subscription); + return _api.projects.subscriptions.create(subscription, name); } Future _deleteSubscription(String name) { - return _api.subscriptions.delete(_fullSubscriptionName(name)); + // The Pub/Sub delete API returns an instance of Empty. + return _api.projects.subscriptions.delete(_fullSubscriptionName(name)) + .then((_) => null); } Future _getSubscription(String name) { - return _api.subscriptions.get(name); + return _api.projects.subscriptions.get(name); } Future _listSubscriptions( String topic, int pageSize, String nextPageToken) { - // See https://developers.google.com/pubsub/v1beta1/subscriptions/list for - // the specification of the query format. - var query = topic == null - ? 'cloud.googleapis.com/project in (/projects/$project)' - : 'pubsub.googleapis.com/topic in (/topics/$project/$topic)'; - return _api.subscriptions.list( - query: query, maxResults: pageSize, pageToken: nextPageToken); + return _api.projects.subscriptions.list( + 'projects/$project', pageSize: pageSize, pageToken: nextPageToken); } Future _modifyPushConfig(String subscription, Uri endpoint) { var pushConfig = new pubsub.PushConfig() ..pushEndpoint = endpoint != null ? endpoint.toString() : null; var request = new pubsub.ModifyPushConfigRequest() - ..subscription = subscription ..pushConfig = pushConfig; - return _api.subscriptions.modifyPushConfig(request); + return _api.projects.subscriptions.modifyPushConfig(request, subscription); } Future _publish( - String topic, List message, Map labels) { - var l = null; - if (labels != null) { - l = []; - labels.forEach((key, value) { - if (value is String) { - l.add(new pubsub.Label()..key = key..strValue = value); - } else { - l.add(new pubsub.Label()..key = key..numValue = value.toString()); - } - }); - } + String topic, List message, Map attributes) { var request = new pubsub.PublishRequest() - ..topic = topic - ..message = (new pubsub.PubsubMessage() + ..messages = [(new pubsub.PubsubMessage() ..dataAsBytes = message - ..label = l); - return _api.topics.publish(request); + ..attributes = attributes)]; + // TODO(sgjesse): Handle PublishResponse containing message ids. 
+ return _api.projects.topics.publish(request, topic).then((_) => null); } Future _pull( String subscription, bool returnImmediately) { var request = new pubsub.PullRequest() - ..subscription = subscription + ..maxMessages = 1 ..returnImmediately = returnImmediately; - return _api.subscriptions.pull(request); + return _api.projects.subscriptions.pull(request, subscription); } Future _ack(String ackId, String subscription) { var request = new pubsub.AcknowledgeRequest() - ..ackId = [ ackId ] - ..subscription = subscription; - return _api.subscriptions.acknowledge(request); + ..ackIds = [ ackId ]; + return _api.projects.subscriptions.acknowledge(request, subscription); } void _checkTopicName(name) { - if (name.startsWith('/') && !name.startsWith(_topicPrefix)) { + if (name.startsWith('projects/') && !name.contains('/topics/')) { throw new ArgumentError( - "Illegal topic name. Absolute topic names for project '$project' " - "must start with $_topicPrefix"); + "Illegal topic name. Absolute topic names must have the form " + "'projects/[project-id]/topics/[topic-name]"); } - if (name.length == _topicPrefix.length) { + if (name.endsWith('/topics/')) { throw new ArgumentError( 'Illegal topic name. Relative part of the name cannot be empty'); } } void _checkSubscriptionName(name) { - if (name.startsWith('/') && !name.startsWith(_subscriptionPrefix)) { + if (name.startsWith('projects/') && !name.contains('/subscriptions/')) { throw new ArgumentError( - "Illegal subscription name. Absolute subscription names for project " - "'$project' must start with $_subscriptionPrefix"); + "Illegal subscription name. Absolute subscription names must have " + "the form 'projects/[project-id]/subscriptions/[subscription-name]"); } - if (name.length == _subscriptionPrefix.length) { + if (name.endsWith('/subscriptions/')) { throw new ArgumentError( - 'Illegal subscription name. ' - 'Relative part of the name cannot be empty'); + 'Illegal subscription name. Relative part of the name cannot be ' + 'empty'); } } @@ -237,12 +213,12 @@ class _MessageImpl implements Message { // null. final List _bytesMessage; - final Map labels; + final Map attributes; - _MessageImpl.withString(this._stringMessage, {this.labels}) + _MessageImpl.withString(this._stringMessage, {this.attributes}) : _bytesMessage = null; - _MessageImpl.withBytes(this._bytesMessage, {this.labels}) + _MessageImpl.withBytes(this._bytesMessage, {this.attributes}) : _stringMessage = null; List get asBytes => @@ -262,7 +238,6 @@ class _PullMessage implements Message { final pubsub.PubsubMessage _message; List _bytes; String _string; - Map _labels; _PullMessage(this._message); @@ -276,16 +251,7 @@ class _PullMessage implements Message { return _string; } - Map get labels { - if (_labels == null) { - _labels = {}; - _message.label.forEach((label) { - _labels[label.key] = - label.numValue != null ? label.numValue : label.strValue; - }); - } - return _labels; - } + Map get attributes => _message.attributes; } /// Message received through Pub/Sub push delivery. @@ -296,9 +262,9 @@ class _PullMessage implements Message { /// The labels have been decoded into a Map. 
class _PushMessage implements Message { final String _base64Message; - final Map labels; + final Map attributes; - _PushMessage(this._base64Message, this.labels); + _PushMessage(this._base64Message, this.attributes); List get asBytes => CryptoUtils.base64StringToBytes(_base64Message); @@ -311,18 +277,18 @@ class _PushMessage implements Message { class _PullEventImpl implements PullEvent { /// Pub/Sub API object. final _PubSubImpl _api; + /// Subscription this was received from. + final String _subscriptionName; /// Low level response received from Pub/Sub. final pubsub.PullResponse _response; final Message message; - _PullEventImpl(this._api, response) + _PullEventImpl(this._api, this._subscriptionName, response) : this._response = response, message = new _PullMessage(response.pubsubEvent.message); - bool get isTruncated => _response.pubsubEvent.truncated; - Future acknowledge() { - return _api._ack(_response.ackId, _response.pubsubEvent.subscription); + return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); } } @@ -380,17 +346,17 @@ class _TopicImpl implements Topic { String get absoluteName => _topic.name; Future publish(Message message) { - return _api._publish(_topic.name, message.asBytes, message.labels); + return _api._publish(_topic.name, message.asBytes, message.attributes); } Future delete() => _api._deleteTopic(_topic.name); - Future publishString(String message, {Map labels}) { - return _api._publish(_topic.name, UTF8.encode(message), labels); + Future publishString(String message, {Map attributes}) { + return _api._publish(_topic.name, UTF8.encode(message), attributes); } - Future publishBytes(List message, {Map labels}) { - return _api._publish(_topic.name, message, labels); + Future publishBytes(List message, {Map attributes}) { + return _api._publish(_topic.name, message, attributes); } } @@ -422,7 +388,7 @@ class _SubscriptionImpl implements Subscription { Future pull({bool noWait: true}) { return _api._pull(_subscription.name, noWait) .then((response) { - return new _PullEventImpl(_api, response); + return new _PullEventImpl(_api, _subscription.name, response); }).catchError((e) => null, test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); @@ -448,10 +414,10 @@ class _TopicPageImpl implements Page { _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) - : items = new List(response.topic.length), + : items = new List(response.topics.length), _nextPageToken = response.nextPageToken { - for (int i = 0; i < response.topic.length; i++) { - items[i] = new _TopicImpl(_api, response.topic[i]); + for (int i = 0; i < response.topics.length; i++) { + items[i] = new _TopicImpl(_api, response.topics[i]); } } @@ -478,13 +444,13 @@ class _SubscriptionPageImpl implements Page { this._topic, this._pageSize, pubsub.ListSubscriptionsResponse response) - : items = new List(response.subscription != null - ? response.subscription.length + : items = new List(response.subscriptions != null + ? 
response.subscriptions.length : 0), _nextPageToken = response.nextPageToken{ - if (response.subscription != null) { - for (int i = 0; i < response.subscription.length; i++) { - items[i] = new _SubscriptionImpl(_api, response.subscription[i]); + if (response.subscriptions != null) { + for (int i = 0; i < response.subscriptions.length; i++) { + items[i] = new _SubscriptionImpl(_api, response.subscriptions[i]); } } } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index f7e0d855..4705e7a0 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -77,8 +77,8 @@ Storage get storageService => ss.lookup(_storageKey); /// Registers the [storage] object within the current service scope. /// -/// The provided `storage` object will be avilable via the top-level `storage` -/// getter. +/// The provided `storage` object will be avilable via the top-level +/// `storageService` getter. /// /// Calling this function outside of a service scope will result in an error. /// Calling this function more than once inside the same service scope is not diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index c8d4f3a0..03bd497c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,7 @@ environment: dependencies: crypto: '>=0.9.0 <0.10.0' googleapis: '>=0.2.0 <0.7.0' - googleapis_beta: '>=0.3.0 <0.10.0' + googleapis_beta: '>=0.10.0 <0.11.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index acac02aa..2f6e2f01 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -58,7 +58,7 @@ class MockClient extends http.BaseClient { } Future handler(http.Request request) { - expect(request.url.host, 'www.googleapis.com'); + expect(request.url.host, 'pubsub.googleapis.com'); var path = request.url.path; if (mocks[request.method] == null) { throw 'No mock handler for method ${request.method} found. ' diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart new file mode 100644 index 00000000..814f5c17 --- /dev/null +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -0,0 +1,59 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +import 'dart:async'; + +import 'package:gcloud/pubsub.dart'; +import 'package:googleapis/common/common.dart' as common; +import 'package:unittest/unittest.dart'; + +import '../common_e2e.dart'; + +String generateTopicName() { + var id = new DateTime.now().millisecondsSinceEpoch; + return 'dart-e2e-test-$id'; +} + +runTests(PubSub pubsub, String project) { + group('topic', () { + test('create-lookup-delete', () async { + var topicName = generateTopicName(); + var topic = await pubsub.createTopic(topicName); + expect(topic.name, topicName); + topic = await pubsub.lookupTopic(topicName); + expect(topic.name, topicName); + expect(topic.project, project); + expect(topic.absoluteName, 'projects/$project/topics/$topicName'); + expect(await pubsub.deleteTopic(topicName), isNull); + }); + + test('create-list-delete', () async { + var topicPrefix = generateTopicName(); + + name(i) => '$topicPrefix-$i'; + + for (var i = 0; i < 5; i++) { + await pubsub.createTopic(name(i)); + } + var topics = await pubsub.listTopics().map((t) => t.name).toList(); + for (var i = 0; i < 5; i++) { + expect(topics.contains(name(i)), isTrue); + await pubsub.deleteTopic(name(i)); + } + }); + }); +} + +main() { + withAuthClient(PubSub.SCOPES, (String project, httpClient) { + // Share the same pubsub connection for all tests. + var pubsub = new PubSub(httpClient, project); + + return runE2EUnittest(() { + runTests(pubsub, project); + }).whenComplete(() { + // TODO(sgjesse): Cleanup leftover topics/subscriptions. + }); + }); +} diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 95c53d64..30231b66 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -11,37 +11,42 @@ import 'package:unittest/unittest.dart'; import 'package:gcloud/pubsub.dart'; -import 'package:googleapis_beta/pubsub/v1beta1.dart' as pubsub; +import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; import '../common.dart'; +import '../common_e2e.dart'; -const String ROOT_PATH = '/pubsub/v1beta1/'; + +const String ROOT_PATH = '/v1beta2/'; http.Client mockClient() => new MockClient(ROOT_PATH); main() { group('api', () { var badTopicNames = [ - '/', '/topics', '/topics/$PROJECT', '/topics/$PROJECT/', - '/topics/${PROJECT}x', '/topics/${PROJECT}x/']; + 'projects/', 'projects/topics', 'projects/$PROJECT', + 'projects/$PROJECT/', 'projects/${PROJECT}/topics', + 'projects/${PROJECT}/topics/']; var badSubscriptionNames = [ - '/', '/subscriptions', '/subscriptions/$PROJECT', - '/subscriptions/$PROJECT/', '/subscriptions/${PROJECT}x', - '/subscriptions/${PROJECT}x/']; + 'projects/', 'projects/subscriptions', 'projects/$PROJECT', + 'projects/$PROJECT/', 'projects/${PROJECT}/subscriptions', + 'projects/${PROJECT}/subscriptions/']; group('topic', () { var name = 'test-topic'; - var absoluteName = '/topics/$PROJECT/test-topic'; + var absoluteName = 'projects/$PROJECT/topics/test-topic'; test('create', () { var mock = mockClient(); - mock.register('POST', 'topics', expectAsync((request) { - var requestTopic = - new pubsub.Topic.fromJson(JSON.decode(request.body)); - expect(requestTopic.name, absoluteName); - return mock.respond(new pubsub.Topic()..name = absoluteName); - }, count: 2)); + mock.register( + 'PUT', 'projects/$PROJECT/topics/test-topic', + expectAsync((request) { + var requestTopic = + new pubsub.Topic.fromJson(JSON.decode(request.body)); + expect(requestTopic.name, absoluteName); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 
2)); var api = new PubSub(mock, PROJECT); return api.createTopic(name).then(expectAsync((topic) { @@ -69,8 +74,7 @@ main() { test('delete', () { var mock = mockClient(); mock.register( - 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + 'DELETE', 'projects/test-project/topics/test-topic', expectAsync((request) { expect(request.body.length, 0); return mock.respondEmpty(); }, count: 2)); @@ -98,8 +102,7 @@ main() { test('lookup', () { var mock = mockClient(); mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + 'GET', 'projects/test-project/topics/test-topic', expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); }, count: 2)); @@ -128,13 +131,12 @@ main() { }); group('query', () { - var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; var defaultPageSize = 50; addTopics(pubsub.ListTopicsResponse response, int first, int count) { - response.topic = []; + response.topics = []; for (int i = 0; i < count; i++) { - response.topic.add(new pubsub.Topic()..name = 'topic-${first + i}'); + response.topics.add(new pubsub.Topic()..name = 'topic-${first + i}'); } } @@ -150,10 +152,9 @@ main() { totalCalls = totalPages; } var pageCount = 0; - mock.register('GET', 'topics', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { pageCount++; - expect(request.url.queryParameters['query'], query); - expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.url.queryParameters['pageSize'], '$pageSize'); expect(request.body.length, 0); if (pageCount > 1) { expect(request.url.queryParameters['pageToken'], 'next-page'); @@ -256,7 +257,7 @@ main() { runTest(bool withPause) { // Test error on first GET request. 
var mock = mockClient(); - mock.register('GET', 'topics', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); @@ -295,7 +296,7 @@ main() { scheduleMicrotask(() => subscription.resume()); } mock.clear(); - mock.register('GET', 'topics', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { return mock.respondError(500); })); } @@ -400,13 +401,13 @@ main() { group('subscription', () { var name = 'test-subscription'; - var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var absoluteName = 'projects/$PROJECT/subscriptions/test-subscription'; var topicName = 'test-topic'; - var absoluteTopicName = '/topics/$PROJECT/test-topic'; + var absoluteTopicName = 'projects/$PROJECT/topics/test-topic'; test('create', () { var mock = mockClient(); - mock.register('POST', 'subscriptions', expectAsync((request) { + mock.register('PUT', 'projects/$PROJECT/subscriptions', expectAsync((request) { var requestSubscription = new pubsub.Subscription.fromJson(JSON.decode(request.body)); expect(requestSubscription.name, absoluteName); @@ -444,8 +445,7 @@ main() { var mock = mockClient(); mock.register( 'DELETE', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + 'projects/$PROJECT/subscriptions', expectAsync((request) { expect(request.body.length, 0); return mock.respondEmpty(); }, count: 2)); @@ -474,8 +474,7 @@ main() { var mock = mockClient(); mock.register( 'GET', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + new RegExp('projects/$PROJECT/subscriptions'), expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Subscription()..name = absoluteName); }, count: 2)); @@ -505,16 +504,13 @@ main() { }); group('query', () { - var query = 'cloud.googleapis.com/project in (/projects/$PROJECT)'; - var topicQuery = - 'pubsub.googleapis.com/topic in (/topics/$PROJECT/topic)'; var defaultPageSize = 50; addSubscriptions( pubsub.ListSubscriptionsResponse response, int first, int count) { - response.subscription = []; + response.subscriptions = []; for (int i = 0; i < count; i++) { - response.subscription.add( + response.subscriptions.add( new pubsub.Subscription()..name = 'subscription-${first + i}'); } } @@ -532,11 +528,9 @@ main() { totalCalls = totalPages; } var pageCount = 0; - mock.register('GET', 'subscriptions', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { pageCount++; - expect(request.url.queryParameters['query'], - topic == null ? query : topicQuery); - expect(request.url.queryParameters['maxResults'], '$pageSize'); + expect(request.url.queryParameters['pageSize'], '$pageSize'); expect(request.body.length, 0); if (pageCount > 1) { expect(request.url.queryParameters['pageToken'], 'next-page'); @@ -650,7 +644,7 @@ main() { runTest(bool withPause) { // Test error on first GET request. 
var mock = mockClient(); - mock.register('GET', 'subscriptions', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); @@ -690,7 +684,7 @@ main() { } mock.clear(); mock.register( - 'GET', 'subscriptions', expectAsync((request) { + 'GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { return mock.respondError(500); })); } @@ -819,27 +813,27 @@ main() { group('topic', () { var name = 'test-topic'; - var absoluteName = '/topics/$PROJECT/test-topic'; + var absoluteName = 'projects/$PROJECT/topics/test-topic'; var message = 'Hello, world!'; var messageBytes = UTF8.encode(message); var messageBase64 = crypto.CryptoUtils.bytesToBase64(messageBytes); - var labels = {'a': 1, 'b': 'text'}; + var attributes = {'a': '1', 'b': 'text'}; registerLookup(mock) { mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + 'GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); } registerPublish(mock, count, fn) { - mock.register('POST', 'topics/publish', expectAsync((request) { - var publishRequest = - new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); - return fn(publishRequest); - }, count: count)); + mock.register('POST', 'projects/test-project/topics/test-topic:publish', + expectAsync((request) { + var publishRequest = + new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); + return fn(publishRequest); + }, count: count)); } test('publish', () { @@ -850,10 +844,10 @@ main() { return api.lookupTopic(name).then(expectAsync((topic) { mock.clear(); registerPublish(mock, 4, ((request) { - expect(request.topic, absoluteName); - expect(request.message.data, messageBase64); - expect(request.message.label, isNull); - return mock.respondEmpty(); + expect(request.messages.length, 1); + expect(request.messages[0].data, messageBase64); + expect(request.messages[0].attributes, isNull); + return mock.respond(new pubsub.PublishResponse()..messageIds = [0]); })); return topic.publishString(message).then(expectAsync((result) { @@ -874,7 +868,7 @@ main() { })); }); - test('publish-with-labels', () { + test('publish-with-attributes', () { var mock = mockClient(); registerLookup(mock); @@ -882,35 +876,26 @@ main() { return api.lookupTopic(name).then(expectAsync((topic) { mock.clear(); registerPublish(mock, 4, ((request) { - expect(request.topic, absoluteName); - expect(request.message.data, messageBase64); - expect(request.message.label, isNotNull); - expect(request.message.label.length, labels.length); - request.message.label.forEach((label) { - expect(labels.containsKey(label.key), isTrue); - if (label.numValue != null) { - expect(label.strValue, isNull); - expect(labels[label.key], int.parse(label.numValue)); - } else { - expect(label.strValue, isNotNull); - expect(labels[label.key], label.strValue); - } - }); - return mock.respondEmpty(); + expect(request.messages.length, 1); + expect(request.messages[0].data, messageBase64); + expect(request.messages[0].attributes, isNotNull); + expect(request.messages[0].attributes.length, attributes.length); + expect(request.messages[0].attributes, attributes); + return mock.respond(new pubsub.PublishResponse()..messageIds = [0]); })); - return topic.publishString(message, labels: labels) + return topic.publishString(message, attributes: attributes) 
.then(expectAsync((result) { expect(result, isNull); - return topic.publishBytes(messageBytes, labels: labels) + return topic.publishBytes(messageBytes, attributes: attributes) .then(expectAsync((result) { expect(result, isNull); return topic.publish( - new Message.withString(message, labels: labels)) + new Message.withString(message, attributes: attributes)) .then(expectAsync((result) { expect(result, isNull); return topic.publish( - new Message.withBytes(messageBytes, labels: labels)) + new Message.withBytes(messageBytes, attributes: attributes)) .then(expectAsync((result) { expect(result, isNull); })); @@ -923,8 +908,7 @@ main() { test('delete', () { var mock = mockClient(); mock.register( - 'GET', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + 'GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); @@ -935,8 +919,7 @@ main() { expect(topic.absoluteName, absoluteName); mock.register( - 'DELETE', new RegExp(r'topics/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}topics/$absoluteName'); + 'DELETE', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respondEmpty(); })); @@ -950,15 +933,14 @@ main() { group('subscription', () { var name = 'test-subscription'; - var absoluteName = '/subscriptions/$PROJECT/test-subscription'; + var absoluteName = 'projects/$PROJECT/subscriptions/test-subscription'; var topicName = 'test-topic'; - var absoluteTopicName = '/topics/$PROJECT/test-topic'; + var absoluteTopicName = 'projects/$PROJECT/topics/test-topic'; test('delete', () { var mock = mockClient(); mock.register( - 'GET', new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + 'GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); @@ -968,10 +950,7 @@ main() { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); - mock.register( - 'DELETE', - new RegExp(r'subscriptions/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}subscriptions/$absoluteName'); + mock.register('DELETE', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respondEmpty(); })); @@ -996,7 +975,7 @@ main() { "labels": [ { "key":"messageNo", - "numValue":30 + "numValue":"30" }, { "key":"test", @@ -1009,8 +988,8 @@ main() { '''; var event = new PushEvent.fromJson(requestBody); expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.labels['messageNo'], 30); - expect(event.message.labels['test'], 'hello'); + expect(event.message.attributes['messageNo'], '30'); + expect(event.message.attributes['test'], 'hello'); expect(event.subscriptionName, absoluteSubscriptionName); }); @@ -1036,8 +1015,8 @@ main() { '''; var event = new PushEvent.fromJson(requestBody); expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.labels['messageNo'], 30); - expect(event.message.labels['test'], 'hello'); + expect(event.message.attributes['messageNo'], 30); + expect(event.message.attributes['test'], 'hello'); expect(event.subscriptionName, absoluteSubscriptionName); }); }); From 06a31bb55cd1dae21569ebfb2e26ebe2ec72d541 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 13 Mar 2015 10:25:37 +0100 Subject: [PATCH 
061/239] Make the API hostname configurable for the mock tests Cloud Pub/Sub uses pubsub.googleapis.com whereas Cloud Storage uses www.googleapis.com. TBR=kustermann@google.com Review URL: https://codereview.chromium.org//1008763002 --- pkgs/gcloud/test/common.dart | 8 +++++--- pkgs/gcloud/test/pubsub/pubsub_test.dart | 4 ++-- pkgs/gcloud/test/storage/storage_test.dart | 3 ++- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 2f6e2f01..97aa783c 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -19,15 +19,17 @@ const RESPONSE_HEADERS = const { }; class MockClient extends http.BaseClient { + final String hostname; final String rootPath; final Uri rootUri; Map> mocks = {}; http_testing.MockClient client; - MockClient(String rootPath) : + MockClient(String hostname, String rootPath) : + hostname = hostname, rootPath = rootPath, - rootUri = Uri.parse('https://www.googleapis.com${rootPath}') { + rootUri = Uri.parse('https://$hostname$rootPath') { client = new http_testing.MockClient(handler); } @@ -58,7 +60,7 @@ class MockClient extends http.BaseClient { } Future handler(http.Request request) { - expect(request.url.host, 'pubsub.googleapis.com'); + expect(request.url.host, hostname); var path = request.url.path; if (mocks[request.method] == null) { throw 'No mock handler for method ${request.method} found. ' diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 30231b66..267b46cb 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -16,10 +16,10 @@ import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; import '../common.dart'; import '../common_e2e.dart'; - +const String HOSTNAME = 'pubsub.googleapis.com'; const String ROOT_PATH = '/v1beta2/'; -http.Client mockClient() => new MockClient(ROOT_PATH); +http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); main() { group('api', () { diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 5d5068ac..c63ff8f8 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -19,10 +19,11 @@ import '../common.dart'; import '../common_e2e.dart'; +const String HOSTNAME = 'www.googleapis.com'; const String ROOT_PATH = '/storage/v1/'; -http.Client mockClient() => new MockClient(ROOT_PATH); +http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); withMockClient(function) { var mock = mockClient(); From 53b08cc66861e2fe4ec1f09cf4393f55e4389b1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Fri, 13 Mar 2015 10:34:17 +0100 Subject: [PATCH 062/239] Update .status for new Cloud Pub/Sub tests TBR=kustermann@google.com Review URL: https://codereview.chromium.org//1004143002 --- pkgs/gcloud/.status | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index 13d431e4..285891af 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -6,6 +6,7 @@ # ./tools/test.py from running several e2e in parallel. 
build/test/db_all_e2e_test: Skip build/test/storage/e2e_test: Skip +build/test/pubsub/pubsub_e2e_test: Skip # This test is slow because # - eventual consistency forces us to put in sleep()s @@ -20,9 +21,15 @@ test/storage/e2e_test: Skip build/test/db_all_e2e_test: Skip test/db_all_e2e_test: Skip +build/test/pubsub/pubsub_e2e_test: Skip +test/pubsub/pubsub_e2e_test: Skip + # Imports common_e2e.dart, which uses dart:io build/test/storage/storage_test: Skip test/storage/storage_test: Skip +build/test/pubsub/pubsub_test: Skip +test/pubsub/pubsub_test: Skip + [ $compiler == dart2js ] *: Skip From d0acd248a61707f7ceed05a7dfe00613abb988df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Tue, 17 Mar 2015 12:19:50 +0100 Subject: [PATCH 063/239] Add more Cloud Pub/Sub tests R=kustermann@google.com Review URL: https://codereview.chromium.org//1008793003 --- pkgs/gcloud/lib/pubsub.dart | 12 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 16 ++- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 131 +++++++++++++++++-- 3 files changed, 136 insertions(+), 23 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 6cb2fe2c..b46c174a 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -98,7 +98,7 @@ void registerPubSubService(PubSub pubsub) { /// projects//topics/ /// /// When a relative topic name is used, its absolute name is generated by -/// prepending `projects//topics/`, where `` is the +/// pre-pending `projects//topics/`, where `` is the /// project id passed to the constructor. /// /// An absolute name of a subscription starts with `projects/` and has the @@ -107,16 +107,16 @@ void registerPubSubService(PubSub pubsub) { /// projects//subscriptions/ /// /// When a relative subscription name is used, its absolute name is -/// generated by prepending `projects//subscriptions/`, where +/// generated by pre-pending `projects//subscriptions/`, where /// `` is the project id passed to the constructor. /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. static const SCOPES = const [ pubsub.PubsubApi.PubsubScope ]; - /// Access Pub/Sub using an authenicated client. + /// Access Pub/Sub using an authenticated client. /// - /// The [client] is an authentiacted HTTP client. This client must + /// The [client] is an authenticated HTTP client. This client must /// provide access to at least the scopes in `PubSub.Scopes`. /// /// The [project] is the name of the Google Cloud project. @@ -171,7 +171,7 @@ abstract class PubSub { /// /// The [name] can be either an absolute name or a relative name. /// - /// Returns a `Future` which completes with the newly created subscripton. + /// Returns a `Future` which completes with the newly created subscription. Future createSubscription( String name, String topic, {Uri endpoint}); @@ -297,7 +297,7 @@ abstract class Subscription { /// Whether this is a pull subscription. /// - /// A subscription without a configured endpoint URI is a pull subscripton. + /// A subscription without a configured endpoint URI is a pull subscription. /// Messages are not delivered automatically, but must instead be requested /// using [pull]. 
bool get isPull; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 98e8e562..fe2b957b 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -105,7 +105,9 @@ class _PubSubImpl implements PubSub { Future _ack(String ackId, String subscription) { var request = new pubsub.AcknowledgeRequest() ..ackIds = [ ackId ]; - return _api.projects.subscriptions.acknowledge(request, subscription); + // The Pub/Sub acknowledge API returns an instance of Empty. + return _api.projects.subscriptions.acknowledge( + request, subscription).then((_) => null);; } void _checkTopicName(name) { @@ -283,9 +285,10 @@ class _PullEventImpl implements PullEvent { final pubsub.PullResponse _response; final Message message; - _PullEventImpl(this._api, this._subscriptionName, response) + _PullEventImpl( + this._api, this._subscriptionName, pubsub.PullResponse response) : this._response = response, - message = new _PullMessage(response.pubsubEvent.message); + message = new _PullMessage(response.receivedMessages[0].message); Future acknowledge() { return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); @@ -388,6 +391,13 @@ class _SubscriptionImpl implements Subscription { Future pull({bool noWait: true}) { return _api._pull(_subscription.name, noWait) .then((response) { + // The documentation says 'Returns an empty list if there are no + // messages available in the backlog'. However the receivedMessages + // property can also be null in that case. + if (response.receivedMessages == null || + response.receivedMessages.length == 0) { + return null; + } return new _PullEventImpl(_api, _subscription.name, response); }).catchError((e) => null, test: (e) => e is pubsub.DetailedApiRequestError && diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 814f5c17..20d71c43 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -10,12 +10,17 @@ import 'package:unittest/unittest.dart'; import '../common_e2e.dart'; -String generateTopicName() { - var id = new DateTime.now().millisecondsSinceEpoch; - return 'dart-e2e-test-$id'; -} +runTests(PubSub pubsub, String project, String prefix) { + String generateTopicName() { + var id = new DateTime.now().millisecondsSinceEpoch; + return '$prefix-topic-$id'; + } + + String generateSubscriptionName() { + var id = new DateTime.now().millisecondsSinceEpoch; + return '$prefix-subscription-$id'; + } -runTests(PubSub pubsub, String project) { group('topic', () { test('create-lookup-delete', () async { var topicName = generateTopicName(); @@ -28,32 +33,130 @@ runTests(PubSub pubsub, String project) { expect(await pubsub.deleteTopic(topicName), isNull); }); - test('create-list-delete', () async { + solo_test('create-list-delete', () async { + const int count = 5; + var topicPrefix = generateTopicName(); name(i) => '$topicPrefix-$i'; - for (var i = 0; i < 5; i++) { + for (var i = 0; i < count; i++) { await pubsub.createTopic(name(i)); } var topics = await pubsub.listTopics().map((t) => t.name).toList(); - for (var i = 0; i < 5; i++) { + for (var i = 0; i < count; i++) { expect(topics.contains(name(i)), isTrue); await pubsub.deleteTopic(name(i)); } }); }); + + group('subscription', () { + test('create-lookup-delete', () async { + var topicName = generateTopicName(); + var subscriptionName = generateSubscriptionName(); + var topic = await pubsub.createTopic(topicName); + var subscription = + 
await pubsub.createSubscription(subscriptionName, topicName); + expect(subscription.name, subscriptionName); + subscription = await pubsub.lookupSubscription(subscriptionName); + expect(subscription.name, subscriptionName); + expect(subscription.project, project); + expect(subscription.absoluteName, + 'projects/$project/subscriptions/$subscriptionName'); + expect(subscription.isPull, isTrue); + expect(subscription.isPush, isFalse); + expect(await pubsub.deleteSubscription(subscriptionName), isNull); + expect(await pubsub.deleteTopic(topicName), isNull); + }); + + test('create-list-delete', () async { + const int count = 5; + var topicName = generateTopicName(); + var topic = await pubsub.createTopic(topicName); + + var subscriptionPrefix = generateSubscriptionName(); + + name(i) => '$subscriptionPrefix-$i'; + + for (var i = 0; i < count; i++) { + await pubsub.createSubscription(name(i), topicName); + } + var subscriptions = + await pubsub.listSubscriptions().map((t) => t.name).toList(); + for (var i = 0; i < count; i++) { + expect(subscriptions.contains(name(i)), isTrue); + await pubsub.deleteSubscription(name(i)); + } + await pubsub.deleteTopic(topicName); + }); + + test('push-pull', () async { + var topicName = generateTopicName(); + var subscriptionName = generateSubscriptionName(); + var topic = await pubsub.createTopic(topicName); + var subscription = + await pubsub.createSubscription(subscriptionName, topicName); + expect(await subscription.pull(), isNull); + + expect(await topic.publishString('Hello, world!'), isNull); + var pullEvent = await subscription.pull(noWait: false); + expect(pullEvent, isNotNull); + expect(pullEvent.message.asString, 'Hello, world!'); + expect(await pullEvent.acknowledge(), isNull); + + await pubsub.deleteSubscription(subscriptionName); + await pubsub.deleteTopic(topicName); + }); + }); } main() { - withAuthClient(PubSub.SCOPES, (String project, httpClient) { + // Generate a unique prefix for all names generated by the tests. + var id = new DateTime.now().millisecondsSinceEpoch; + var prefix = 'dart-e2e-test-$id'; + + withAuthClient(PubSub.SCOPES, (String project, httpClient) async { // Share the same pubsub connection for all tests. + bool leftovers = false; var pubsub = new PubSub(httpClient, project); + try { + await runE2EUnittest(() { + runTests(pubsub, project, prefix); + }); + } finally { + // Try to delete any leftover subscriptions from the tests. + var subscriptions = await pubsub.listSubscriptions().toList(); + for (var subscription in subscriptions) { + if (subscription.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover subscription ' + '${subscription.name}'); + leftovers = true; + await pubsub.deleteSubscription(subscription.name); + } catch (e) { + print('Error during test cleanup of subscription ' + '${subscription.name} ($e)'); + } + } + } + // Try to delete any leftover topics from the tests. + var topics = await pubsub.listTopics().toList(); + for (var topic in topics) { + if (topic.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover topic ${topic.name}'); + leftovers = true; + await pubsub.deleteTopic(topic.name); + } catch (e) { + print('Error during test cleanup of topic ${topic.name} ($e)'); + } + } + } + } - return runE2EUnittest(() { - runTests(pubsub, project); - }).whenComplete(() { - // TODO(sgjesse): Cleanup leftover topics/subscriptions. 
- }); + if (leftovers) { + throw 'Test terminated with leftover topics and/or subscriptions'; + } }); } From e5c514280c5eb912847a9f584393cf647594c715 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Tue, 24 Mar 2015 12:00:55 +0100 Subject: [PATCH 064/239] Enforce fully populated entity keys in a number of places R=sgjesse@google.com, wibling@google.com Review URL: https://codereview.chromium.org//984573002 --- pkgs/gcloud/CHANGELOG.md | 4 +++ pkgs/gcloud/lib/src/datastore_impl.dart | 31 +++++++++++++------ .../datastore/e2e/datastore_test_impl.dart | 13 ++++---- 3 files changed, 31 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index eba8111b..003b4cdf 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.1.4+2 + +* Enforce fully populated entity keys in a number of places. + ## 0.1.4+1 * Deduce the query partition automatically from query ancestor key. diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index c79920f4..babe0fea 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -29,7 +29,7 @@ class DatastoreImpl implements datastore.Datastore { DatastoreImpl(http.Client client, this._project) : _api = new api.DatastoreApi(client); - api.Key _convertDatastore2ApiKey(datastore.Key key) { + api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId: true}) { var apiKey = new api.Key(); apiKey.partitionId = new api.PartitionId() @@ -43,6 +43,10 @@ class DatastoreImpl implements datastore.Datastore { part.id = '${element.id}'; } else if (element.id is String) { part.name = element.id; + } else if (enforceId) { + throw new datastore.ApplicationError( + 'Error while encoding entity key: Using `null` as the id is not ' + 'allowed.'); } return part; }).toList(); @@ -141,7 +145,7 @@ class DatastoreImpl implements datastore.Datastore { ..blobValueAsBytes = value.bytes; } else if (value is datastore.Key) { return apiValue - ..keyValue = _convertDatastore2ApiKey(value); + ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); } else if (value is List) { if (!lists) { // FIXME(Issue #3): Consistently handle exceptions. @@ -210,7 +214,7 @@ class DatastoreImpl implements datastore.Datastore { ..blobValueAsBytes = value.bytes; } else if (value is datastore.Key) { return apiProperty - ..keyValue = _convertDatastore2ApiKey(value); + ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); } else if (value is List) { if (!lists) { // FIXME(Issue #3): Consistently handle exceptions. 
@@ -259,10 +263,11 @@ class DatastoreImpl implements datastore.Datastore { unIndexedProperties: unindexedProperties); } - api.Entity _convertDatastore2ApiEntity(datastore.Entity entity) { + api.Entity _convertDatastore2ApiEntity(datastore.Entity entity, + {bool enforceId: false}) { var apiEntity = new api.Entity(); - apiEntity.key = _convertDatastore2ApiKey(entity.key); + apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId); apiEntity.properties = {}; if (entity.properties != null) { for (var key in entity.properties.keys) { @@ -319,7 +324,8 @@ class DatastoreImpl implements datastore.Datastore { var pf = new api.PropertyFilter(); pf.operator = 'HAS_ANCESTOR'; pf.property = new api.PropertyReference()..name = '__key__'; - pf.value = new api.Value()..keyValue = _convertDatastore2ApiKey(key); + pf.value = new api.Value() + ..keyValue = _convertDatastore2ApiKey(key, enforceId: true); return new api.Filter()..propertyFilter = pf; } @@ -380,7 +386,9 @@ class DatastoreImpl implements datastore.Datastore { Future> allocateIds(List keys) { var request = new api.AllocateIdsRequest(); - request..keys = keys.map(_convertDatastore2ApiKey).toList(); + request..keys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: false); + }).toList(); return _api.datasets.allocateIds(request, _project).then((response) { return response.keys.map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); @@ -420,13 +428,14 @@ class DatastoreImpl implements datastore.Datastore { request.mutation.insertAutoId = new List(autoIdInserts.length); for (int i = 0; i < autoIdInserts.length; i++) { request.mutation.insertAutoId[i] = - _convertDatastore2ApiEntity(autoIdInserts[i]); + _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false); } } if (deletes != null) { request.mutation.delete = new List(deletes.length); for (int i = 0; i < deletes.length; i++) { - request.mutation.delete[i] = _convertDatastore2ApiKey(deletes[i]); + request.mutation.delete[i] = + _convertDatastore2ApiKey(deletes[i], enforceId: true); } } return _api.datasets.commit(request, _project).then((result) { @@ -443,7 +452,9 @@ class DatastoreImpl implements datastore.Datastore { Future> lookup(List keys, {datastore.Transaction transaction}) { - var apiKeys = keys.map(_convertDatastore2ApiKey).toList(); + var apiKeys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: true); + }).toList(); var request = new api.LookupRequest(); request.keys = apiKeys; if (transaction != null) { diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index adbf30b6..3b1f7cb0 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -194,7 +194,7 @@ runTests(Datastore datastore, String namespace) { Future> testInsertNegative(List entities, {bool transactional: false, bool xg: false}) { test(Transaction transaction) { - expect(datastore.commit(inserts: entities, + expect(datastore.commit(autoIdInserts: entities, transaction: transaction), throwsA(isApplicationError)); } @@ -207,7 +207,7 @@ runTests(Datastore datastore, String namespace) { var unnamedEntities1 = buildEntities(42, 43, partition: partition); var unnamedEntities5 = buildEntities(1, 6, partition: partition); - var unnamedEntities20 = buildEntities(6, 26, partition: partition); + var unnamedEntities26 = buildEntities(6, 32, partition: partition); var named20000 = buildEntities( 1000, 21001, idFunction: 
(i) => 'named_${i}_of_10000', partition: partition);
@@ -243,15 +243,14 @@ runTests(Datastore datastore, String namespace) {
     });
   });
 
-  // Does not work with cloud datastore REST api, why?
-  test('negative_insert_transactional', () {
-    return testInsertNegative(unnamedEntities5, transactional: true);
+  test('negative_insert__incomplete_path', () {
+    expect(datastore.commit(inserts: unnamedEntities1),
+           throwsA(isApplicationError));
   });
 
-  // Does not work with cloud datastore REST api, why?
   test('negative_insert_transactional_xg', () {
     return testInsertNegative(
-        unnamedEntities20, transactional: true, xg: true);
+        unnamedEntities26, transactional: true, xg: true);
   });
 
   test('negative_insert_20000_entities', () {

From 58a022d7270967f2947d8a0009e727db42ef2c3d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=B8ren=20Gjesse?=
Date: Wed, 25 Mar 2015 09:08:24 +0100
Subject: [PATCH 065/239] Change the noWait argument on pull to wait

Made it the default to wait for a message. The negated name was carried
over directly from the underlying API but I find the direct name much
easier to understand. Also waiting seems to be the right default when
pulling from a subscription.

R=kustermann@google.com

Review URL: https://codereview.chromium.org//1033713002
---
 pkgs/gcloud/lib/pubsub.dart                  | 12 ++++++------
 pkgs/gcloud/lib/src/pubsub_impl.dart         |  4 ++--
 pkgs/gcloud/pubspec.yaml                     |  2 +-
 pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart |  2 +-
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart
index b46c174a..6f8af934 100644
--- a/pkgs/gcloud/lib/pubsub.dart
+++ b/pkgs/gcloud/lib/pubsub.dart
@@ -327,13 +327,13 @@ abstract class Subscription {
 
   /// Pull a message from the subscription.
   ///
-  /// If [noWait] is true, the method will complete the returned `Future`
-  /// with `null` if it finds that there are no messages available.
+  /// If `wait` is `true` (the default), the method will wait for a message
+  /// to become available, and will then complete the `Future` with a
+  /// `PullEvent` containing the message.
   ///
-  /// If `noWait` is false, the method will wait for a message to become
-  /// available, and will then complete the `Future` with a `PullEvent`
-  /// containing the message.
-  Future pull({bool noWait: true});
+  /// If [wait] is `false`, the method will complete the returned `Future`
+  /// with `null` if it finds that there are no messages available.
+  Future pull({bool wait: true});
 }
 
 /// The content of a Pub/Sub message.
diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart
index fe2b957b..9c482dfa 100644
--- a/pkgs/gcloud/lib/src/pubsub_impl.dart
+++ b/pkgs/gcloud/lib/src/pubsub_impl.dart
@@ -388,8 +388,8 @@ class _SubscriptionImpl implements Subscription {
 
   Future delete() => _api._deleteSubscription(_subscription.name);
 
-  Future pull({bool noWait: true}) {
-    return _api._pull(_subscription.name, noWait)
+  Future pull({bool wait: true}) {
+    return _api._pull(_subscription.name, !wait)
         .then((response) {
           // The documentation says 'Returns an empty list if there are no
          // messages available in the backlog'. However the receivedMessages
diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml
index 03bd497c..1d769a57 100644
--- a/pkgs/gcloud/pubspec.yaml
+++ b/pkgs/gcloud/pubspec.yaml
@@ -1,5 +1,5 @@
 name: gcloud
-version: 0.1.5-dev
+version: 0.2.0-dev
 author: Dart Team
 description: Dart gcloud APIs
 homepage: https://github.com/dart-lang/gcloud
diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart
index 20d71c43..4ae131aa 100644
--- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart
+++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart
@@ -100,7 +100,7 @@ runTests(PubSub pubsub, String project, String prefix) {
       expect(await subscription.pull(), isNull);
 
       expect(await topic.publishString('Hello, world!'), isNull);
-      var pullEvent = await subscription.pull(noWait: false);
+      var pullEvent = await subscription.pull();
       expect(pullEvent, isNotNull);
       expect(pullEvent.message.asString, 'Hello, world!');
       expect(await pullEvent.acknowledge(), isNull);

From 95010e18baff1b7fe9c709e8838be79e88c702b6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=B8ren=20Gjesse?=
Date: Wed, 25 Mar 2015 10:24:55 +0100
Subject: [PATCH 066/239] Update the SDK constraint for gcloud to 1.9

We are bumping the version to 0.2.0, and this gives us the liberty of
using async and await for changes to the package. The tests are already
depending on 1.9.

R=kustermann@google.com, wibling@google.com

Review URL: https://codereview.chromium.org//1037713002
---
 pkgs/gcloud/pubspec.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml
index 1d769a57..b86e42cb 100644
--- a/pkgs/gcloud/pubspec.yaml
+++ b/pkgs/gcloud/pubspec.yaml
@@ -4,7 +4,7 @@ author: Dart Team
 description: Dart gcloud APIs
 homepage: https://github.com/dart-lang/gcloud
 environment:
-  sdk: '>=1.5.0 <2.0.0'
+  sdk: '>=1.9.1 <2.0.0'
 dependencies:
   crypto: '>=0.9.0 <0.10.0'
   googleapis: '>=0.2.0 <0.7.0'

From 42aea64053e102ce4df9943f4c44a8ca8b28481d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=B8ren=20Gjesse?=
Date: Wed, 25 Mar 2015 15:54:13 +0100
Subject: [PATCH 067/239] Update README file.

Now it actually has information on what you can do with the gcloud
package.

R=kustermann@google.com, wibling@google.com
BUG=

Review URL: https://codereview.chromium.org//1030593003
---
 pkgs/gcloud/README.md        | 226 ++++++++++++++++++++++++++++++++++-
 pkgs/gcloud/lib/storage.dart |   4 +-
 2 files changed, 224 insertions(+), 6 deletions(-)

diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md
index 698340fe..1186465f 100644
--- a/pkgs/gcloud/README.md
+++ b/pkgs/gcloud/README.md
@@ -1,16 +1,234 @@
-## Google Cloud Platform
+## Google Cloud Platform support package (gcloud)
 
-High level interface for Google Cloud Platform APIs
+The `gcloud` package provides a high level "idiomatic Dart" interface to
+some of the most widely used Google Cloud Platform services. Currently the
+following services are supported:
+
+ * Cloud Datastore
+ * Cloud Storage
+ * Cloud Pub/Sub
+
+The APIs in this package are all based on the generic generated APIs in the
+[googleapis] and [googleapis_beta][googleapisbeta] packages.
+
+This means that the authentication model for using the APIs in this package
+uses the [googleapis_auth][googleapisauth] package.
+
+Note that this package is only intended to be used with the standalone VM
+in a server or command line application. Don't expect this package to work
+in the browser.
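+
+To use the package in your own code, add it (together with the
+`googleapis_auth` package used for authentication) to the `dependencies`
+section of your `pubspec.yaml`. A minimal dependency section could look like
+the following; the `any` constraints are placeholders, and you should pin
+them to the versions you actually use:
+
+```yaml
+dependencies:
+  gcloud: any
+  googleapis_auth: any
+```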
+ +The code snippets below demonstrating the use of this package all assume that +the following imports are present: + +```dart +import 'package:googleapis_auth/auth_io.dart' as auth; +import 'package:http/http.dart' as http; +import 'package:gcloud/db.dart'; +import 'package:gcloud/storage.dart'; +import 'package:gcloud/pubsub.dart'; +import 'package:gcloud/service_scope.dart' as ss; +import 'package:gcloud/src/datastore_impl.dart'; +``` + +### Getting access to the APIs + +The first step in using the APIs is to get an authenticated HTTP client and +with that create API class instances for accessing the different APIs. The +code below assumes that you have a Google Cloud Project called `my-project` +with credentials for a service account from that project stored in the file +`my-project.json`. + +```dart +// Read the service account credentials from the file. +var jsonCredentials = new File('my-project.json').readAsStringSync(); +var credentials = new auth.ServiceAccountCredentials.fromJson(jsonCredentials); + +// Get an HTTP authenticated client using the service account credentials. +var scopes = [] + ..addAll(dastore_impl.DatastoreImpl.SCOPES); + ..addAll(Storage.SCOPES) + ..addAll(PubSub.SCOPES) +var client = await auth.clientViaServiceAccount(creds, scopes); + +// Instantiate objects to access Cloud Datastore, Cloud Storage +// and Cloud Pub/Sub APIs. +var db = new DatastoreDB( + new dastore_impl.DatastoreImpl(client, 's~my-project')); +var storage = new Storage(client, 'my-project'); +var pubsub = new PubSub(client, 'my-project'); +``` + +All the APIs in this package supports the use of 'service scopes'. Service +scopes are described in details below. + +```dart +ss.fork(() { + // register the services in the new service scope. + registerDbService(db); + registerStorageService(storage); + registerPubSubService(pubsub); + + // Run application using these services. +}); +``` + +The services registered with the service scope can now be reached from within +all the code running in the same service scope using the below getters. + +```dart +dbService. +storageService. +pubsubService. +``` + +This way it is not necessary to pass the service objects around in your code. + +### Use with App Engine + +The `gcloud` package is also integrated in the Dart [appengine] package. This +means the `gcloud` services are available both via the appengine context and +service scopes. The authentication required to access the Google Cloud Platform +services is handled automatically. + +This means that getting to the App Engine Datastore can be through either +the App Engine context + +```dart +var db = context.services.db; +``` + +or just using the service scope registration. + +```dart +var db = dbService; +``` + +## Cloud Datastore +Google Cloud Datastore provide a NoSQL, schemaless database for storing +non-relational data. See the product page +[https://cloud.google.com/datastore/][Datastore] for more information. + +The Cloud Datastore API provides a mapping of Dart objects to entities stored +in the Datastore. The following example shows how to annotate a class to +make it possible to store instances of it in the Datastore. + +```dart +@db.Kind() +class Person extends db.Model { + @db.StringProperty() + String name; + + @db.IntProperty() + int age; +} +``` + +The `Kind` annotation tell that instances of this class can be stored. The +class must also inherit from `Model`. Now to store an object into the +Datastore create an instance and use the `commit` function. 
+ +```dart +var person = new Person() + ..name = '' + ..age = 42; +await db.commit(inserts: [person]); +``` + +The function `query` is used to build a `Query` object which can be run to +perform the query. + +```dart +var persons = (await db.query(Person).run()).toList(); +``` + +NOTE: This package include a lower level API provided through the class +`Datastore` on top of which the `DatastoreDB` API is build. The main reason +for this additional API level is to bridge the gap between the different APIs +exposed inside App Engine and through the public REST API. We reserve the +rights to modify and maybe even remove this additional layer at any time. + +## Cloud Storage +Google Cloud Storage provide a highly available object store (aka BLOB +store). See the product page [https://cloud.google.com/storage/][GCS] +for more information. + +In Cloud Storage the objects (BLOBs) are organized in _buckets_. Each bucket +has a name in a global namespace. The following code creates a new bucket +named `my-bucket` and writes the content of the file `my-file.txt` to the +object named `my-object`. + +```dart +var bucket = await storage.createBucket('my-bucket'); +new File('my-file.txt').openRead().pipe(bucket.write('my-object')); +``` + +The following code will read back the object. + +```dart +bucket.read('my-object').pipe(new File('my-file-copy.txt').openWrite()); +``` + +## Cloud Pub/Sub +Google Cloud Pub/Sub provides many-to-many, asynchronous messaging. See the +product page [https://cloud.google.com/pubsub/][PubSub] for more information. + +Cloud Pub/Sub uses two concepts for messaging. _Topics_ are used if you want +to send messages and _subscriptions_ are used to subscribe to topics and +receive the messages. This decouples the producer of a message from the +consumer of a message. + +The following code creates a _topic_ and sends a simple test message: + +```dart +var topic = await pubsub.createTopic('my'topic'); +await topic.publishString('Hello, world!') +``` + +With the following code a _subscription_ is created on the _topic_ and +a message is pulled using the subscription. A received message must be +acknowledged when the consumer has processed it. + +```dart +var subscription = + await pubsub.createSubscription('my-subscription', 'my-topic); +var pullEvent = await subscription.pull(); +print(pullEvent.message.asString); +await pullEvent.acknowledge() +``` + +It is also possible to receive messages using push events instead of pulling +from the subscription. To do this the subscription should be configured as a +push subscription with an HTTP endpoint. + +```dart +await pubsub.createSubscription( + 'my-subscription', + 'my-topic', + endpoint: Uri.parse('https://server.example.com/push')); +``` + +With this subscription all messages will be send to the URL provided in the +`endpoint` argument. The server needs to acknowledge the reception of the +message with a `200 OK` reply. ### Running tests If you want to run the end-to-end tests, a Google Cloud project is required. -When running these tests the following envrionment variables needs to be set: +When running these tests the following environment variables need to be set: GCLOUD_E2E_TEST_PROJECT GCLOUD_E2E_TEST_KEY The vaule of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name of the Google Cloud project to use. 
The value of the environment variable -`GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting wiht `gs://`) +`GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) to a JSON key file for a service account providing access to the Cloud Project. + +[Datastore]: https://cloud.google.com/datastore/ +[GCS]: https://cloud.google.com/storage/ +[PubSub]: https://cloud.google.com/pubsub/ +[googleapis]: https://pub.dartlang.org/packages/googleapis +[googleapisbeta]: https://pub.dartlang.org/packages/googleapis_beta +[googleapisauth]: https://pub.dartlang.org/packages/googleapis_beta +[appengine]: https://pub.dartlang.org/packages/appengine \ No newline at end of file diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4705e7a0..cc0ec52d 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -77,7 +77,7 @@ Storage get storageService => ss.lookup(_storageKey); /// Registers the [storage] object within the current service scope. /// -/// The provided `storage` object will be avilable via the top-level +/// The provided `storage` object will be available via the top-level /// `storageService` getter. /// /// Calling this function outside of a service scope will result in an error. @@ -433,7 +433,7 @@ class AclPermission { /// predefined ACLs have explicit names, and can _only_ be used to set an ACL, /// when either creating or updating a bucket or object. This set of predefined /// ACLs are expanded on the server to their actual list of [AclEntry] objects. -/// When information is retreived on a bucket or object, this expanded list will +/// When information is retrieved on a bucket or object, this expanded list will /// be present. For a description of these predefined ACLs see: /// https://cloud.google.com/storage/docs/accesscontrol#extension. class PredefinedAcl { From f066addd135d305f8dac9f92e0ec82bd5222889a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 13 Apr 2015 14:58:15 +0200 Subject: [PATCH 068/239] Prepare for releasing version 0.2.0 Update to accept the latest versions of the googleapis packages as well. R=kustermann@google.com, wibling@google.com Review URL: https://codereview.chromium.org//1057063004 --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/pubspec.yaml | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 003b4cdf..4f421232 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.2.0 + +* Add support for Cloud Pub/Sub. +* Require Dart version 1.9. + ## 0.1.4+2 * Enforce fully populated entity keys in a number of places. 
diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b86e42cb..a68c0ee2 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0-dev +version: 0.2.0 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.7.0' - googleapis_beta: '>=0.10.0 <0.11.0' + googleapis: '>=0.2.0 <0.9.0' + googleapis_beta: '>=0.10.0 <0.13.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 54eccda02261e83960578e8fb707f7d230d13617 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 13 Apr 2015 18:04:17 +0200 Subject: [PATCH 069/239] Remove dependencies on package:googleapis/common/common.dart TBR=kustermann@google.com, wibling@google.com Review URL: https://codereview.chromium.org//1084563003 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/storage_impl.dart | 17 ++++++++++------- pkgs/gcloud/lib/storage.dart | 1 - pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 1 - pkgs/gcloud/test/storage/e2e_test.dart | 4 ++-- pkgs/gcloud/test/storage/storage_test.dart | 10 +++++----- 7 files changed, 22 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 4f421232..1d9d3a12 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+1 + +* Fix broken import of package:googleapis/common/common.dart. + ## 0.2.0 * Add support for Cloud Pub/Sub. diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 21a9de92..d24b0015 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -65,7 +65,10 @@ class _StorageImpl implements Storage { } Future bucketExists(String bucketName) { - notFoundError(e) => e is common.DetailedApiRequestError && e.status == 404; + notFoundError(e) { + return e is storage_api.DetailedApiRequestError && e.status == 404; + } + return _api.buckets.get(bucketName) .then((_) => true) .catchError((e) => false, test: notFoundError); @@ -196,7 +199,7 @@ class _BucketImpl implements Bucket { _api.objects.get( bucketName, objectName, - downloadOptions: common.DownloadOptions.FullMedia).then( + downloadOptions: storage_api.DownloadOptions.FullMedia).then( (media) => media.stream.pipe(controller.sink)); return controller.stream; } @@ -532,7 +535,7 @@ class _MediaUploadStreamSink implements StreamSink> { // TODO: Avoid using another stream-controller. _resumableController = new StreamController(sync: true); buffer.forEach(_resumableController.add); - var media = new common.Media(_resumableController.stream, null); + var media = new storage_api.Media(_resumableController.stream, null); _startResumableUpload(_resumableController.stream, _length); _state = _STATE_DECIDED_RESUMABLE; } @@ -576,13 +579,13 @@ class _MediaUploadStreamSink implements StreamSink> { void _startNormalUpload(Stream stream, int length) { var contentType = _object.contentType != null ? 
_object.contentType : 'application/octet-stream'; - var media = new common.Media(stream, length, contentType: contentType); + var media = new storage_api.Media(stream, length, contentType: contentType); _api.objects.insert(_object, _bucketName, name: _objectName, predefinedAcl: _predefinedAcl, uploadMedia: media, - uploadOptions: common.UploadOptions.Default) + uploadOptions: storage_api.UploadOptions.Default) .then((response) { _doneCompleter.complete(new _ObjectInfoImpl(response)); }, onError: _completeError); @@ -591,13 +594,13 @@ class _MediaUploadStreamSink implements StreamSink> { void _startResumableUpload(Stream stream, int length) { var contentType = _object.contentType != null ? _object.contentType : 'application/octet-stream'; - var media = new common.Media(stream, length, contentType: contentType); + var media = new storage_api.Media(stream, length, contentType: contentType); _api.objects.insert(_object, _bucketName, name: _objectName, predefinedAcl: _predefinedAcl, uploadMedia: media, - uploadOptions: common.UploadOptions.Resumable) + uploadOptions: storage_api.UploadOptions.Resumable) .then((response) { _doneCompleter.complete(new _ObjectInfoImpl(response)); }, onError: _completeError); diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index cc0ec52d..01b23546 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -55,7 +55,6 @@ import 'package:http/http.dart' as http; import 'package:crypto/crypto.dart' as crypto; import 'package:googleapis/storage/v1.dart' as storage_api; -import 'package:googleapis/common/common.dart' as common; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a68c0ee2..b88fe373 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0 +version: 0.2.0+1 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 4ae131aa..5cbfb228 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -5,7 +5,6 @@ import 'dart:async'; import 'package:gcloud/pubsub.dart'; -import 'package:googleapis/common/common.dart' as common; import 'package:unittest/unittest.dart'; import '../common_e2e.dart'; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index fabbf48d..3a21eb05 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -6,8 +6,8 @@ library gcloud.storage; import 'dart:async'; +import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:gcloud/storage.dart'; -import 'package:googleapis/common/common.dart' as common; import 'package:unittest/unittest.dart'; import '../common_e2e.dart'; @@ -17,7 +17,7 @@ String generateBucketName() { return 'dart-e2e-test-$id'; } -bool testDetailedApiError(e) => e is common.DetailedApiRequestError; +bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; // Generate a list just above the limit when changing to resumable upload. 
const int MB = 1024 * 1024; diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index c63ff8f8..f2eb3175 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -7,13 +7,13 @@ library gcloud.storage; import 'dart:async'; import 'dart:convert'; +import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:http/http.dart' as http; import 'package:unittest/unittest.dart'; import 'package:gcloud/storage.dart'; import 'package:googleapis/storage/v1.dart' as storage; -import 'package:googleapis/common/common.dart' as common; import '../common.dart'; import '../common_e2e.dart'; @@ -325,8 +325,8 @@ main() { new List.generate(minResumableUpload, (e) => e & 255); bool testArgumentError(e) => e is ArgumentError; - bool testApiError(e) => e is common.ApiRequestError; - bool testDetailedApiError(e) => e is common.DetailedApiRequestError; + bool testApiError(e) => e is storage_api.ApiRequestError; + bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; Function expectStatus(status) => (e) => expect(e.status, status); Function expectNotNull(status) => (o) => expect(o, isNotNull); @@ -536,13 +536,13 @@ main() { .then((_) => throw 'Unexpected') .catchError( expectAsync(expectNotNull), - test: (e) => e is String || e is common.ApiRequestError); + test: (e) => e is String || e is storage_api.ApiRequestError); return new Stream.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') .catchError( expectAsync(expectNotNull), - test: (e) => e is String || e is common.ApiRequestError); + test: (e) => e is String || e is storage_api.ApiRequestError); } test([bytesResumableUpload], bytesResumableUpload.length + 1) From bff22ebc18be20dd6b9fcfb47c12eae6d422cf0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 15 Apr 2015 09:02:42 +0200 Subject: [PATCH 070/239] Remove leftover solo_test Also added a few prints and try/catchs to get more info on errors on the bots. TBR=kustermann@google.com Review URL: https://codereview.chromium.org//1089183002 --- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 55 ++++++++++++-------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 5cbfb228..74a252d0 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -32,7 +32,7 @@ runTests(PubSub pubsub, String project, String prefix) { expect(await pubsub.deleteTopic(topicName), isNull); }); - solo_test('create-list-delete', () async { + test('create-list-delete', () async { const int count = 5; var topicPrefix = generateTopicName(); @@ -124,33 +124,44 @@ main() { runTests(pubsub, project, prefix); }); } finally { - // Try to delete any leftover subscriptions from the tests. - var subscriptions = await pubsub.listSubscriptions().toList(); - for (var subscription in subscriptions) { - if (subscription.name.startsWith(prefix)) { - try { - print('WARNING: Removing leftover subscription ' - '${subscription.name}'); - leftovers = true; - await pubsub.deleteSubscription(subscription.name); - } catch (e) { - print('Error during test cleanup of subscription ' - '${subscription.name} ($e)'); + print('checking for leftover subscriptions'); + try { + // Try to delete any leftover subscriptions from the tests. 
+ var subscriptions = await pubsub.listSubscriptions().toList(); + for (var subscription in subscriptions) { + if (subscription.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover subscription ' + '${subscription.name}'); + leftovers = true; + await pubsub.deleteSubscription(subscription.name); + } catch (e) { + print('Error during test cleanup of subscription ' + '${subscription.name} ($e)'); + } } } + } catch (e) { + print('Error checking for leftover subscriptions ($e)'); } + // Try to delete any leftover topics from the tests. - var topics = await pubsub.listTopics().toList(); - for (var topic in topics) { - if (topic.name.startsWith(prefix)) { - try { - print('WARNING: Removing leftover topic ${topic.name}'); - leftovers = true; - await pubsub.deleteTopic(topic.name); - } catch (e) { - print('Error during test cleanup of topic ${topic.name} ($e)'); + print('checking for leftover topics'); + try { + var topics = await pubsub.listTopics().toList(); + for (var topic in topics) { + if (topic.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover topic ${topic.name}'); + leftovers = true; + await pubsub.deleteTopic(topic.name); + } catch (e) { + print('Error during test cleanup of topic ${topic.name} ($e)'); + } } } + } catch (e) { + print('Error checking for leftover topics ($e)'); } } From dc8b77a18e97bb0fd3fa60ad9518f691efbc5352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Wed, 15 Apr 2015 09:19:37 +0200 Subject: [PATCH 071/239] Mark Pub/Sub e2e test as slow TBR=kustermann@google.com Review URL: https://codereview.chromium.org//1064633004 --- pkgs/gcloud/.status | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/gcloud/.status b/pkgs/gcloud/.status index 285891af..4fce6c26 100644 --- a/pkgs/gcloud/.status +++ b/pkgs/gcloud/.status @@ -14,6 +14,10 @@ build/test/pubsub/pubsub_e2e_test: Skip # - it combines several tests to avoid concurrent tests touching the same data test/db_all_e2e_test: Slow, Pass +# This test is slow because +# - it does e2e testing +test/pubsub/pubsub_e2e_test: Slow, Pass + [ $browser ] build/test/storage/e2e_test: Skip test/storage/e2e_test: Skip From 5b2f97717aca3902babbcd91a2ee61aed0a8ed9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 20 Apr 2015 14:12:50 +0200 Subject: [PATCH 072/239] Make test fail if post-processing of leftovers caused errors R=kustermann@google.com Review URL: https://codereview.chromium.org//1089173004 --- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 74a252d0..a2af0718 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -118,6 +118,7 @@ main() { withAuthClient(PubSub.SCOPES, (String project, httpClient) async { // Share the same pubsub connection for all tests. bool leftovers = false; + bool cleanupErrors = false; var pubsub = new PubSub(httpClient, project); try { await runE2EUnittest(() { @@ -138,11 +139,13 @@ main() { } catch (e) { print('Error during test cleanup of subscription ' '${subscription.name} ($e)'); + cleanupErrors = true; } } } } catch (e) { print('Error checking for leftover subscriptions ($e)'); + cleanupErrors = true; } // Try to delete any leftover topics from the tests. 
@@ -157,16 +160,22 @@ main() { await pubsub.deleteTopic(topic.name); } catch (e) { print('Error during test cleanup of topic ${topic.name} ($e)'); + cleanupErrors = true; } } } } catch (e) { print('Error checking for leftover topics ($e)'); + cleanupErrors = true; } } if (leftovers) { throw 'Test terminated with leftover topics and/or subscriptions'; } + + if (cleanupErrors) { + throw 'Test encountered errors while checking for leftovers'; + } }); } From 31b25f6a348f7502b591752792d498227d4f4f0c Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 1 Jun 2015 17:46:36 +0200 Subject: [PATCH 073/239] Widen constraint on googleapis/googleapis_beta BUG=https://github.com/dart-lang/appengine/issues/36 R=sgjesse@google.com Review URL: https://codereview.chromium.org//1153063006 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 1d9d3a12..441954f1 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+2 + +* Widen constraint on `googleapis/googleapis_beta` + ## 0.2.0+1 * Fix broken import of package:googleapis/common/common.dart. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b88fe373..66475e9b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+1 +version: 0.2.0+2 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.9.0' - googleapis_beta: '>=0.10.0 <0.13.0' + googleapis: '>=0.2.0 <0.11.0' + googleapis_beta: '>=0.10.0 <0.15.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 89df228b3927c1cbc9cfcf2882f061305f2217f3 Mon Sep 17 00:00:00 2001 From: Gustav Wibling Date: Fri, 12 Jun 2015 11:03:30 +0200 Subject: [PATCH 074/239] Widen constraints to pick up latest googleapis versions. R=kustermann@google.com, sgjesse@google.com BUG= Review URL: https://codereview.chromium.org//1185613002 --- pkgs/gcloud/lib/src/pubsub_impl.dart | 2 +- pkgs/gcloud/pubspec.yaml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 9c482dfa..6a4fc905 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -107,7 +107,7 @@ class _PubSubImpl implements PubSub { ..ackIds = [ ackId ]; // The Pub/Sub acknowledge API returns an instance of Empty. 
return _api.projects.subscriptions.acknowledge( - request, subscription).then((_) => null);; + request, subscription).then((_) => null); } void _checkTopicName(name) { diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 66475e9b..c4249bb8 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+2 +version: 0.2.0+3 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.11.0' - googleapis_beta: '>=0.10.0 <0.15.0' + googleapis: '>=0.2.0 <0.12.0' + googleapis_beta: '>=0.10.0 <0.16.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 414acc1af8f82448c2b985bfc9b94dd4300c9256 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 23 Jun 2015 23:49:19 -0700 Subject: [PATCH 075/239] remove unused members R=sgjesse@google.com Review URL: https://codereview.chromium.org//1204943003. --- pkgs/gcloud/lib/src/storage_impl.dart | 1 - pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart | 2 -- pkgs/gcloud/test/datastore/e2e/utils.dart | 1 - pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 2 -- pkgs/gcloud/test/pubsub/pubsub_test.dart | 6 ------ pkgs/gcloud/test/storage/e2e_test.dart | 6 ------ pkgs/gcloud/test/storage/storage_test.dart | 7 ------- 8 files changed, 1 insertion(+), 26 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index d24b0015..f599cc0b 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -535,7 +535,6 @@ class _MediaUploadStreamSink implements StreamSink> { // TODO: Avoid using another stream-controller. 
_resumableController = new StreamController(sync: true); buffer.forEach(_resumableController.add); - var media = new storage_api.Media(_resumableController.stream, null); _startResumableUpload(_resumableController.stream, _length); _state = _STATE_DECIDED_RESUMABLE; } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index c4249bb8..e0f6595f 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+3 +version: 0.2.1-dev author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 3b1f7cb0..1f9e89dd 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -402,8 +402,6 @@ runTests(Datastore datastore, String namespace) { return test(null); } - var unnamedEntities1 = buildEntities(42, 43, partition: partition); - var unnamedEntities5 = buildEntities(1, 6, partition: partition); var unnamedEntities99 = buildEntities(6, 106, partition: partition); test('delete', () { diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 2a1ea1c1..cf5ca730 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -92,7 +92,6 @@ List buildEntityWithAllProperties( for (var i = from; i < to; i++) { var key = buildKey( i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); - var unIndexedCopy = new Set.from(unIndexed); var properties = buildProperties(i); entities.add(new Entity(key, properties, unIndexedProperties: unIndexed)); } diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index a2af0718..c32b384a 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-import 'dart:async'; - import 'package:gcloud/pubsub.dart'; import 'package:unittest/unittest.dart'; diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 267b46cb..e12a6415 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -131,8 +131,6 @@ main() { }); group('query', () { - var defaultPageSize = 50; - addTopics(pubsub.ListTopicsResponse response, int first, int count) { response.topics = []; for (int i = 0; i < count; i++) { @@ -504,8 +502,6 @@ main() { }); group('query', () { - var defaultPageSize = 50; - addSubscriptions( pubsub.ListSubscriptionsResponse response, int first, int count) { response.subscriptions = []; @@ -934,8 +930,6 @@ main() { group('subscription', () { var name = 'test-subscription'; var absoluteName = 'projects/$PROJECT/subscriptions/test-subscription'; - var topicName = 'test-topic'; - var absoluteTopicName = 'projects/$PROJECT/topics/test-topic'; test('delete', () { var mock = mockClient(); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 3a21eb05..c27efad1 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -74,8 +74,6 @@ runTests(Storage storage, Bucket testBucket) { }); test('create-error', () { - var bucketName = generateBucketName(); - storage.createBucket('goog-reserved').catchError(expectAsync((e) { expect(e, isNotNull); }), test: testDetailedApiError); @@ -118,7 +116,6 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-predefined-acl-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, predefinedAcl, expectedLength) { - var bucketName = generateBucketName(); return bucket.writeBytes( objectName, [1, 2, 3], predefinedAcl: predefinedAcl) .then(expectAsync((result) { @@ -149,7 +146,6 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-acl-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, acl, expectedLength) { - var bucketName = generateBucketName(); return bucket.writeBytes(objectName, [1, 2, 3], acl: acl) .then(expectAsync((result) { expect(result, isNotNull); @@ -198,12 +194,10 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-metadata-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, metadata, bytes) { - var bucketName = generateBucketName(); return bucket.writeBytes(objectName, bytes, metadata: metadata) .then(expectAsync((result) { expect(result, isNotNull); return bucket.info(objectName).then(expectAsync((info) { - var acl = info.metadata.acl; expect(info.name, objectName); expect(info.length, bytes.length); expect(info.updated is DateTime, isTrue); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index f2eb3175..fc72a2d3 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -33,7 +33,6 @@ withMockClient(function) { main() { group('bucket', () { var bucketName = 'test-bucket'; - var absoluteName = 'gs://test-bucket'; test('create', () { withMockClient((mock, api) { @@ -325,9 +324,7 @@ main() { new List.generate(minResumableUpload, (e) => e & 255); bool testArgumentError(e) => e is ArgumentError; - bool testApiError(e) => e is storage_api.ApiRequestError; bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; - Function expectStatus(status) => (e) => expect(e.status, status); Function 
expectNotNull(status) => (o) => expect(o, isNotNull); expectNormalUpload(mock, data, objectName) { @@ -554,7 +551,6 @@ main() { test('write-add-error', () { withMockClient((mock, api) { var bucket = api.bucket(bucketName); - var controller = new StreamController(sync: true); var sink = bucket.write(bucketName); sink.done .then((_) => throw 'Unexpected') @@ -569,7 +565,6 @@ main() { }); test('write-long-add-error', () { - int count = 0; withMockClient((mock, api) { mock.registerResumableUpload( 'POST', 'b/$bucketName/o', expectAsync((request) { @@ -673,8 +668,6 @@ main() { var object = new storage.Object.fromJson(JSON.decode(request.body)); ObjectMetadata m = metadata[countInitial]; expect(object.name, objectName); - var contentType = m.contentType != null - ? m.contentType : 'application/octet-stream'; expect(object.cacheControl, m.cacheControl); expect(object.contentDisposition, m.contentDisposition); expect(object.contentEncoding, m.contentEncoding); From 1fa1fc4823323c51fd29ebf64b5982f1e5037331 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Thu, 25 Jun 2015 13:19:31 +0200 Subject: [PATCH 076/239] Widen constraints to pick up latest googleapis versions. R=kustermann@google.com, wibling@google.com BUG= Review URL: https://codereview.chromium.org//1203393002. --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e0f6595f..269f4e2b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -7,7 +7,7 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.12.0' + googleapis: '>=0.2.0 <0.14.0' googleapis_beta: '>=0.10.0 <0.16.0' http: '>=0.11.0 <0.12.0' dev_dependencies: From 0de164d156a2e42894cd759f675462699b8421ce Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 26 Jun 2015 07:06:23 -0700 Subject: [PATCH 077/239] simplify _MediaUploadStreamSink._onDone R=sgjesse@google.com Review URL: https://codereview.chromium.org//1210103003. --- pkgs/gcloud/lib/src/storage_impl.dart | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index f599cc0b..994c0f8b 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -548,10 +548,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _STATE_PROBING_LENGTH) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. - var controller = new StreamController(); - buffer.forEach(controller.add); - controller.close(); - _startNormalUpload(controller.stream, _bufferLength); + _startNormalUpload(new Stream.fromIterable(buffer), _bufferLength); } else { _resumableController.close(); } From a666fa7fb44a806d3a7aef425de3b7ab382d80f3 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 26 Jun 2015 07:32:27 -0700 Subject: [PATCH 078/239] Make sure errors that occur during Storage.read are sent to the returned Stream Closes dart-lang/gcloud#29 R=kustermann@google.com Review URL: https://codereview.chromium.org//1208483010. 
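A minimal usage sketch of the behavior this commit describes, assuming an already-created `Bucket` instance and hypothetical object names (`'my-object'`, `'missing-object'`): with this change a failed request (for example a 404 for a missing object) surfaces as an error on the stream returned by `read`, and a byte range can be requested with `offset`/`length`. Importing `DetailedApiRequestError` from package:googleapis mirrors what the package's own tests do; this is an illustrative sketch, not part of the patch itself.

```dart
// Sketch only (not part of the patch): `bucket` is an existing Bucket and the
// object names are hypothetical.
import 'dart:async';

import 'package:gcloud/storage.dart';
import 'package:googleapis/storage/v1.dart' show DetailedApiRequestError;

Future readExamples(Bucket bucket) async {
  // Read a byte range; per the new contract, a non-zero offset requires length.
  var bytes = <int>[];
  await for (var chunk in bucket.read('my-object', offset: 1, length: 3)) {
    bytes.addAll(chunk);
  }

  // A missing object now shows up as an error on the returned stream.
  try {
    await bucket.read('missing-object').toList();
  } on DetailedApiRequestError catch (e) {
    print('read failed with status ${e.status}');
  }
}
```

This matches what the new `read` tests added in this patch assert: reading a missing object results in a `DetailedApiRequestError` with status 404, and out-of-contract `offset`/`length` arguments raise an `ArgumentError`.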
--- pkgs/gcloud/CHANGELOG.md | 4 + pkgs/gcloud/lib/src/storage_impl.dart | 36 ++++-- pkgs/gcloud/lib/storage.dart | 11 +- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/common.dart | 26 ++++- pkgs/gcloud/test/storage/storage_test.dart | 125 ++++++++++++++++++--- 6 files changed, 172 insertions(+), 32 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 441954f1..11aa35e1 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+4 + +* `Storage.read` now honors `offset` and `length` arguments. + ## 0.2.0+2 * Widen constraint on `googleapis/googleapis_beta` diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 994c0f8b..21ac9707 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -194,14 +194,34 @@ class _BucketImpl implements Bucket { return sink.close(); } - Stream read(String objectName, {int offset: 0, int length}) { - var controller = new StreamController(); - _api.objects.get( - bucketName, - objectName, - downloadOptions: storage_api.DownloadOptions.FullMedia).then( - (media) => media.stream.pipe(controller.sink)); - return controller.stream; + Stream> read(String objectName, {int offset, int length}) async* { + if (offset == null) { + offset = 0; + } + + if (offset != 0 && length == null) { + throw new ArgumentError( + 'length must have a value if offset is non-zero.'); + } + + var options = storage_api.DownloadOptions.FullMedia; + + if (length != null) { + if (length <= 0) { + throw new ArgumentError.value(length, 'length', + 'If provided, length must greater than zero.'); + } + // For ByteRange, end is *inclusive*. + var end = offset + length - 1; + var range = new storage_api.ByteRange(offset, end); + assert(range.length == length); + options = new storage_api.PartialDownloadOptions(range); + } + + var media = await _api.objects.get(bucketName, objectName, + downloadOptions: options); + + yield* media.stream; } Future info(String objectName) { diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 01b23546..42db58dc 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -735,10 +735,15 @@ abstract class Bucket { {ObjectMetadata metadata, Acl acl, PredefinedAcl predefinedAcl, String contentType}); - /// Read object content. + /// Read object content as byte stream. /// - // TODO: More documentation - Stream> read(String objectName, {int offset: 0, int length}); + /// If [offset] is provided, [length] must also be provided. + /// + /// If [length] is provided, it must be greater than `0`. + /// + /// If there is a problem accessing the file, a [DetailedApiRequestError] is + /// thrown. + Stream> read(String objectName, {int offset, int length}); /// Lookup object metadata. 
/// diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 269f4e2b..d9707964 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.1-dev +version: 0.2.0+4 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 97aa783c..166d1034 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -19,6 +19,10 @@ const RESPONSE_HEADERS = const { }; class MockClient extends http.BaseClient { + static const bytes = const [1, 2, 3, 4, 5]; + + final _bytesHeaderRegexp = new RegExp(r"bytes=(\d+)-(\d+)"); + final String hostname; final String rootPath; final Uri rootUri; @@ -112,9 +116,25 @@ class MockClient extends http.BaseClient { new http.Response('', 308, headers: RESPONSE_HEADERS)); } - Future respondBytes(List bytes) { - return new Future.value( - new http.Response.bytes(bytes, 200, headers: RESPONSE_HEADERS)); + Future respondBytes(http.Request request) async { + expect(request.url.queryParameters['alt'], 'media'); + + var myBytes = bytes; + var headers = new Map.from(RESPONSE_HEADERS); + + var range = request.headers['range']; + if (range != null) { + var match = _bytesHeaderRegexp.allMatches(range).single; + + var start = int.parse(match[1]); + var end = int.parse(match[2]); + + myBytes = bytes.sublist(start, end + 1); + headers['content-length'] = myBytes.length.toString(); + headers['content-range'] = 'bytes $start-$end/'; + } + + return new http.Response.bytes(myBytes, 200, headers: headers); } Future respondError(statusCode) { diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index fc72a2d3..9185e9ba 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -7,7 +7,6 @@ library gcloud.storage; import 'dart:async'; import 'dart:convert'; -import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:http/http.dart' as http; import 'package:unittest/unittest.dart'; @@ -25,11 +24,17 @@ const String ROOT_PATH = '/storage/v1/'; http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); -withMockClient(function) { +withMockClient(function(MockClient client, Storage storage)) { var mock = mockClient(); function(mock, new Storage(mock, PROJECT)); } +Future withMockClientAsync( + Future function(MockClient client, Storage storage)) async { + var mock = mockClient(); + await function(mock, new Storage(mock, PROJECT)); +} + main() { group('bucket', () { var bucketName = 'test-bucket'; @@ -324,7 +329,7 @@ main() { new List.generate(minResumableUpload, (e) => e & 255); bool testArgumentError(e) => e is ArgumentError; - bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; + bool testDetailedApiError(e) => e is storage.DetailedApiRequestError; Function expectNotNull(status) => (o) => expect(o, isNotNull); expectNormalUpload(mock, data, objectName) { @@ -533,13 +538,13 @@ main() { .then((_) => throw 'Unexpected') .catchError( expectAsync(expectNotNull), - test: (e) => e is String || e is storage_api.ApiRequestError); + test: (e) => e is String || e is storage.ApiRequestError); return new Stream.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') .catchError( expectAsync(expectNotNull), - test: (e) => e is String || e is storage_api.ApiRequestError); + test: (e) => e is String || e is storage.ApiRequestError); } test([bytesResumableUpload], 
bytesResumableUpload.length + 1) @@ -881,21 +886,107 @@ main() { }); }); + group('read', () { + test('success', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', + 'b/$bucketName/o/$objectName', + expectAsync(mock.respondBytes)); + var bucket = api.bucket(bucketName); + var data = []; - test('read', () { - var bytes = [1, 2, 3]; - withMockClient((mock, api) { - mock.register( - 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { - expect(request.url.queryParameters['alt'], 'media'); - return mock.respondBytes(bytes); - })); + await bucket.read(objectName).forEach(data.addAll); + expect(data, MockClient.bytes); + }); + }); - var bucket = api.bucket(bucketName); - var data = []; - bucket.read(objectName).listen(data.addAll).asFuture() - .then(expectAsync((_) => expect(data, bytes))); + test('with offset, without length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName, offset: 1).toList(); + fail('An exception should be thrown'); + } on ArgumentError catch (e) { + expect(e.message, + "length must have a value if offset is non-zero."); + } + }); + }); + + test('with offset and length zero', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName, offset: 1, length: 0).toList(); + fail('An exception should be thrown'); + } on ArgumentError catch (e) { + expect(e.message, "If provided, length must greater than zero."); + } + }); + }); + + test('with invalid length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName, length: -1).toList(); + fail('An exception should be thrown'); + } on ArgumentError catch (e) { + expect(e.message, "If provided, length must greater than zero."); + } + }); + }); + + test('with length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', + 'b/$bucketName/o/$objectName', + expectAsync(mock.respondBytes)); + + var bucket = api.bucket(bucketName); + var data = []; + + await bucket.read(objectName, length: 4).forEach(data.addAll); + expect(data, MockClient.bytes.sublist(0, 4)); + }); + }); + + test('with offset and length', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register('GET', + 'b/$bucketName/o/$objectName', + expectAsync(mock.respondBytes)); + + var bucket = api.bucket(bucketName); + var data = []; + + await bucket.read(objectName, offset: 1, length: 3) + .forEach(data.addAll); + expect(data, MockClient.bytes.sublist(1, 4)); + }); + }); + + test('file does not exist', () async { + await withMockClientAsync((MockClient mock, Storage api) async { + mock.register( + 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + expect(request.url.queryParameters['alt'], 'media'); + return mock.respondError(404); + })); + + var bucket = api.bucket(bucketName); + + try { + await bucket.read(objectName).toList(); + fail('An exception should be thrown'); + } on storage.DetailedApiRequestError catch (e) { + expect(e.status, 404); + } + }); }); }); From 7137ae9c4d45fbf71c3405fbb75c9240d94608e4 Mon Sep 17 00:00:00 2001 From: Luke Church Date: Thu, 2 Jul 2015 12:27:12 +0100 Subject: [PATCH 079/239] Nit in readme --- pkgs/gcloud/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md
index 1186465f..13ab252b 100644
--- a/pkgs/gcloud/README.md
+++ b/pkgs/gcloud/README.md
@@ -181,7 +181,7 @@ consumer of a message.
 
 The following code creates a _topic_ and sends a simple test message:
 
 ```dart
-var topic = await pubsub.createTopic('my'topic');
+var topic = await pubsub.createTopic('my-topic');
 await topic.publishString('Hello, world!')
 ```
@@ -231,4 +231,4 @@ to a JSON key file for a service account providing access to the Cloud Project.
 [googleapis]: https://pub.dartlang.org/packages/googleapis
 [googleapisbeta]: https://pub.dartlang.org/packages/googleapis_beta
 [googleapisauth]: https://pub.dartlang.org/packages/googleapis_beta
-[appengine]: https://pub.dartlang.org/packages/appengine
\ No newline at end of file
+[appengine]: https://pub.dartlang.org/packages/appengine

From 5519100eb9ed3b5483b913bf4f8912ed73d09bba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=B8ren=20Gjesse?=
Date: Wed, 5 Aug 2015 11:51:49 +0200
Subject: [PATCH 080/239] Widen constraints to pick up latest googleapis versions.

The constraints for googleapis were not changed because the previous change
wrongly raised them one step too far.

R=wibling@google.com
BUG=

Review URL: https://codereview.chromium.org//1268333003 .
---
 pkgs/gcloud/pubspec.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml
index d9707964..5450d807 100644
--- a/pkgs/gcloud/pubspec.yaml
+++ b/pkgs/gcloud/pubspec.yaml
@@ -1,5 +1,5 @@
 name: gcloud
-version: 0.2.0+4
+version: 0.2.0+5
 author: Dart Team
 description: Dart gcloud APIs
 homepage: https://github.com/dart-lang/gcloud
@@ -8,7 +8,7 @@ environment:
 dependencies:
   crypto: '>=0.9.0 <0.10.0'
   googleapis: '>=0.2.0 <0.14.0'
-  googleapis_beta: '>=0.10.0 <0.16.0'
+  googleapis_beta: '>=0.10.0 <0.17.0'
   http: '>=0.11.0 <0.12.0'
 dev_dependencies:
   googleapis_auth: '>=0.1.1 <0.3.0'

From 4ba506bbd4cde58cd0631bb8fca176deec0fc450 Mon Sep 17 00:00:00 2001
From: Martin Kustermann
Date: Mon, 17 Aug 2015 17:23:21 +0200
Subject: [PATCH 081/239] Widen constraints to pick up latest googleapis versions.

R=sgjesse@google.com

Review URL: https://codereview.chromium.org//1289763003 .
---
 pkgs/gcloud/pubspec.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml
index 5450d807..456b820a 100644
--- a/pkgs/gcloud/pubspec.yaml
+++ b/pkgs/gcloud/pubspec.yaml
@@ -1,5 +1,5 @@
 name: gcloud
-version: 0.2.0+5
+version: 0.2.0+6
 author: Dart Team
 description: Dart gcloud APIs
 homepage: https://github.com/dart-lang/gcloud
@@ -7,8 +7,8 @@ environment:
   sdk: '>=1.9.1 <2.0.0'
 dependencies:
   crypto: '>=0.9.0 <0.10.0'
-  googleapis: '>=0.2.0 <0.14.0'
-  googleapis_beta: '>=0.10.0 <0.17.0'
+  googleapis: '>=0.2.0 <0.15.0'
+  googleapis_beta: '>=0.10.0 <0.18.0'
   http: '>=0.11.0 <0.12.0'
 dev_dependencies:
   googleapis_auth: '>=0.1.1 <0.3.0'

From f4f25c760791587aa3f0962d5f955e4d815779cb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=B8ren=20Gjesse?=
Date: Thu, 3 Sep 2015 16:23:24 +0200
Subject: [PATCH 082/239] Fix bugs in gcloud storage e2e test

The ACLs returned include a 'user-ID' entry in addition to the ones set
when creating the object.

The scope for misc@dartlang.org must be a group, otherwise the error
"status: 400, message: Invalid argument" is returned.

Removed the solo_, as it looks like running the test no longer hits a rate
limit.
BUG= https://github.com/dart-lang/gcloud/issues/32 R=kustermann@google.com Review URL: https://codereview.chromium.org//1312593003 . --- pkgs/gcloud/test/storage/e2e_test.dart | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index c27efad1..77a4f838 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -80,8 +80,7 @@ runTests(Storage storage, Bucket testBucket) { }); }); - // TODO: Remove solo_ here when the rate-limit issue have been resolved. - solo_group('object', () { + group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. Future withTestBucket(function) { @@ -171,7 +170,7 @@ runTests(Storage storage, Bucket testBucket) { [new AclEntry(AclScope.allUsers, AclPermission.WRITE), new AclEntry(new AccountScope('sgjesse@google.com'), AclPermission.WRITE), - new AclEntry(new AccountScope('misc@dartlang.org'), + new AclEntry(new GroupScope('misc@dartlang.org'), AclPermission.READ)]); Acl acl4 = new Acl( [new AclEntry(AclScope.allUsers, AclPermission.WRITE), @@ -182,11 +181,14 @@ runTests(Storage storage, Bucket testBucket) { new AclEntry(new DomainScope('dartlang.org'), AclPermission.FULL_CONTROL)]); + // The expected length of the returned ACL is one longer than the one + // use during creation as an additional 'used-ID' ACL entry is added + // by cloud storage during creation. return Future.forEach([ - () => test('test-1', acl1, 1), - () => test('test-2', acl2, 2), - () => test('test-3', acl3, 3), - () => test('test-4', acl4, 4) + () => test('test-1', acl1, acl1.entries.length + 1), + () => test('test-2', acl2, acl2.entries.length + 1), + () => test('test-3', acl3, acl3.entries.length + 1), + () => test('test-4', acl4, acl4.entries.length + 1) ], (f) => f().then(expectAsync((_) {}))); }); }); From 7aa29d175d5b41c0fdfc0535efacab85a6357bd8 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 9 Sep 2015 21:19:20 +0100 Subject: [PATCH 083/239] Widen constraints to pick up latest googleapis versions. Closes dart-lang/gclouddart-lang/gcloud#33 R=kustermann@google.com Review URL: https://codereview.chromium.org//1334533003 . 
--- pkgs/gcloud/.gitignore | 1 + pkgs/gcloud/pubspec.yaml | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore index 192d2706..2f08921c 100644 --- a/pkgs/gcloud/.gitignore +++ b/pkgs/gcloud/.gitignore @@ -1,3 +1,4 @@ pubspec.lock packages .pub +.packages diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 456b820a..40b07af7 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+6 +version: 0.2.0+7 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,12 +7,12 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.15.0' - googleapis_beta: '>=0.10.0 <0.18.0' + googleapis: '>=0.2.0 <0.16.0' + googleapis_beta: '>=0.10.0 <0.19.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' - http_parser: '>=0.0.2+5 <0.1.0' + http_parser: '>=0.0.2+5 <2.0.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: From ddea462d3cf684e3df7eb7507aa0a624aec5338b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 14 Sep 2015 14:33:40 +0200 Subject: [PATCH 084/239] Widen constraints to pick up latest googleapis versions R=wibling@google.com BUG= Review URL: https://codereview.chromium.org//1340023002 . --- pkgs/gcloud/pubspec.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 40b07af7..cbf41732 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.16.0' - googleapis_beta: '>=0.10.0 <0.19.0' + googleapis: '>=0.2.0 <0.17.0' + googleapis_beta: '>=0.10.0 <0.20.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From dd66bc56daf33b6ef743e403a3c0e9e70ef94248 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 14 Sep 2015 14:34:35 +0200 Subject: [PATCH 085/239] Add the solo_test back Turned out that the bucket creation rate could still be hit. R=kustermann@google.com BUG= Review URL: https://codereview.chromium.org//1339923003 . --- pkgs/gcloud/test/storage/e2e_test.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 77a4f838..ef3b77b7 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -80,7 +80,7 @@ runTests(Storage storage, Bucket testBucket) { }); }); - group('object', () { + solo_group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. Future withTestBucket(function) { From e86f3ec0a90bbb5186c878cbf3b1668c7867e10e Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 21 Sep 2015 16:16:48 +0200 Subject: [PATCH 086/239] spelling fixes R=sgjesse@google.com Review URL: https://codereview.chromium.org//1355853003 . 
--- pkgs/gcloud/README.md | 4 ++-- pkgs/gcloud/lib/datastore.dart | 15 ++++++++------- pkgs/gcloud/lib/db.dart | 2 +- pkgs/gcloud/lib/http.dart | 2 +- pkgs/gcloud/lib/pubsub.dart | 4 ++-- pkgs/gcloud/lib/src/datastore_impl.dart | 2 +- pkgs/gcloud/lib/src/db/db.dart | 10 +++++----- pkgs/gcloud/lib/src/db/model_db_impl.dart | 4 ++-- pkgs/gcloud/lib/storage.dart | 14 +++++++------- pkgs/gcloud/test/common.dart | 2 +- pkgs/gcloud/test/common_e2e.dart | 2 +- .../test/datastore/e2e/datastore_test_impl.dart | 6 +++--- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 4 ++-- 13 files changed, 36 insertions(+), 35 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 13ab252b..e183bc04 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -1,6 +1,6 @@ ## Google Cloud Platform support package (gcloud) -The `gcloud` package provides a high level "idomatic Dart" interface to +The `gcloud` package provides a high level "idiomatic Dart" interface to some of the most widely used Google Cloud Platform services. Currently the following services are supported: @@ -220,7 +220,7 @@ When running these tests the following environment variables need to be set: GCLOUD_E2E_TEST_PROJECT GCLOUD_E2E_TEST_KEY -The vaule of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name +The value of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name of the Google Cloud project to use. The value of the environment variable `GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) to a JSON key file for a service account providing access to the Cloud Project. diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index a65ec47a..c904cc25 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -27,7 +27,7 @@ Datastore get datastoreService => ss.lookup(_datastoreKey); /// Registers the [Datastore] object within the current service scope. /// -/// The provided `datastore` object will be avilable via the top-level +/// The provided `datastore` object will be available via the top-level /// `datastore` getter. /// /// Calling this function outside of a service scope will result in an error. @@ -114,10 +114,10 @@ class Entity { /// A complete or partial key. /// -/// A key can uniquely identifiy a datastore `Entity`s. It consists of a +/// A key can uniquely identify a datastore `Entity`s. It consists of a /// partition and path. The path consists of one or more `KeyElement`s. /// -/// A key may be incomplete. This is usesfull when inserting `Entity`s which IDs +/// A key may be incomplete. This is usesful when inserting `Entity`s which IDs /// should be automatically allocated. /// /// Example of a fully populated [Key]: @@ -125,7 +125,7 @@ class Entity { /// var fullKey = new Key([new KeyElement('Person', 1), /// new KeyElement('Address', 2)]); /// -/// Example of a partially populated [Key] / an imcomplete [Key]: +/// Example of a partially populated [Key] / an incomplete [Key]: /// /// var partialKey = new Key([new KeyElement('Person', 1), /// new KeyElement('Address', null)]); @@ -254,7 +254,7 @@ class Filter { /// The relation used for comparing `name` with `value`. final FilterRelation relation; - /// The name of the datastore property used in the comparision. + /// The name of the datastore property used in the comparison. final String name; /// The value used for comparing against the property named by `name`. @@ -368,8 +368,9 @@ abstract class Datastore { /// be either added to the datastore or updated. 
/// /// - `autoIdInserts` are [Entity]s which do not have a fully populated [Key] - /// and should be added to the dataset, automatically assiging integer IDs. - /// The returned [CommitResult] will contain the fuly populated keys. + /// and should be added to the dataset, automatically assigning integer + /// IDs. + /// The returned [CommitResult] will contain the fully populated keys. /// /// - `deletes` are a list of fully populated [Key]s which uniquely identify /// the [Entity]s which should be deleted. diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 41282038..ed45625c 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -31,7 +31,7 @@ DatastoreDB get dbService => ss.lookup(_dbKey); /// Registers the [DatastoreDB] object within the current service scope. /// -/// The provided `db` object will be avilable via the top-level `dbService` +/// The provided `db` object will be available via the top-level `dbService` /// getter. /// /// Calling this function outside of a service scope will result in an error. diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index 07950933..0127ab15 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -24,7 +24,7 @@ http.Client get authClientService => ss.lookup(_authenticatedClientKey); /// Registers the [http.Client] object within the current service scope. /// -/// The provided `client` object will be avilable via the top-level +/// The provided `client` object will be available via the top-level /// `authenticatedHttp` getter. /// /// Calling this function outside of a service scope will result in an error. diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 6f8af934..44bf21b1 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -61,7 +61,7 @@ void registerPubSubService(PubSub pubsub) { /// import 'package:gcloud/pubsub.dart'; /// /// Future createClient() { -/// // Service account credentials retreived from Cloud Console. +/// // Service account credentials retrieved from Cloud Console. /// String creds = /// r''' /// { @@ -363,7 +363,7 @@ abstract class Message { /// The binary body is decoded into a String using an UTF-8 decoder. /// /// If the body is not UTF-8 encoded use the [asBytes] getter and manually - /// apply the corect decoding. + /// apply the correct decoding. String get asString; /// The message body as bytes. diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index babe0fea..c5820123 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -634,7 +634,7 @@ class QueryPageImpl implements Page { // In case a limit was specified, we need to subtraction the number of // entities we already got. - // (the checks above guarantee that this subraction is >= 0). + // (the checks above guarantee that this subtraction is >= 0). int remainingEntities; if (limit != null) { remainingEntities = limit - returnedEntities.length; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index c417d1c2..210ee2a6 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -15,7 +15,7 @@ typedef Future TransactionHandler(Transaction transaction); /** * A datastore transaction. * - * It can be used for making lookups/queries and queue modifcations + * It can be used for making lookups/queries and queue modifications * (inserts/updates/deletes). 
Finally the transaction can be either committed * or rolled back. */ @@ -42,7 +42,7 @@ class Transaction { } /** - * Enqueues [inserts] and [deletes] which should be commited at commit time. + * Enqueues [inserts] and [deletes] which should be committed at commit time. */ void queueMutations({List inserts, List deletes}) { _checkSealed(); @@ -162,7 +162,7 @@ class Query { "Invalid filter string '$filterString'."); } - // TODO: do value transformation on [comparisionObject] + // TODO: do value transformation on [comparisonObject] var propertyName = _convertToDatastoreName(parts[0]); _filters.add(new datastore.Filter( @@ -173,7 +173,7 @@ class Query { * Adds an order to this [Query]. * * [orderString] has the form "-name" where 'name' is a fieldName of the model - * and the optional '-' says whether the order is decending or ascending. + * and the optional '-' says whether the order is descending or ascending. */ void order(String orderString) { // TODO: validate [orderString] (e.g. is name valid) @@ -363,7 +363,7 @@ Future _commitHelper(DatastoreDB db, autoIdModelInserts = []; for (var model in inserts) { - // If parent was not explicity set, we assume this model will map to + // If parent was not explicitly set, we assume this model will map to // it's own entity group. if (model.parentKey == null) { model.parentKey = db.defaultPartition.emptyKey; diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 3b44cdec..2a15464d 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -46,7 +46,7 @@ class ModelDBImpl implements ModelDB { /// Initializes a new [ModelDB] from all libraries. /// - /// This will scan the given [librarySymnbol] for classes with a [Kind] + /// This will scan the given [librarySymbol] for classes with a [Kind] /// annotation. /// /// In case an error is encountered (e.g. two model classes with the same kind @@ -268,7 +268,7 @@ class ModelDBImpl implements ModelDB { var properties = new Map(); var propertyNames = new Set(); - // Loop over all classes in the inheritence path up to the Object class. + // Loop over all classes in the inheritance path up to the Object class. while (modelClassMirror.superclass != null) { var memberMap = modelClassMirror.instanceMembers; // Loop over all declarations (which includes fields) diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 42db58dc..59932a92 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -6,7 +6,7 @@ /// /// Google Cloud Storage is an object store for binary objects. Each /// object has a set of metadata attached to it. For more information on -/// Google Cloud Sorage see https://developers.google.com/storage/. +/// Google Cloud Storage see https://developers.google.com/storage/. /// /// There are two main concepts in Google Cloud Storage: Buckets and Objects. /// A bucket is a container for objects and objects are the actual binary @@ -17,7 +17,7 @@ /// The class `Storage` is the main API class providing access to working /// with buckets. This is the 'bucket service' interface. /// -/// The class `Bucket` provide access to working with objcts in a specific +/// The class `Bucket` provide access to working with objects in a specific /// bucket. This is the 'object service' interface. /// /// Both buckets have objects, have names. The bucket namespace is flat and @@ -25,8 +25,8 @@ /// addressable using its name without requiring further context. 
/// /// Within buckets the object namespace is also flat. Object are *not* -/// organized hierachical. However, as object names allow the slash `/` -/// character this is often used to simulate a hierachical structure +/// organized hierarchical. However, as object names allow the slash `/` +/// character this is often used to simulate a hierarchical structure /// based on common prefixes. /// /// This package uses relative and absolute names to refer to objects. A @@ -524,7 +524,7 @@ abstract class Storage { /// Access bucket object operations. /// - /// Instantiates a `Bucket` object refering to the bucket named [bucketName]. + /// Instantiates a `Bucket` object referring to the bucket named [bucketName]. /// /// When an object is created using the resulting `Bucket` an ACL will always /// be set. If the object creation does not pass any explicit ACL information @@ -553,7 +553,7 @@ abstract class Storage { /// /// Provide metadata information for bucket named [bucketName]. /// - /// Returns a [Future] which completes with a `BuckerInfo` object. + /// Returns a [Future] which completes with a `BucketInfo` object. Future bucketInfo(String bucketName); /// List names of all buckets. @@ -685,7 +685,7 @@ class BucketEntry { bool get isDirectory => !isObject; } -/// Access to operations on a specific cloud storage buket. +/// Access to operations on a specific cloud storage bucket. abstract class Bucket { /// Name of this bucket. String get bucketName; diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 166d1034..689b72d8 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -234,7 +234,7 @@ class TraceClient extends http.BaseClient { } } -// http.BaseRequest implementationn used by the TraceClient. +// http.BaseRequest implementation used by the TraceClient. class RequestImpl extends http.BaseRequest { final List _body; diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 4df65f65..aa75dae5 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -15,7 +15,7 @@ import 'common.dart'; const PROJECT = 'test-project'; -// Enviroment variables for specifying the cloud project to use and the +// Environment variables for specifying the cloud project to use and the // location of the service account key for that project. 
const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; const String SERVICE_KEY_LOCATION_ENV = 'GCLOUD_E2E_TEST_KEY'; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 1f9e89dd..1e54f025 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -457,7 +457,7 @@ runTests(Datastore datastore, String namespace) { Future testRollback(List keys, {bool xg: false}) { return withTransaction((Transaction transaction) { return datastore.lookup(keys, transaction: transaction) - .then((List entitites) { + .then((List entities) { return datastore.rollback(transaction); }); }, xg: xg); @@ -485,7 +485,7 @@ runTests(Datastore datastore, String namespace) { List keys, {bool transactional: false, bool xg: false}) { Future test(Transaction transaction) { return datastore.lookup(keys, transaction: transaction) - .then((List entitites) { + .then((List entities) { return datastore.commit(transaction: transaction); }); } @@ -869,7 +869,7 @@ runTests(Datastore datastore, String namespace) { }); }); - // TODO: query by multiple keys, multiple sort oders, ... + // TODO: query by multiple keys, multiple sort orders, ... }); test('ancestor_query', () { diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 0d25407a..1e376066 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -336,7 +336,7 @@ runTests(db.DatastoreDB store, String namespace) { ..age = 83 ..name = 'user83'); return store.commit(inserts: persons).then(expectAsync((_) { - // At this point, autoIds are allocated and are relfected in the + // At this point, autoIds are allocated and are reflected in the // models (as well as parentKey if it was empty). var keys = persons.map((db.Model obj) => obj.key).toList(); @@ -367,7 +367,7 @@ runTests(db.DatastoreDB store, String namespace) { return store.lookup(keys).then(expectAsync((List models) { // Since the id/parentKey fields are set after commit and a lookup - // returns new model instances, we can do full model comparision + // returns new model instances, we can do full model comparison // here. compareModels(persons, models); return store.commit(deletes: keys).then(expectAsync((_) { From 7400da03182bd88ea7f9ed0e307d9dc0078f9c63 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 21 Sep 2015 16:19:03 +0200 Subject: [PATCH 087/239] widen constraints on gcloudapis[_beta] packages R=kustermann@google.com Review URL: https://codereview.chromium.org//1352453006 . --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 11aa35e1..a64441bb 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+8 + +* Widen constraint on `googleapis` and `googleapis_beta`. + ## 0.2.0+4 * `Storage.read` now honors `offset` and `length` arguments. 
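A note on the e2e configuration referenced in the README and `common_e2e.dart` hunks above (`GCLOUD_E2E_TEST_PROJECT` and `GCLOUD_E2E_TEST_KEY`): a test harness would typically read these through `dart:io`'s `Platform.environment`. The snippet below is only an illustration of that lookup, not the code in `common_e2e.dart`.

```dart
import 'dart:io';

// Sketch: resolve the e2e test settings from the environment variables
// documented in the README.
String requireEnv(String name) {
  var value = Platform.environment[name];
  if (value == null) {
    throw new StateError('$name must be set to run the e2e tests.');
  }
  return value;
}

main() {
  var project = requireEnv('GCLOUD_E2E_TEST_PROJECT'); // cloud project name
  var keyPath = requireEnv('GCLOUD_E2E_TEST_KEY');     // gs://... path to a JSON key file
  print('Running e2e tests against $project with key $keyPath');
}
```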
diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index cbf41732..beb3b048 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+7 +version: 0.2.0+8 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From f52a28fca78341fe576628c2d77a41a0b946124a Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Tue, 22 Sep 2015 16:44:28 +0200 Subject: [PATCH 088/239] README cleanup The original changes are from @lukechurch, I just squashed them into one commit --- pkgs/gcloud/README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index e183bc04..21fd4e44 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -22,13 +22,14 @@ The code snippets below demonstrating the use of this package all assume that the following imports are present: ```dart +import 'dart:io'; import 'package:googleapis_auth/auth_io.dart' as auth; import 'package:http/http.dart' as http; import 'package:gcloud/db.dart'; import 'package:gcloud/storage.dart'; import 'package:gcloud/pubsub.dart'; import 'package:gcloud/service_scope.dart' as ss; -import 'package:gcloud/src/datastore_impl.dart'; +import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; ``` ### Getting access to the APIs @@ -46,15 +47,15 @@ var credentials = new auth.ServiceAccountCredentials.fromJson(jsonCredentials); // Get an HTTP authenticated client using the service account credentials. var scopes = [] - ..addAll(dastore_impl.DatastoreImpl.SCOPES); + ..addAll(datastore_impl.DatastoreImpl.SCOPES) ..addAll(Storage.SCOPES) - ..addAll(PubSub.SCOPES) -var client = await auth.clientViaServiceAccount(creds, scopes); + ..addAll(PubSub.SCOPES); +var client = await auth.clientViaServiceAccount(credentials, scopes); // Instantiate objects to access Cloud Datastore, Cloud Storage // and Cloud Pub/Sub APIs. var db = new DatastoreDB( - new dastore_impl.DatastoreImpl(client, 's~my-project')); + new datastore_impl.DatastoreImpl(client, 's~my-project')); var storage = new Storage(client, 'my-project'); var pubsub = new PubSub(client, 'my-project'); ``` From 4930a0cfb893f68194f3e0af015a2d4a9e59998c Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Fri, 23 Oct 2015 17:22:03 +0200 Subject: [PATCH 089/239] Update README.md to fix minor dart code R=sgjesse@google.com Review URL: https://codereview.chromium.org//1416353003 . --- pkgs/gcloud/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 21fd4e44..f74b3901 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -69,7 +69,7 @@ ss.fork(() { registerDbService(db); registerStorageService(storage); registerPubSubService(pubsub); - + // Run application using these services. }); ``` @@ -188,14 +188,14 @@ await topic.publishString('Hello, world!') With the following code a _subscription_ is created on the _topic_ and a message is pulled using the subscription. A received message must be -acknowledged when the consumer has processed it. +acknowledged when the consumer has processed it. 
```dart var subscription = - await pubsub.createSubscription('my-subscription', 'my-topic); + await pubsub.createSubscription('my-subscription', 'my-topic'); var pullEvent = await subscription.pull(); print(pullEvent.message.asString); -await pullEvent.acknowledge() +await pullEvent.acknowledge(); ``` It is also possible to receive messages using push events instead of pulling @@ -224,7 +224,7 @@ When running these tests the following environment variables need to be set: The value of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name of the Google Cloud project to use. The value of the environment variable `GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) -to a JSON key file for a service account providing access to the Cloud Project. +to a JSON key file for a service account providing access to the Cloud Project. [Datastore]: https://cloud.google.com/datastore/ [GCS]: https://cloud.google.com/storage/ From 03f413d46ee484c893bc3c447dd1d0f7d73fd3ef Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 14 Dec 2015 10:28:27 +0100 Subject: [PATCH 090/239] Implement support for converting db values to datastore values with db.Query.filter Closes dart-lang/gcloud#37 R=sgjesse@google.com Review URL: https://codereview.chromium.org//1520863002 . --- pkgs/gcloud/CHANGELOG.md | 5 + pkgs/gcloud/lib/src/datastore_impl.dart | 2 +- pkgs/gcloud/lib/src/db/db.dart | 25 +++- pkgs/gcloud/lib/src/db/model_db.dart | 5 + pkgs/gcloud/lib/src/db/model_db_impl.dart | 24 ++- pkgs/gcloud/pubspec.yaml | 6 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 175 +++++++++++++--------- 7 files changed, 161 insertions(+), 81 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index a64441bb..5220911d 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.2.0+9 + +* Support value transformation in `db.query().filter()`. +* Widen constraint on `googleapis` and `googleapis_beta`. + ## 0.2.0+8 * Widen constraint on `googleapis` and `googleapis_beta`. diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index c5820123..9229a36f 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -312,7 +312,7 @@ class DatastoreImpl implements datastore.Datastore { if (value is List && value.length == 1) { value = value.first; } else { - throw new ArgumentError('List values not supported'); + throw new ArgumentError('List values not supported (was: $value).'); } } diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 210ee2a6..d1a79783 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -162,11 +162,30 @@ class Query { "Invalid filter string '$filterString'."); } - // TODO: do value transformation on [comparisonObject] + var name = parts[0]; + var comparison = parts[1]; + var propertyName = _convertToDatastoreName(name); + + // This is for backwards compatibility: We allow [datastore.Key]s for now. + // TODO: We should remove the condition in a major version update of + // `package:gcloud`. + if (comparisonObject is! datastore.Key) { + var encoded = _db.modelDB.toDatastoreValue(_kind, name, comparisonObject); + + // We encode Lists as repeated properties normally, and the encoding of + // `['abc']` will just be `'abc'` (see [ListProperty]). + // But for IN filters, we need to treat them as lists. 
+ if (comparison == 'IN' && + comparisonObject is List && + comparisonObject.length == 1 && + encoded is! List) { + encoded = [encoded]; + } - var propertyName = _convertToDatastoreName(parts[0]); + comparisonObject = encoded; + } _filters.add(new datastore.Filter( - _relationMapping[parts[1]], propertyName, comparisonObject)); + _relationMapping[comparison], propertyName, comparisonObject)); } /** diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 0655ac3a..8408ac53 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -40,4 +40,9 @@ abstract class ModelDB { */ // TODO: Get rid of this eventually. String fieldNameToPropertyName(String kind, String fieldName); + + /** + * Converts [value] according to the [Property] named [fieldName] in [type]. + */ + Object toDatastoreValue(Type type, String fieldName, Object value); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 2a15464d..d4b02346 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -144,11 +144,19 @@ class ModelDBImpl implements ModelDB { String fieldNameToPropertyName(String kind, String fieldName) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { - throw new ArgumentError('The kind $kind is unknown.'); + throw new ArgumentError('The kind "$kind" is unknown.'); } return modelDescription.fieldNameToPropertyName(fieldName); } + /// Converts [value] according to the [Property] named [name] in [type]. + Object toDatastoreValue(String kind, String fieldName, Object value) { + var modelDescription = _kind2ModelDesc[kind]; + if (modelDescription == null) { + throw new ArgumentError('The kind "$kind" is unknown.'); + } + return modelDescription.encodeField(this, fieldName, value); + } Iterable<_ModelDescription> get _modelDescriptions { return _modelDesc2Type.values; @@ -443,9 +451,16 @@ class _ModelDescription { return _property2FieldName[propertySearchName]; } - Object encodeField(ModelDBImpl db, String fieldName, Object value) { + Object encodeField(ModelDBImpl db, String fieldName, Object value, + {bool enforceFieldExists: true}) { Property property = db._propertiesForModel(this)[fieldName]; - if (property != null) return property.encodeValue(db, value); + if (property != null) { + return property.encodeValue(db, value); + } + if (enforceFieldExists) { + throw new ArgumentError( + 'A field named "$fieldName" does not exist in kind "$kind".'); + } return null; } } @@ -516,7 +531,8 @@ class _ExpandoModelDescription extends _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value) { - Object primitiveValue = super.encodeField(db, fieldName, value); + Object primitiveValue = super.encodeField(db, fieldName, value, + enforceFieldExists: false); // If superclass can't encode field, we return value here (and assume // it's primitive) // NOTE: Implicit assumption: diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index beb3b048..d53151f4 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+8 +version: 0.2.0+9 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.17.0' - googleapis_beta: '>=0.10.0 <0.20.0' + googleapis: '>=0.2.0 <0.21.0' + googleapis_beta: 
'>=0.10.0 <0.23.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 1e376066..ce84c26a 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -61,7 +61,7 @@ class Person extends db.Model { @db.IntProperty() int age; - @db.ModelKeyProperty() + @db.ModelKeyProperty(propertyName: 'mangledWife') db.Key wife; operator==(Object other) => sameAs(other); @@ -138,11 +138,7 @@ class ExpandoPerson extends db.ExpandoModel { } -Future sleep(Duration duration) { - var completer = new Completer(); - new Timer(duration, completer.complete); - return completer.future; -} +Future sleep(Duration duration) => new Future.delayed(duration); runTests(db.DatastoreDB store, String namespace) { var partition = store.newPartition(namespace); @@ -395,6 +391,7 @@ runTests(db.DatastoreDB store, String namespace) { users.add(new User() ..id = i ..parentKey = root + ..wife = root.append(User, id: 42 + i) ..age = 42 + i ..name = 'user$i' ..nickname = 'nickname${i%3}' @@ -445,6 +442,8 @@ runTests(db.DatastoreDB store, String namespace) { (User u) => u.languages.contains('foo')).toList(); var barUsers = users.where( (User u) => u.languages.contains('bar')).toList(); + var usersWithWife = users.where( + (User u) => u.wife == root.append(User, id: 42 + 3)); var allInserts = [] ..addAll(users) @@ -471,112 +470,127 @@ runTests(db.DatastoreDB store, String namespace) { }, // Sorted query - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..order('-name') - ..order('nickname') - ..run().toList().then((List models) { - compareModels( - usersSortedNameDescNicknameAsc, models); - }); + ..order('nickname'); + var models = await runQueryWithExponentialBackoff( + query, usersSortedNameDescNicknameAsc.length); + compareModels( + usersSortedNameDescNicknameAsc, models); }, - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..order('-name') ..order('-nickname') - ..run().toList().then((List models) { - compareModels( - usersSortedNameDescNicknameDesc, models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff( + query, usersSortedNameDescNicknameDesc.length); + compareModels( + usersSortedNameDescNicknameDesc, models); }, // Sorted query with filter - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') - ..order('nickname') - ..run().toList().then((List models) { - compareModels(usersSortedAndFilteredNameDescNicknameAsc, - models); - }); + ..order('nickname'); + var models = await runQueryWithExponentialBackoff( + query, usersSortedAndFilteredNameDescNicknameAsc.length); + compareModels(usersSortedAndFilteredNameDescNicknameAsc, + models); }, - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('-nickname') - ..run().toList().then((List models) { - compareModels(usersSortedAndFilteredNameDescNicknameDesc, - models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff( + query, usersSortedAndFilteredNameDescNicknameDesc.length); + compareModels(usersSortedAndFilteredNameDescNicknameDesc, + models); }, // Filter lists /* FIXME: TODO: FIXME: 
"IN" not supported in public proto/apiary */ - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..filter('languages IN', ['foo']) ..order('name') - ..run().toList().then((List models) { - compareModels(fooUsers, models, anyOrder: true); - }); + ..run(); + var models = await runQueryWithExponentialBackoff( + query, fooUsers.length); + compareModels(fooUsers, models, anyOrder: true); }, - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..filter('languages IN', ['bar']) ..order('name') - ..run().toList().then((List models) { - compareModels(barUsers, models, anyOrder: true); - }); + ..run(); + var models = await runQueryWithExponentialBackoff( + query, barUsers.length); + compareModels(barUsers, models, anyOrder: true); + }, + + // Filter equals + () async { + var wifeKey = root.append(User, id: usersWithWife.first.wife.id); + var query = store.query(User, partition: partition) + ..filter('wife =', wifeKey) + ..run(); + var models = await runQueryWithExponentialBackoff( + query, usersWithWife.length); + compareModels(usersWithWife, models, anyOrder: true); }, // Simple limit/offset test. - () { - return store.query(User, partition: partition) + () async { + var query = store.query(User, partition: partition) ..order('-name') ..order('nickname') ..offset(3) - ..limit(4) - ..run().toList().then((List models) { - var expectedModels = - usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); - compareModels(expectedModels, models); - }); + ..limit(4); + var expectedModels = + usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); + var models = await runQueryWithExponentialBackoff( + query, expectedModels.length); + compareModels(expectedModels, models); }, // Expando queries: Filter on normal property. - () { - return store.query(ExpandoPerson, partition: partition) + () async { + var query = store.query(ExpandoPerson, partition: partition) ..filter('name =', expandoPersons.last.name) - ..run().toList().then((List models) { - compareModels([expandoPersons.last], models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff(query, 1); + compareModels([expandoPersons.last], models); }, // Expando queries: Filter on expanded String property - () { - return store.query(ExpandoPerson, partition: partition) + () async { + var query = store.query(ExpandoPerson, partition: partition) ..filter('foo =', expandoPersons.last.foo) - ..run().toList().then((List models) { - compareModels([expandoPersons.last], models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff(query, 1); + compareModels([expandoPersons.last], models); }, // Expando queries: Filter on expanded int property - () { - return store.query(ExpandoPerson, partition: partition) + () async { + var query = store.query(ExpandoPerson, partition: partition) ..filter('bar =', expandoPersons.last.bar) - ..run().toList().then((List models) { - compareModels([expandoPersons.last], models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff(query, 1); + compareModels([expandoPersons.last], models); }, // Expando queries: Filter normal property with different // propertyName (datastore name is 'NN'). 
- () { - return store.query(ExpandoPerson, partition: partition) + () async { + var query = store.query(ExpandoPerson, partition: partition) ..filter('nickname =', expandoPersons.last.nickname) - ..run().toList().then((List models) { - compareModels([expandoPersons.last], models); - }); + ..run(); + var models = await runQueryWithExponentialBackoff(query, 1); + compareModels([expandoPersons.last], models); }, // Delete results @@ -600,6 +614,27 @@ runTests(db.DatastoreDB store, String namespace) { }); } +Future> runQueryWithExponentialBackoff( + db.Query query, int expectedResults) async { + for (int i = 0; i <= 6; i++) { + if (i > 0) { + // Wait for 0.1s, 0.2s, ..., 12.8s + var duration = new Duration(milliseconds: 100 * (2 << i)); + print("Running query did return less results than expected." + "Using exponential backoff: Sleeping for $duration."); + await sleep(duration); + } + + List models = await query.run().toList(); + if (models.length >= expectedResults) { + return models; + } + } + + throw new Exception( + "Tried running a query with exponential backoff, giving up now."); +} + Future waitUntilEntitiesReady(db.DatastoreDB mdb, List keys, db.Partition partition) { From 7ad652cbf910d4cd9c2efdbb0430ea7fdbaa1f0b Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 17 Dec 2015 13:08:35 +0100 Subject: [PATCH 091/239] Address analyzer warnings BUG= R=sgjesse@google.com Review URL: https://codereview.chromium.org//1535683002 . --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/db/model_db.dart | 4 ++-- pkgs/gcloud/lib/src/db/model_db_impl.dart | 5 ++++- pkgs/gcloud/pubspec.yaml | 2 +- 4 files changed, 11 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 5220911d..de838c9a 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+10 + +* Address analyzer warnings. + ## 0.2.0+9 * Support value transformation in `db.query().filter()`. diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 8408ac53..0c18e39a 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -42,7 +42,7 @@ abstract class ModelDB { String fieldNameToPropertyName(String kind, String fieldName); /** - * Converts [value] according to the [Property] named [fieldName] in [type]. + * Converts [value] according to the [Property] named [fieldName] in [kind]. */ - Object toDatastoreValue(Type type, String fieldName, Object value); + Object toDatastoreValue(String kind, String fieldName, Object value); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index d4b02346..93424227 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -530,7 +530,10 @@ class _ExpandoModelDescription extends _ModelDescription { return fieldName; } - Object encodeField(ModelDBImpl db, String fieldName, Object value) { + Object encodeField(ModelDBImpl db, String fieldName, Object value, + {bool enforceFieldExists: true}) { + // The [enforceFieldExists] argument is intentionally ignored. 
+ Object primitiveValue = super.encodeField(db, fieldName, value, enforceFieldExists: false); // If superclass can't encode field, we return value here (and assume diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index d53151f4..9950fd1d 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+9 +version: 0.2.0+10 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 9af6b0ff8a60e1161661db0b96110c29220a2218 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Tue, 8 Mar 2016 18:38:47 +0100 Subject: [PATCH 092/239] Throw a StateError in case no model was registered for a given kind Review URL: https://codereview.chromium.org//1774183002 . --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/lib/src/db/model_db_impl.dart | 7 ++++++- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index de838c9a..63a786e7 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.2.0+11 + +* Throw a [StateError] in case a query returned a kind for which there was no + model registered. + ## 0.2.0+10 * Address analyzer warnings. diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 93424227..dc26f134 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -61,7 +61,12 @@ class ModelDBImpl implements ModelDB { Key key = namespace.emptyKey; for (var element in datastoreKey.elements) { var type = _type2ModelDesc[_kind2ModelDesc[element.kind]]; - assert (type != null); + if (type == null) { + throw new StateError( + 'Could not find a model associated with kind "${element.kind}". ' + 'Please ensure a model class was annotated with ' + '`@Kind(name: "${element.kind}")`.'); + } key = key.append(type, id: element.id); } return key; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 9950fd1d..fcb3986b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+10 +version: 0.2.0+11 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 3a40438d146c679683b60bb94645d23f0bfbb239 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Gjesse?= Date: Mon, 11 Apr 2016 09:40:45 +0200 Subject: [PATCH 093/239] Update dependencies on googleapis/googlepais_beta packages Closes dart-lang/gcloud#38 TBR=kustermann@google.com BUG= https://github.com/dart-lang/gcloud/issues/38 Review URL: https://codereview.chromium.org//1877763003 . 
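Two of the patches above touch how `package:gcloud/db.dart` maps datastore kinds to Dart classes. PATCH 092 replaces a bare `assert` with a descriptive `StateError` when a query or lookup returns a kind that has no registered model, and the error text points at the `Kind` annotation. Below is a minimal sketch of a model class that registers the kind `Person`; the field names mirror the e2e test models earlier in this series, and this is not the package's own code.

```dart
import 'package:gcloud/db.dart' as db;

// Sketch: a class annotated with @db.Kind is what the ModelDB scans for.
// If entities of kind 'Person' come back from the datastore and no such
// class is registered, the lookup now fails with the StateError added in
// PATCH 092 instead of an assertion failure.
@db.Kind(name: 'Person')
class Person extends db.Model {
  @db.StringProperty()
  String name;

  @db.IntProperty()
  int age;

  @db.ModelKeyProperty(propertyName: 'mangledWife')
  db.Key wife;
}
```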
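Relatedly, PATCH 090 above adds value transformation to `db.Query.filter()`: the filter value is now encoded with the corresponding `Property` before the datastore filter is built, so callers can pass plain Dart values and `db.Key`s directly, as the updated e2e test does. A sketch using the `Person` class from the previous snippet (the age threshold is arbitrary):

```dart
import 'dart:async';

import 'package:gcloud/db.dart' as db;

// Sketch: the int is encoded by IntProperty and the db.Key by
// ModelKeyProperty before the comparison reaches the datastore.
Future<List> adultsMarriedTo(db.DatastoreDB store, db.Key wifeKey) {
  var query = store.query(Person)
    ..filter('age >=', 18)
    ..filter('wife =', wifeKey)
    ..order('-age');
  return query.run().toList();
}
```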
--- pkgs/gcloud/pubspec.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index fcb3986b..b2e30e56 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+11 +version: 0.2.0+12 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud @@ -7,8 +7,8 @@ environment: sdk: '>=1.9.1 <2.0.0' dependencies: crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.21.0' - googleapis_beta: '>=0.10.0 <0.23.0' + googleapis: '>=0.2.0 <0.25.0' + googleapis_beta: '>=0.10.0 <0.25.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' From 60370d147dad78b0eb42e850b1d593f2e55e507f Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Mon, 11 Apr 2016 16:37:07 +0200 Subject: [PATCH 094/239] Removing crypto and upgrading dart sdk and googleapis deps --- pkgs/gcloud/CHANGELOG.md | 6 ++++++ pkgs/gcloud/lib/pubsub.dart | 1 - pkgs/gcloud/lib/src/pubsub_impl.dart | 2 +- pkgs/gcloud/lib/src/storage_impl.dart | 5 ++--- pkgs/gcloud/lib/storage.dart | 2 +- pkgs/gcloud/pubspec.yaml | 9 ++++----- pkgs/gcloud/test/common.dart | 3 +-- 7 files changed, 15 insertions(+), 13 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 63a786e7..5f7f1ec0 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,9 @@ +## 0.2.0+12 + +* Remove crypto dependency and upgrade dart dependency to >=1.13 since + this dart version provides the Base64 codec. + + ## 0.2.0+11 * Throw a [StateError] in case a query returned a kind for which there was no diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 44bf21b1..7e251b49 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -7,7 +7,6 @@ library gcloud.pubsub; import 'dart:async'; import 'dart:collection'; import 'dart:convert'; -import 'package:crypto/crypto.dart'; import 'package:http/http.dart' as http; import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 6a4fc905..5f95f884 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -268,7 +268,7 @@ class _PushMessage implements Message { _PushMessage(this._base64Message, this.attributes); - List get asBytes => CryptoUtils.base64StringToBytes(_base64Message); + List get asBytes => BASE64.decode(_base64Message); String get asString => UTF8.decode(asBytes); } diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 21ac9707..0be49fe9 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -374,11 +374,10 @@ class _ObjectInfoImpl implements ObjectInfo { String get etag => _object.etag; - List get md5Hash => - crypto.CryptoUtils.base64StringToBytes(_object.md5Hash); + List get md5Hash => BASE64.decode(_object.md5Hash); int get crc32CChecksum { - var list = crypto.CryptoUtils.base64StringToBytes(_object.crc32c); + var list = BASE64.decode(_object.crc32c); return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 59932a92..c525b9f6 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -49,11 +49,11 @@ library gcloud.storage; import 'dart:async'; +import 'dart:convert'; import 'dart:collection' show UnmodifiableListView, 
UnmodifiableMapView; import 'package:http/http.dart' as http; -import 'package:crypto/crypto.dart' as crypto; import 'package:googleapis/storage/v1.dart' as storage_api; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b2e30e56..47fff771 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -4,15 +4,14 @@ author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: - sdk: '>=1.9.1 <2.0.0' + sdk: '>=1.13.0 <2.0.0' dependencies: - crypto: '>=0.9.0 <0.10.0' - googleapis: '>=0.2.0 <0.25.0' - googleapis_beta: '>=0.10.0 <0.25.0' + googleapis: '>=0.2.0 <0.26.0' + googleapis_beta: '>=0.10.0 <0.26.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.1.1 <0.3.0' - http_parser: '>=0.0.2+5 <2.0.0' + http_parser: '>=2.0.0 <3.0.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 689b72d8..d9bcb277 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -5,7 +5,6 @@ import 'dart:async'; import 'dart:convert'; -import 'package:crypto/crypto.dart' as crypto; import 'package:http/http.dart' as http; import 'package:http/testing.dart' as http_testing; import 'package:http_parser/http_parser.dart' as http_parser; @@ -177,7 +176,7 @@ class MockClient extends http.BaseClient { mimeMultipart .transform(ASCII.decoder) .fold('', (p, e) => '$p$e') - .then(crypto.CryptoUtils.base64StringToBytes) + .then(BASE64.decode) .then((bytes) { completer.complete( new NormalMediaUpload(json, bytes, contentType)); From d290d3de968f644236e3ef4465301c4736475fca Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Mon, 11 Apr 2016 17:13:48 +0200 Subject: [PATCH 095/239] Upgrade googleapis_auth dependency --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 47fff771..fa0f8524 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -10,7 +10,7 @@ dependencies: googleapis_beta: '>=0.10.0 <0.26.0' http: '>=0.11.0 <0.12.0' dev_dependencies: - googleapis_auth: '>=0.1.1 <0.3.0' + googleapis_auth: '>=0.3.0 <0.4.0' http_parser: '>=2.0.0 <3.0.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' From 28faa9864a29938d868bf8de026112d5805c6028 Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Mon, 11 Apr 2016 17:18:26 +0200 Subject: [PATCH 096/239] Remove crypto dependency in pubsub test --- pkgs/gcloud/test/pubsub/pubsub_test.dart | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index e12a6415..9953231a 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -5,7 +5,6 @@ import 'dart:async'; import 'dart:convert'; -import 'package:crypto/crypto.dart' as crypto; import 'package:http/http.dart' as http; import 'package:unittest/unittest.dart'; @@ -812,7 +811,7 @@ main() { var absoluteName = 'projects/$PROJECT/topics/test-topic'; var message = 'Hello, world!'; var messageBytes = UTF8.encode(message); - var messageBase64 = crypto.CryptoUtils.bytesToBase64(messageBytes); + var messageBase64 = BASE64.encode(messageBytes); var attributes = {'a': '1', 'b': 'text'}; registerLookup(mock) { From 9c4e4fd56215839def4d01c9cb58fa6a7c0212e5 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 8 Jun 2016 19:50:52 +0200 Subject: [PATCH 
097/239] Roll versions of googleapis/googleapis_beta, fix googleapis_auth dependency Review URL: https://codereview.chromium.org//2045843004 . --- pkgs/gcloud/CHANGELOG.md | 1 - pkgs/gcloud/pubspec.yaml | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 5f7f1ec0..aca13f81 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -3,7 +3,6 @@ * Remove crypto dependency and upgrade dart dependency to >=1.13 since this dart version provides the Base64 codec. - ## 0.2.0+11 * Throw a [StateError] in case a query returned a kind for which there was no diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index fa0f8524..fe2a02e6 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -6,11 +6,11 @@ homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.13.0 <2.0.0' dependencies: - googleapis: '>=0.2.0 <0.26.0' - googleapis_beta: '>=0.10.0 <0.26.0' + googleapis: '>=0.2.0 <0.29.0' + googleapis_beta: '>=0.10.0 <0.28.0' http: '>=0.11.0 <0.12.0' dev_dependencies: - googleapis_auth: '>=0.3.0 <0.4.0' + googleapis_auth: '>=0.2.3 <0.3.0' http_parser: '>=2.0.0 <3.0.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' From 0dfa9f48862645101f1c9678f4420fd2aa255cdc Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 8 Jun 2016 19:50:52 +0200 Subject: [PATCH 098/239] Update pubspec.yaml/changelog. --- pkgs/gcloud/CHANGELOG.md | 2 +- pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index aca13f81..eb561f15 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.2.0+12 +## 0.2.0+13 * Remove crypto dependency and upgrade dart dependency to >=1.13 since this dart version provides the Base64 codec. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index fe2a02e6..e5a79077 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+12 +version: 0.2.0+13 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 8135b6316c3b24cc980d7a07c1fce152027cbe36 Mon Sep 17 00:00:00 2001 From: William Hesse Date: Thu, 18 Aug 2016 16:24:38 +0200 Subject: [PATCH 099/239] Implement missing interface member in mock ModelDB This fixes an analyzer warning BUG= R=kustermann@google.com Review URL: https://codereview.chromium.org//2261453002 . --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/db/properties_test.dart | 1 + 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index eb561f15..70e57675 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.2.0+14 + +* Fix analyzer warning. 
+ ## 0.2.0+13 * Remove crypto dependency and upgrade dart dependency to >=1.13 since diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e5a79077..29df7fb3 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.2.0+13 +version: 0.2.0+14 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index d0ae074f..eed59e9e 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -238,4 +238,5 @@ class ModelDBMock implements ModelDB { datastore.Entity toDatastoreEntity(Model model) => null; String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; + Object toDatastoreValue(String kind, String fieldName, Object value) => null; } From 31afc0c8f18a6921b2c15803e2bceb5c75c7d9a7 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 29 Aug 2016 18:24:02 +0200 Subject: [PATCH 100/239] Upgrade to use stable `package:googleapis/v1.dart` of datastore BUG=https://github.com/dart-lang/gcloud/issues/41 R=sgjesse@google.com, whesse@google.com Review URL: https://codereview.chromium.org//2285803002 . --- pkgs/gcloud/CHANGELOG.md | 7 + pkgs/gcloud/lib/db.dart | 2 +- pkgs/gcloud/lib/src/datastore_impl.dart | 227 ++++++------------ pkgs/gcloud/pubspec.yaml | 8 +- .../datastore/e2e/datastore_test_impl.dart | 4 +- pkgs/gcloud/test/datastore/e2e/utils.dart | 1 + pkgs/gcloud/test/db/e2e/db_test_impl.dart | 2 +- .../test/db/e2e/metamodel_test_impl.dart | 2 +- pkgs/gcloud/test/db_all_e2e_test.dart | 2 +- 9 files changed, 95 insertions(+), 160 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 70e57675..573451cb 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,10 @@ +## 0.3.0 + +* Upgrade to use stable `package:googleapis/datastore/v1.dart`. + +* The internal [DatastoreImpl] class takes now a project name without the `s~` + prefix. + ## 0.2.0+14 * Fix analyzer warning. 
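The CHANGELOG hunk above records a breaking detail of the move to the stable datastore v1 API: `DatastoreImpl` now expects the bare cloud project name, without the `s~` prefix used in the README example earlier in this series. A minimal sketch of the calling side, with a placeholder project name:

```dart
import 'package:gcloud/db.dart';
import 'package:gcloud/src/datastore_impl.dart' as datastore_impl;
import 'package:http/http.dart' as http;

DatastoreDB createDb(http.Client authenticatedClient) {
  // Before 0.3.0 the apiary-style id was passed: 's~my-project'.
  // From 0.3.0 on, pass the plain project name.
  var datastore =
      new datastore_impl.DatastoreImpl(authenticatedClient, 'my-project');
  return new DatastoreDB(datastore);
}
```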
diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index ed45625c..50540238 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -8,7 +8,7 @@ import 'dart:async'; import 'dart:collection'; import 'dart:mirrors' as mirrors; -import 'common.dart' show Page, StreamFromPages; +import 'common.dart' show StreamFromPages; import 'service_scope.dart' as ss; import 'datastore.dart' as datastore; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 9229a36f..edb7e7a5 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -10,7 +10,7 @@ import 'package:http/http.dart' as http; import '../datastore.dart' as datastore; import '../common.dart' show Page; -import 'package:googleapis_beta/datastore/v1beta2.dart' as api; +import 'package:googleapis/datastore/v1.dart' as api; class TransactionImpl implements datastore.Transaction { final String data; @@ -20,24 +20,26 @@ class TransactionImpl implements datastore.Transaction { class DatastoreImpl implements datastore.Datastore { static const List SCOPES = const [ api.DatastoreApi.DatastoreScope, - api.DatastoreApi.UserinfoEmailScope, + api.DatastoreApi.CloudPlatformScope, ]; final api.DatastoreApi _api; final String _project; - DatastoreImpl(http.Client client, this._project) - : _api = new api.DatastoreApi(client); + /// The [project] parameter is the name of the cloud project (it should not + /// start with a `s~`). + DatastoreImpl(http.Client client, String project) + : _api = new api.DatastoreApi(client), _project = project; api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId: true}) { var apiKey = new api.Key(); apiKey.partitionId = new api.PartitionId() - ..datasetId = _project - ..namespace = key.partition.namespace; + ..projectId = _project + ..namespaceId = key.partition.namespace; apiKey.path = key.elements.map((datastore.KeyElement element) { - var part = new api.KeyPathElement(); + var part = new api.PathElement(); part.kind = element.kind; if (element.id is int) { part.id = '${element.id}'; @@ -55,7 +57,7 @@ class DatastoreImpl implements datastore.Datastore { } static datastore.Key _convertApi2DatastoreKey(api.Key key) { - var elements = key.path.map((api.KeyPathElement element) { + var elements = key.path.map((api.PathElement element) { if (element.id != null) { return new datastore.KeyElement(element.kind, int.parse(element.id)); } else if (element.name != null) { @@ -68,7 +70,7 @@ class DatastoreImpl implements datastore.Datastore { var partition; if (key.partitionId != null) { - partition = new datastore.Partition(key.partitionId.namespace); + partition = new datastore.Partition(key.partitionId.namespaceId); // TODO: assert projectId. } return new datastore.Key(elements, partition: partition); @@ -80,8 +82,8 @@ class DatastoreImpl implements datastore.Datastore { // FIXME(Issue #2): Is this comparison working correctly? 
if (a.partitionId != null) { if (b.partitionId == null) return false; - if (a.partitionId.datasetId != b.partitionId.datasetId) return false; - if (a.partitionId.namespace != b.partitionId.namespace) return false; + if (a.partitionId.projectId != b.partitionId.projectId) return false; + if (a.partitionId.namespaceId != b.partitionId.namespaceId) return false; } else { if (b.partitionId != null) return false; } @@ -94,37 +96,13 @@ class DatastoreImpl implements datastore.Datastore { return true; } - static _convertApi2DatastorePropertyValue(api.Value value) { - if (value.booleanValue != null) - return value.booleanValue; - else if (value.integerValue != null) - return int.parse(value.integerValue); - else if (value.doubleValue != null) - return value.doubleValue; - else if (value.stringValue != null) - return value.stringValue; - else if (value.dateTimeValue != null) - return value.dateTimeValue; - else if (value.blobValue != null) - return new datastore.BlobValue(value.blobValueAsBytes); - else if (value.keyValue != null) - return _convertApi2DatastoreKey(value.keyValue); - else if (value.listValue != null) - // FIXME(Issue #3): Consistently handle exceptions. - throw new Exception('Cannot have lists inside lists.'); - else if (value.blobKeyValue != null) - throw new UnsupportedError('Blob keys are not supported.'); - else if (value.entityValue != null) - throw new UnsupportedError('Entity values are not supported.'); - return null; - } - api.Value _convertDatastore2ApiPropertyValue( value, bool indexed, {bool lists: true}) { var apiValue = new api.Value() - ..indexed = indexed; + ..excludeFromIndexes = !indexed; if (value == null) { - return apiValue; + return apiValue + ..nullValue = "NULL_VALUE"; } else if (value is bool) { return apiValue ..booleanValue = value; @@ -139,7 +117,7 @@ class DatastoreImpl implements datastore.Datastore { ..stringValue = value; } else if (value is DateTime) { return apiValue - ..dateTimeValue = value; + ..timestampValue = value.toIso8601String(); } else if (value is datastore.BlobValue) { return apiValue ..blobValueAsBytes = value.bytes; @@ -155,106 +133,49 @@ class DatastoreImpl implements datastore.Datastore { convertItem(i) => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); - return new api.Value() - ..listValue = value.map(convertItem).toList(); + return new api.Value()..arrayValue = ( + new api.ArrayValue()..values = value.map(convertItem).toList()); } else { throw new UnsupportedError( 'Types ${value.runtimeType} cannot be used for serializing.'); } } - static _convertApi2DatastoreProperty(api.Property property) { - if (property.booleanValue != null) - return property.booleanValue; - else if (property.integerValue != null) - return int.parse(property.integerValue); - else if (property.doubleValue != null) - return property.doubleValue; - else if (property.stringValue != null) - return property.stringValue; - else if (property.dateTimeValue != null) - return property.dateTimeValue; - else if (property.blobValue != null) - return new datastore.BlobValue(property.blobValueAsBytes); - else if (property.keyValue != null) - return _convertApi2DatastoreKey(property.keyValue); - else if (property.listValue != null) - return - property.listValue.map(_convertApi2DatastorePropertyValue).toList(); - else if (property.blobKeyValue != null) - throw new UnsupportedError('Blob keys are not supported.'); - else if (property.entityValue != null) + static dynamic _convertApi2DatastoreProperty(api.Value value) { + if (value.booleanValue != null) + return 
value.booleanValue; + else if (value.integerValue != null) + return int.parse(value.integerValue); + else if (value.doubleValue != null) + return value.doubleValue; + else if (value.stringValue != null) + return value.stringValue; + else if (value.timestampValue != null) + return DateTime.parse(value.timestampValue); + else if (value.blobValue != null) + return new datastore.BlobValue(value.blobValueAsBytes); + else if (value.keyValue != null) + return _convertApi2DatastoreKey(value.keyValue); + else if (value.arrayValue != null && value.arrayValue.values != null) + return value + .arrayValue.values.map(_convertApi2DatastoreProperty).toList(); + else if (value.entityValue != null) throw new UnsupportedError('Entity values are not supported.'); + else if (value.geoPointValue != null) + throw new UnsupportedError('GeoPoint values are not supported.'); return null; } - api.Property _convertDatastore2ApiProperty( - value, bool indexed, {bool lists: true}) { - var apiProperty = new api.Property() - ..indexed = indexed; - if (value == null) { - return null; - } else if (value is bool) { - return apiProperty - ..booleanValue = value; - } else if (value is int) { - return apiProperty - ..integerValue = '$value'; - } else if (value is double) { - return apiProperty - ..doubleValue = value; - } else if (value is String) { - return apiProperty - ..stringValue = value; - } else if (value is DateTime) { - return apiProperty - ..dateTimeValue = value; - } else if (value is datastore.BlobValue) { - return apiProperty - ..blobValueAsBytes = value.bytes; - } else if (value is datastore.Key) { - return apiProperty - ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); - } else if (value is List) { - if (!lists) { - // FIXME(Issue #3): Consistently handle exceptions. - throw new Exception('List values are not allowed.'); - } - convertItem(i) - => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); - return new api.Property()..listValue = value.map(convertItem).toList(); - } else { - throw new UnsupportedError( - 'Types ${value.runtimeType} cannot be used for serializing.'); - } - } - static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { var unindexedProperties = new Set(); var properties = {}; if (entity.properties != null) { - entity.properties.forEach((String name, api.Property property) { - properties[name] = _convertApi2DatastoreProperty(property); - if (property.indexed == false) { - // TODO(Issue #$4): Should we support mixed indexed/non-indexed list - // values? - if (property.listValue != null) { - if (property.listValue.length > 0) { - var firstIndexed = property.listValue.first.indexed; - for (int i = 1; i < property.listValue.length; i++) { - if (property.listValue[i].indexed != firstIndexed) { - throw new Exception('Some list entries are indexed and some ' - 'are not. 
This is currently not supported.'); - } - } - if (firstIndexed == false) { - unindexedProperties.add(name); - } - } - } else { - unindexedProperties.add(name); - } + entity.properties.forEach((String name, api.Value value) { + properties[name] = _convertApi2DatastoreProperty(value); + if (value.excludeFromIndexes != null && + value.excludeFromIndexes) { + unindexedProperties.add(name); } }); } @@ -303,7 +224,7 @@ class DatastoreImpl implements datastore.Datastore { if (operator == null) { throw new ArgumentError('Unknown filter relation: ${filter.relation}.'); } - pf.operator = operator; + pf.op = operator; pf.property = new api.PropertyReference()..name = filter.name; // FIXME(Issue #5): Is this OK? @@ -322,7 +243,7 @@ class DatastoreImpl implements datastore.Datastore { api.Filter _convertDatastoreAncestorKey2ApiFilter(datastore.Key key) { var pf = new api.PropertyFilter(); - pf.operator = 'HAS_ANCESTOR'; + pf.op = 'HAS_ANCESTOR'; pf.property = new api.PropertyReference()..name = '__key__'; pf.value = new api.Value() ..keyValue = _convertDatastore2ApiKey(key, enforceId: true); @@ -347,7 +268,7 @@ class DatastoreImpl implements datastore.Datastore { compFilter.filters.add(filter); } } - compFilter.operator = 'AND'; + compFilter.op = 'AND'; return new api.Filter()..compositeFilter = compFilter; } @@ -389,7 +310,7 @@ class DatastoreImpl implements datastore.Datastore { request..keys = keys.map((key) { return _convertDatastore2ApiKey(key, enforceId: false); }).toList(); - return _api.datasets.allocateIds(request, _project).then((response) { + return _api.projects.allocateIds(request, _project).then((response) { return response.keys.map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); } @@ -397,9 +318,7 @@ class DatastoreImpl implements datastore.Datastore { Future beginTransaction( {bool crossEntityGroup: false}) { var request = new api.BeginTransactionRequest(); - // TODO: Should this be made configurable? 
- request.isolationLevel = 'SERIALIZABLE'; - return _api.datasets.beginTransaction(request, _project).then((result) { + return _api.projects.beginTransaction(request, _project).then((result) { return new TransactionImpl(result.transaction); }, onError: _handleError); } @@ -417,34 +336,42 @@ class DatastoreImpl implements datastore.Datastore { request.mode = 'NON_TRANSACTIONAL'; } - request.mutation = new api.Mutation(); + var mutations = request.mutations = []; if (inserts != null) { - request.mutation.upsert = new List(inserts.length); for (int i = 0; i < inserts.length; i++) { - request.mutation.upsert[i] = _convertDatastore2ApiEntity(inserts[i]); + mutations.add( + new api.Mutation()..upsert = + _convertDatastore2ApiEntity(inserts[i], enforceId: true)); } } + int autoIdStartIndex = -1; if (autoIdInserts != null) { - request.mutation.insertAutoId = new List(autoIdInserts.length); + autoIdStartIndex = mutations.length; for (int i = 0; i < autoIdInserts.length; i++) { - request.mutation.insertAutoId[i] = - _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false); + mutations.add( + new api.Mutation()..insert = + _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); } } if (deletes != null) { - request.mutation.delete = new List(deletes.length); for (int i = 0; i < deletes.length; i++) { - request.mutation.delete[i] = - _convertDatastore2ApiKey(deletes[i], enforceId: true); + mutations.add( + new api.Mutation()..delete = + _convertDatastore2ApiKey(deletes[i], enforceId: true)); } } - return _api.datasets.commit(request, _project).then((result) { + return _api.projects.commit(request, _project).then((result) { var keys; if (autoIdInserts != null && autoIdInserts.length > 0) { - keys = result - .mutationResult - .insertAutoIdKeys - .map(_convertApi2DatastoreKey).toList(); + List mutationResults = result.mutationResults; + assert(autoIdStartIndex != -1); + assert(mutationResults.length >= + (autoIdStartIndex + autoIdInserts.length)); + keys = mutationResults + .skip(autoIdStartIndex) + .take(autoIdInserts.length) + .map((api.MutationResult r) => _convertApi2DatastoreKey(r.key)) + .toList(); } return new datastore.CommitResult(keys); }, onError: _handleError); @@ -462,7 +389,7 @@ class DatastoreImpl implements datastore.Datastore { request.readOptions = new api.ReadOptions(); request.readOptions.transaction = (transaction as TransactionImpl).data; } - return _api.datasets.lookup(request, _project).then((response) { + return _api.projects.lookup(request, _project).then((response) { if (response.deferred != null && response.deferred.length > 0) { throw new datastore.DatastoreError( 'Could not successfully look up all keys due to resource ' @@ -534,7 +461,7 @@ class DatastoreImpl implements datastore.Datastore { ..offset = query.offset; if (query.kind != null) { - apiQuery.kinds = [new api.KindExpression()..name = query.kind]; + apiQuery.kind = [new api.KindExpression()..name = query.kind]; } var request = new api.RunQueryRequest(); @@ -546,7 +473,7 @@ class DatastoreImpl implements datastore.Datastore { } if (partition != null) { request.partitionId = new api.PartitionId() - ..namespace = partition.namespace; + ..namespaceId = partition.namespace; } return QueryPageImpl.runQuery(_api, _project, request, query.limit) @@ -557,7 +484,7 @@ class DatastoreImpl implements datastore.Datastore { // TODO: Handle [transaction] var request = new api.RollbackRequest() ..transaction = (transaction as TransactionImpl).data; - return _api.datasets.rollback(request, 
_project).catchError(_handleError); + return _api.projects.rollback(request, _project).catchError(_handleError); } } @@ -592,7 +519,7 @@ class QueryPageImpl implements Page { request.query.limit = batchLimit; - return api.datasets.runQuery(request, project).then((response) { + return api.projects.runQuery(request, project).then((response) { var returnedEntities = const []; var batch = response.batch; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 29df7fb3..83cd2bcd 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,17 +1,17 @@ name: gcloud -version: 0.2.0+14 +version: 0.3.0 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.13.0 <2.0.0' dependencies: - googleapis: '>=0.2.0 <0.29.0' - googleapis_beta: '>=0.10.0 <0.28.0' + googleapis: '>=0.2.0 <0.32.0' + googleapis_beta: '>=0.10.0 <0.31.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' - http_parser: '>=2.0.0 <3.0.0' + http_parser: '>=2.0.0 <4.0.0' mime: '>=0.9.0+3 <0.10.0' unittest: '>=0.11.0 <0.12.0' transformers: diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 1e54f025..b917d1d5 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -244,8 +244,8 @@ runTests(Datastore datastore, String namespace) { }); test('negative_insert__incomplete_path', () { - expect(datastore.commit(inserts: unnamedEntities1), - throwsA(isApplicationError)); + expect(() => datastore.commit(inserts: unnamedEntities1), + throwsA(isApplicationError)); }); test('negative_insert_transactional_xg', () { diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index cf5ca730..39a787c3 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -70,6 +70,7 @@ List buildEntityWithAllProperties( Map buildProperties(int i) { return { + 'nullValue' : null, 'boolProperty' : true, 'intProperty' : 42, 'doubleProperty' : 4.2, diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index ce84c26a..7ffd1123 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -443,7 +443,7 @@ runTests(db.DatastoreDB store, String namespace) { var barUsers = users.where( (User u) => u.languages.contains('bar')).toList(); var usersWithWife = users.where( - (User u) => u.wife == root.append(User, id: 42 + 3)); + (User u) => u.wife == root.append(User, id: 42 + 3)).toList(); var allInserts = [] ..addAll(users) diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 43e50df5..40e8f49a 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -9,7 +9,7 @@ import 'dart:async'; import 'package:unittest/unittest.dart'; import 'package:gcloud/datastore.dart'; -import 'package:gcloud/datastore.dart' show Key, Query, Partition; +import 'package:gcloud/datastore.dart' show Key, Partition; import 'package:gcloud/db.dart' as db; import 'package:gcloud/db/metamodel.dart'; diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 4fe737c2..434f2a7d 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -25,7 +25,7 @@ main() { 
String namespace = '${Platform.operatingSystem}${now}'; withAuthClient(scopes, (String project, httpClient) { - var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); + var datastore = new datastore_impl.DatastoreImpl(httpClient, project); var datastoreDB = new db.DatastoreDB(datastore); return runE2EUnittest(() { From 3346db27ea841eb63c52f54b5d1e992f9a0f4d00 Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Mon, 6 Mar 2017 15:11:02 +0100 Subject: [PATCH 101/239] Remove FilterRelation.In / "property IN" queries, upgrade googleapis{,_beta} dependencies BUG=https://github.com/dart-lang/gcloud/issues/5 R=vegorov@google.com Review-Url: https://codereview.chromium.org//2731933004 . --- pkgs/gcloud/CHANGELOG.md | 11 +++++++ pkgs/gcloud/lib/datastore.dart | 1 - pkgs/gcloud/lib/src/datastore_impl.dart | 20 ++---------- pkgs/gcloud/lib/src/db/annotations.dart | 31 ++++++++++++++++--- pkgs/gcloud/lib/src/db/db.dart | 17 ++-------- pkgs/gcloud/lib/src/db/model_db.dart | 3 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 16 ++++++---- pkgs/gcloud/pubspec.yaml | 6 ++-- pkgs/gcloud/test/common_e2e.dart | 2 +- .../datastore/e2e/datastore_test_impl.dart | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 5 ++- pkgs/gcloud/test/db/properties_test.dart | 5 ++- 12 files changed, 64 insertions(+), 55 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 573451cb..50b1d281 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,14 @@ +## 0.4.0 + +* Remove support for `FilterRelation.In` and "propertyname IN" for queries: + This is not supported by the newer APIs and was originally part of fat-client + libraries which performed multiple queries for each iten in the list. + +* Adds optional `forComparision` named argument to `Property.encodeValue` which + will be set to `true` when encoding a value for comparison in queries. + +* Upgrade to newer versions of `package:googleapis` and `package:googleapis_beta` + ## 0.3.0 * Upgrade to use stable `package:googleapis/datastore/v1.dart`. diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index c904cc25..231b93a9 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -240,7 +240,6 @@ class FilterRelation { static const FilterRelation GreatherThanOrEqual = const FilterRelation._('>='); static const FilterRelation Equal = const FilterRelation._('=='); - static const FilterRelation In = const FilterRelation._('IN'); final String name; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index edb7e7a5..d2045fd8 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -210,34 +210,18 @@ class DatastoreImpl implements datastore.Datastore { datastore.FilterRelation.Equal: 'EQUAL', datastore.FilterRelation.GreatherThan: 'GREATER_THAN', datastore.FilterRelation.GreatherThanOrEqual: 'GREATER_THAN_OR_EQUAL', - // TODO(Issue #5): IN operator not supported currently. }; api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) { var pf = new api.PropertyFilter(); var operator = relationMapping[filter.relation]; - // FIXME(Issue #5): Is this OK? - if (filter.relation == datastore.FilterRelation.In) { - operator = 'EQUAL'; - } - if (operator == null) { throw new ArgumentError('Unknown filter relation: ${filter.relation}.'); } pf.op = operator; pf.property = new api.PropertyReference()..name = filter.name; - - // FIXME(Issue #5): Is this OK? 
- var value = filter.value; - if (filter.relation == datastore.FilterRelation.In) { - if (value is List && value.length == 1) { - value = value.first; - } else { - throw new ArgumentError('List values not supported (was: $value).'); - } - } - - pf.value = _convertDatastore2ApiPropertyValue(value, true, lists: false); + pf.value = _convertDatastore2ApiPropertyValue( + filter.value, true, lists: false); return new api.Filter()..propertyFilter = pf; } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index e158ad53..276f87f8 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -88,7 +88,7 @@ abstract class Property { return true; } - Object encodeValue(ModelDB db, Object value); + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}); Object decodePrimitiveValue(ModelDB db, Object value); } @@ -100,7 +100,7 @@ abstract class PrimitiveProperty extends Property { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - Object encodeValue(ModelDB db, Object value) => value; + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) => value; Object decodePrimitiveValue(ModelDB db, Object value) => value; } @@ -169,7 +169,7 @@ class ModelKeyProperty extends PrimitiveProperty { bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is Key); - Object encodeValue(ModelDB db, Object value) { + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; return db.toDatastoreKey(value); } @@ -196,7 +196,7 @@ class BlobProperty extends PrimitiveProperty { bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is List); - Object encodeValue(ModelDB db, Object value) { + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; return new datastore.BlobValue(value); } @@ -254,7 +254,28 @@ class ListProperty extends Property { return true; } - Object encodeValue(ModelDB db, Object value) { + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { + if (forComparison) { + // If we have comparison of list properties (i.e. repeated property names) + // the comparison object must not be a list, but the value itself. + // i.e. + // + // class Article { + // ... + // @ListProperty(StringProperty()) + // List tags; + // ... + // } + // + // should be queried via + // + // await db.query(Article, 'tags=', "Dart").toList(); + // + // So the [value] for the comparison is of type `String` and not + // `List`! + return subProperty.encodeValue(db, value, forComparison: true); + } + if (value == null) return null; List list = value; if (list.length == 0) return null; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index d1a79783..38b88bfb 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -117,7 +117,6 @@ class Query { '>': datastore.FilterRelation.GreatherThan, '>=': datastore.FilterRelation.GreatherThanOrEqual, '=': datastore.FilterRelation.Equal, - 'IN': datastore.FilterRelation.In, }; final DatastoreDB _db; @@ -151,7 +150,6 @@ class Query { * * '>' (greater than) * * '>=' (greater than or equal) * * '=' (equal) - * * 'IN' (in - `comparisonObject` must be a list) * * [comparisonObject] is the object for comparison. 
*/ @@ -170,19 +168,8 @@ class Query { // TODO: We should remove the condition in a major version update of // `package:gcloud`. if (comparisonObject is! datastore.Key) { - var encoded = _db.modelDB.toDatastoreValue(_kind, name, comparisonObject); - - // We encode Lists as repeated properties normally, and the encoding of - // `['abc']` will just be `'abc'` (see [ListProperty]). - // But for IN filters, we need to treat them as lists. - if (comparison == 'IN' && - comparisonObject is List && - comparisonObject.length == 1 && - encoded is! List) { - encoded = [encoded]; - } - - comparisonObject = encoded; + comparisonObject = _db.modelDB.toDatastoreValue(_kind, name, + comparisonObject, forComparison: true); } _filters.add(new datastore.Filter( _relationMapping[comparison], propertyName, comparisonObject)); diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 0c18e39a..efda613d 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -44,5 +44,6 @@ abstract class ModelDB { /** * Converts [value] according to the [Property] named [fieldName] in [kind]. */ - Object toDatastoreValue(String kind, String fieldName, Object value); + Object toDatastoreValue(String kind, String fieldName, Object value, + {bool forComparison: false}); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index dc26f134..b08a5d51 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -155,12 +155,14 @@ class ModelDBImpl implements ModelDB { } /// Converts [value] according to the [Property] named [name] in [type]. - Object toDatastoreValue(String kind, String fieldName, Object value) { + Object toDatastoreValue(String kind, String fieldName, Object value, + {bool forComparison: false}) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { throw new ArgumentError('The kind "$kind" is unknown.'); } - return modelDescription.encodeField(this, fieldName, value); + return modelDescription.encodeField( + this, fieldName, value, forComparison: forComparison); } Iterable<_ModelDescription> get _modelDescriptions { @@ -457,10 +459,11 @@ class _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true}) { + {bool enforceFieldExists: true, + bool forComparison: false}) { Property property = db._propertiesForModel(this)[fieldName]; if (property != null) { - return property.encodeValue(db, value); + return property.encodeValue(db, value, forComparison: forComparison); } if (enforceFieldExists) { throw new ArgumentError( @@ -536,11 +539,12 @@ class _ExpandoModelDescription extends _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true}) { + {bool enforceFieldExists: true, + bool forComparison: false}) { // The [enforceFieldExists] argument is intentionally ignored. 
Object primitiveValue = super.encodeField(db, fieldName, value, - enforceFieldExists: false); + enforceFieldExists: false, forComparison: forComparison); // If superclass can't encode field, we return value here (and assume // it's primitive) // NOTE: Implicit assumption: diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 83cd2bcd..e3e0b325 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,13 +1,13 @@ name: gcloud -version: 0.3.0 +version: 0.4.0 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.13.0 <2.0.0' dependencies: - googleapis: '>=0.2.0 <0.32.0' - googleapis_beta: '>=0.10.0 <0.31.0' + googleapis: '>=0.2.0 <0.37.0' + googleapis_beta: '>=0.10.0 <0.35.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index aa75dae5..bbc56f18 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -51,7 +51,7 @@ bool onBot() { // Get the service key from the specified location. Future serviceKeyJson(String serviceKeyLocation) { if (!serviceKeyLocation.startsWith('gs://')) { - throw new Exception('Service key location must start with gs://'); + return new File(serviceKeyLocation).readAsString(); } var future; if (onBot()) { diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index b917d1d5..8704adf9 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -737,7 +737,7 @@ runTests(Datastore datastore, String namespace) { new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), ]; var listFilters = [ - new Filter(FilterRelation.In, TEST_LIST_PROPERTY, [QUERY_LIST_ENTRY]) + new Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) ]; var indexedPropertyFilter = [ new Filter(FilterRelation.Equal, diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 7ffd1123..8757f67a 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -514,10 +514,9 @@ runTests(db.DatastoreDB store, String namespace) { }, // Filter lists - /* FIXME: TODO: FIXME: "IN" not supported in public proto/apiary */ () async { var query = store.query(User, partition: partition) - ..filter('languages IN', ['foo']) + ..filter('languages =', 'foo') ..order('name') ..run(); var models = await runQueryWithExponentialBackoff( @@ -526,7 +525,7 @@ runTests(db.DatastoreDB store, String namespace) { }, () async { var query = store.query(User, partition: partition) - ..filter('languages IN', ['bar']) + ..filter('languages =', 'bar') ..order('name') ..run(); var models = await runQueryWithExponentialBackoff( diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index eed59e9e..52217b60 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -117,6 +117,9 @@ main() { expect(prop.encodeValue(null, []), equals(null)); expect(prop.encodeValue(null, [true]), equals(true)); expect(prop.encodeValue(null, [true, false]), equals([true, false])); + expect(prop.encodeValue(null, true, forComparison: true), equals(true)); + expect(prop.encodeValue(null, false, forComparison: true), equals(false)); + expect(prop.encodeValue(null, null, forComparison: true), 
equals(null)); expect(prop.decodePrimitiveValue(null, null), equals([])); expect(prop.decodePrimitiveValue(null, []), equals([])); expect(prop.decodePrimitiveValue(null, true), equals([true])); @@ -238,5 +241,5 @@ class ModelDBMock implements ModelDB { datastore.Entity toDatastoreEntity(Model model) => null; String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; - Object toDatastoreValue(String kind, String fieldName, Object value) => null; + Object toDatastoreValue(String kind, String fieldName, Object value, {bool forComparison: false}) => null; } From 24301d09306665c53828666989e7c740038d33b7 Mon Sep 17 00:00:00 2001 From: ntaoo Date: Fri, 2 Jun 2017 18:17:07 +0900 Subject: [PATCH 102/239] Fix entity deserialization error message to show a propertyName correctly. (dart-lang/gcloud#47) --- pkgs/gcloud/lib/src/db/model_db_impl.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index b08a5d51..dc21c759 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -444,7 +444,7 @@ class _ModelDescription { if (!prop.validate(db, value)) { throw new StateError('Property validation failed while ' 'trying to deserialize entity of kind ' - '${entity.key.elements.last.kind} (property name: $prop)'); + '${entity.key.elements.last.kind} (property name: $propertyName)'); } mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); From 27fb67e7721e41a3664706f68e31223421b88406 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 13 Sep 2017 10:03:26 -0700 Subject: [PATCH 103/239] Standardize license file --- pkgs/gcloud/LICENSE | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/gcloud/LICENSE b/pkgs/gcloud/LICENSE index 49475511..5c60afea 100644 --- a/pkgs/gcloud/LICENSE +++ b/pkgs/gcloud/LICENSE @@ -2,6 +2,7 @@ Copyright 2014, the Dart project authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above @@ -11,6 +12,7 @@ met: * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR From 544bc416b6a4c3ae9a26b777e519f7a0b8846e1d Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Thu, 21 Sep 2017 09:46:24 +0200 Subject: [PATCH 104/239] Run dartfmt on all .dart files Review-Url: https://codereview.chromium.org//3013763002 . 
--- pkgs/gcloud/lib/common.dart | 14 +- pkgs/gcloud/lib/datastore.dart | 40 +- pkgs/gcloud/lib/pubsub.dart | 11 +- pkgs/gcloud/lib/service_scope.dart | 10 +- pkgs/gcloud/lib/src/datastore_impl.dart | 165 ++-- pkgs/gcloud/lib/src/db/annotations.dart | 51 +- pkgs/gcloud/lib/src/db/db.dart | 102 ++- pkgs/gcloud/lib/src/db/model_db.dart | 2 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 49 +- pkgs/gcloud/lib/src/db/models.dart | 14 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 129 ++- pkgs/gcloud/lib/src/storage_impl.dart | 220 ++--- pkgs/gcloud/lib/storage.dart | 71 +- pkgs/gcloud/test/common.dart | 114 ++- pkgs/gcloud/test/common_e2e.dart | 11 +- .../datastore/e2e/datastore_test_impl.dart | 466 ++++++----- pkgs/gcloud/test/datastore/e2e/utils.dart | 54 +- .../gcloud/test/datastore/error_matchers.dart | 2 - pkgs/gcloud/test/db/db_test.dart | 4 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 285 ++++--- .../test/db/e2e/metamodel_test_impl.dart | 19 +- pkgs/gcloud/test/db/model_db_test.dart | 3 +- .../test/db/model_dbs/duplicate_kind.dart | 4 +- .../db/model_dbs/multiple_annotations.dart | 2 +- pkgs/gcloud/test/db/properties_test.dart | 41 +- pkgs/gcloud/test/db_all_e2e_test.dart | 1 - pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 6 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 362 +++++---- pkgs/gcloud/test/service_scope_test.dart | 147 ++-- pkgs/gcloud/test/storage/e2e_test.dart | 226 +++--- pkgs/gcloud/test/storage/storage_test.dart | 756 +++++++++--------- 31 files changed, 1707 insertions(+), 1674 deletions(-) diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index 430e3dfd..d8135000 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -39,9 +39,12 @@ class StreamFromPages { StreamController _controller; StreamFromPages(this._firstPageProvider) { - _controller = new StreamController(sync: true, onListen: _onListen, - onPause: _onPause, onResume: _onResume, - onCancel: _onCancel); + _controller = new StreamController( + sync: true, + onListen: _onListen, + onPause: _onPause, + onResume: _onResume, + onCancel: _onCancel); } Stream get stream => _controller.stream; @@ -69,7 +72,9 @@ class StreamFromPages { _firstPageProvider(pageSize).then(_handlePage, onError: _handleError); } - _onPause() { _paused = true; } + _onPause() { + _paused = true; + } _onResume() { _paused = false; @@ -81,5 +86,4 @@ class StreamFromPages { _onCancel() { _cancelled = true; } - } diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 231b93a9..cbdbd2dd 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -44,12 +44,13 @@ class ApplicationError implements Exception { String toString() => "ApplicationError: $message"; } - class DatastoreError implements Exception { final String message; - DatastoreError([String message]) : message = - (message != null ?message : 'DatastoreError: An unknown error occured'); + DatastoreError([String message]) + : message = (message != null + ? message + : 'DatastoreError: An unknown error occured'); String toString() => '$message'; } @@ -71,8 +72,7 @@ class TimeoutError extends DatastoreError { /// An application needs to specify indices in a `index.yaml` file and needs to /// create indices using the `gcloud preview datastore create-indexes` command. 
class NeedIndexError extends DatastoreError { - NeedIndexError() - : super("An index is needed for the query to succeed."); + NeedIndexError() : super("An index is needed for the query to succeed."); } class PermissionDeniedError extends DatastoreError { @@ -153,7 +153,7 @@ class Key { int get hashCode => elements.fold(partition.hashCode, (a, b) => a ^ b.hashCode); - bool operator==(Object other) { + bool operator ==(Object other) { if (identical(this, other)) return true; if (other is Key && @@ -197,7 +197,7 @@ class Partition { int get hashCode => namespace.hashCode; - bool operator==(Object other) => + bool operator ==(Object other) => other is Partition && namespace == other.namespace; } @@ -226,7 +226,7 @@ class KeyElement { int get hashCode => kind.hashCode ^ id.hashCode; - bool operator==(Object other) => + bool operator ==(Object other) => other is KeyElement && kind == other.kind && id == other.id; String toString() => "$kind.$id"; @@ -317,8 +317,13 @@ class Query { /// Limit the number of entities returned to [limit]. final int limit; - Query({this.ancestorKey, this.kind, this.filters, this.orders, - this.offset, this.limit}); + Query( + {this.ancestorKey, + this.kind, + this.filters, + this.orders, + this.offset, + this.limit}); } /// The result of a commit. @@ -342,7 +347,7 @@ class BlobValue { /// /// This token can be passed to the `commit` and `lookup` calls if they should /// operate within this transaction. -abstract class Transaction { } +abstract class Transaction {} /// Interface used to talk to the Google Cloud Datastore service. /// @@ -380,10 +385,11 @@ abstract class Datastore { /// This method might complete with a [TransactionAbortedError] error. /// Users must take care of retrying transactions. // TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert. - Future commit({List inserts, - List autoIdInserts, - List deletes, - Transaction transaction}); + Future commit( + {List inserts, + List autoIdInserts, + List deletes, + Transaction transaction}); /// Roll a started transaction back. Future rollback(Transaction transaction); @@ -415,6 +421,6 @@ abstract class Datastore { /// /// Outside of transactions, the result set might be stale. Queries are by /// default eventually consistent. - Future> query( - Query query, {Partition partition, Transaction transaction}); + Future> query(Query query, + {Partition partition, Transaction transaction}); } diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 7e251b49..0e4bb10a 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -111,7 +111,7 @@ void registerPubSubService(PubSub pubsub) { /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. - static const SCOPES = const [ pubsub.PubsubApi.PubsubScope ]; + static const SCOPES = const [pubsub.PubsubApi.PubsubScope]; /// Access Pub/Sub using an authenticated client. /// @@ -171,8 +171,8 @@ abstract class PubSub { /// The [name] can be either an absolute name or a relative name. /// /// Returns a `Future` which completes with the newly created subscription. - Future createSubscription( - String name, String topic, {Uri endpoint}); + Future createSubscription(String name, String topic, + {Uri endpoint}); /// Delete subscription named [name]. /// @@ -323,7 +323,6 @@ abstract class Subscription { /// Returns a `Future` which completes when the operation completes. Future delete(); - /// Pull a message from the subscription. 
/// /// If `wait` is `true` (the default), the method will wait for a message @@ -354,8 +353,8 @@ abstract class Message { /// Creates a new message with a binary body. /// /// Message attributes can be passed in the [attributes] Map. - factory Message.withBytes(List message, {Map attributes}) = - _MessageImpl.withBytes; + factory Message.withBytes(List message, + {Map attributes}) = _MessageImpl.withBytes; /// The message body as a String. /// diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 8d4641ee..a5a58af6 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -191,8 +191,8 @@ class _ServiceScope { _ensureNotInDestroyingState(); if (onScopeExitCallback != null) { - _registeredEntries.add( - new _RegisteredEntry(null, null, onScopeExitCallback)); + _registeredEntries + .add(new _RegisteredEntry(null, null, onScopeExitCallback)); } } @@ -202,12 +202,12 @@ class _ServiceScope { _ensureNotInDestroyingState(); var serviceScope = _copy(); - var map = { _ServiceScopeKey: serviceScope }; + var map = {_ServiceScopeKey: serviceScope}; return runZoned(() { var f = func(); if (f is! Future) { throw new ArgumentError('Forking a service scope zone requires the ' - 'callback function to return a future.'); + 'callback function to return a future.'); } return f.whenComplete(serviceScope._runScopeExitHandlers); }, zoneValues: map, onError: onError); @@ -251,7 +251,7 @@ class _ServiceScope { // Even if one fails, we continue cleaning up and report then the list of // errors (if there were any). return Future.forEach(_registeredEntries.reversed, - (_RegisteredEntry registeredEntry) { + (_RegisteredEntry registeredEntry) { if (registeredEntry.key != null) { _key2Values.remove(registeredEntry.key); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index d2045fd8..8f28a361 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -19,8 +19,8 @@ class TransactionImpl implements datastore.Transaction { class DatastoreImpl implements datastore.Datastore { static const List SCOPES = const [ - api.DatastoreApi.DatastoreScope, - api.DatastoreApi.CloudPlatformScope, + api.DatastoreApi.DatastoreScope, + api.DatastoreApi.CloudPlatformScope, ]; final api.DatastoreApi _api; @@ -29,14 +29,15 @@ class DatastoreImpl implements datastore.Datastore { /// The [project] parameter is the name of the cloud project (it should not /// start with a `s~`). 
DatastoreImpl(http.Client client, String project) - : _api = new api.DatastoreApi(client), _project = project; + : _api = new api.DatastoreApi(client), + _project = project; api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId: true}) { var apiKey = new api.Key(); apiKey.partitionId = new api.PartitionId() - ..projectId = _project - ..namespaceId = key.partition.namespace; + ..projectId = _project + ..namespaceId = key.partition.namespace; apiKey.path = key.elements.map((datastore.KeyElement element) { var part = new api.PathElement(); @@ -96,45 +97,38 @@ class DatastoreImpl implements datastore.Datastore { return true; } - api.Value _convertDatastore2ApiPropertyValue( - value, bool indexed, {bool lists: true}) { - var apiValue = new api.Value() - ..excludeFromIndexes = !indexed; + api.Value _convertDatastore2ApiPropertyValue(value, bool indexed, + {bool lists: true}) { + var apiValue = new api.Value()..excludeFromIndexes = !indexed; if (value == null) { - return apiValue - ..nullValue = "NULL_VALUE"; + return apiValue..nullValue = "NULL_VALUE"; } else if (value is bool) { - return apiValue - ..booleanValue = value; + return apiValue..booleanValue = value; } else if (value is int) { - return apiValue - ..integerValue = '$value'; + return apiValue..integerValue = '$value'; } else if (value is double) { - return apiValue - ..doubleValue = value; + return apiValue..doubleValue = value; } else if (value is String) { - return apiValue - ..stringValue = value; + return apiValue..stringValue = value; } else if (value is DateTime) { - return apiValue - ..timestampValue = value.toIso8601String(); + return apiValue..timestampValue = value.toIso8601String(); } else if (value is datastore.BlobValue) { - return apiValue - ..blobValueAsBytes = value.bytes; + return apiValue..blobValueAsBytes = value.bytes; } else if (value is datastore.Key) { return apiValue - ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); + ..keyValue = _convertDatastore2ApiKey(value, enforceId: false); } else if (value is List) { if (!lists) { // FIXME(Issue #3): Consistently handle exceptions. 
throw new Exception('List values are not allowed.'); } - convertItem(i) - => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); + convertItem(i) => + _convertDatastore2ApiPropertyValue(i, indexed, lists: false); - return new api.Value()..arrayValue = ( - new api.ArrayValue()..values = value.map(convertItem).toList()); + return new api.Value() + ..arrayValue = + (new api.ArrayValue()..values = value.map(convertItem).toList()); } else { throw new UnsupportedError( 'Types ${value.runtimeType} cannot be used for serializing.'); @@ -157,8 +151,9 @@ class DatastoreImpl implements datastore.Datastore { else if (value.keyValue != null) return _convertApi2DatastoreKey(value.keyValue); else if (value.arrayValue != null && value.arrayValue.values != null) - return value - .arrayValue.values.map(_convertApi2DatastoreProperty).toList(); + return value.arrayValue.values + .map(_convertApi2DatastoreProperty) + .toList(); else if (value.entityValue != null) throw new UnsupportedError('Entity values are not supported.'); else if (value.geoPointValue != null) @@ -173,22 +168,21 @@ class DatastoreImpl implements datastore.Datastore { if (entity.properties != null) { entity.properties.forEach((String name, api.Value value) { properties[name] = _convertApi2DatastoreProperty(value); - if (value.excludeFromIndexes != null && - value.excludeFromIndexes) { + if (value.excludeFromIndexes != null && value.excludeFromIndexes) { unindexedProperties.add(name); } }); } - return new datastore.Entity(_convertApi2DatastoreKey(entity.key), - properties, - unIndexedProperties: unindexedProperties); + return new datastore.Entity( + _convertApi2DatastoreKey(entity.key), properties, + unIndexedProperties: unindexedProperties); } api.Entity _convertDatastore2ApiEntity(datastore.Entity entity, - {bool enforceId: false}) { + {bool enforceId: false}) { var apiEntity = new api.Entity(); - apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId); + apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId); apiEntity.properties = {}; if (entity.properties != null) { for (var key in entity.properties.keys) { @@ -220,8 +214,8 @@ class DatastoreImpl implements datastore.Datastore { } pf.op = operator; pf.property = new api.PropertyReference()..name = filter.name; - pf.value = _convertDatastore2ApiPropertyValue( - filter.value, true, lists: false); + pf.value = + _convertDatastore2ApiPropertyValue(filter.value, true, lists: false); return new api.Filter()..propertyFilter = pf; } @@ -230,12 +224,12 @@ class DatastoreImpl implements datastore.Datastore { pf.op = 'HAS_ANCESTOR'; pf.property = new api.PropertyReference()..name = '__key__'; pf.value = new api.Value() - ..keyValue = _convertDatastore2ApiKey(key, enforceId: true); + ..keyValue = _convertDatastore2ApiKey(key, enforceId: true); return new api.Filter()..propertyFilter = pf; } - api.Filter _convertDatastore2ApiFilters(List filters, - datastore.Key ancestorKey) { + api.Filter _convertDatastore2ApiFilters( + List filters, datastore.Key ancestorKey) { if ((filters == null || filters.length == 0) && ancestorKey == null) { return null; } @@ -259,10 +253,11 @@ class DatastoreImpl implements datastore.Datastore { api.PropertyOrder _convertDatastore2ApiOrder(datastore.Order order) { var property = new api.PropertyReference()..name = order.propertyName; var direction = order.direction == datastore.OrderDirection.Ascending - ? 'ASCENDING' : 'DESCENDING'; + ? 
'ASCENDING' + : 'DESCENDING'; return new api.PropertyOrder() - ..direction = direction - ..property = property; + ..direction = direction + ..property = property; } List _convertDatastore2ApiOrders( @@ -283,7 +278,7 @@ class DatastoreImpl implements datastore.Datastore { // TODO: return new Future.error(new datastore.TransactionAbortedError(), stack); } else if (error.status == 412) { - return new Future.error(new datastore.NeedIndexError(), stack); + return new Future.error(new datastore.NeedIndexError(), stack); } } return new Future.error(error, stack); @@ -291,9 +286,10 @@ class DatastoreImpl implements datastore.Datastore { Future> allocateIds(List keys) { var request = new api.AllocateIdsRequest(); - request..keys = keys.map((key) { - return _convertDatastore2ApiKey(key, enforceId: false); - }).toList(); + request + ..keys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: false); + }).toList(); return _api.projects.allocateIds(request, _project).then((response) { return response.keys.map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); @@ -307,10 +303,11 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } - Future commit({List inserts, - List autoIdInserts, - List deletes, - datastore.Transaction transaction}) { + Future commit( + {List inserts, + List autoIdInserts, + List deletes, + datastore.Transaction transaction}) { var request = new api.CommitRequest(); if (transaction != null) { @@ -323,25 +320,23 @@ class DatastoreImpl implements datastore.Datastore { var mutations = request.mutations = []; if (inserts != null) { for (int i = 0; i < inserts.length; i++) { - mutations.add( - new api.Mutation()..upsert = - _convertDatastore2ApiEntity(inserts[i], enforceId: true)); + mutations.add(new api.Mutation() + ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true)); } } int autoIdStartIndex = -1; if (autoIdInserts != null) { autoIdStartIndex = mutations.length; for (int i = 0; i < autoIdInserts.length; i++) { - mutations.add( - new api.Mutation()..insert = + mutations.add(new api.Mutation() + ..insert = _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); } } if (deletes != null) { for (int i = 0; i < deletes.length; i++) { - mutations.add( - new api.Mutation()..delete = - _convertDatastore2ApiKey(deletes[i], enforceId: true)); + mutations.add(new api.Mutation() + ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true)); } } return _api.projects.commit(request, _project).then((result) { @@ -350,7 +345,7 @@ class DatastoreImpl implements datastore.Datastore { List mutationResults = result.mutationResults; assert(autoIdStartIndex != -1); assert(mutationResults.length >= - (autoIdStartIndex + autoIdInserts.length)); + (autoIdStartIndex + autoIdInserts.length)); keys = mutationResults .skip(autoIdStartIndex) .take(autoIdInserts.length) @@ -362,7 +357,7 @@ class DatastoreImpl implements datastore.Datastore { } Future> lookup(List keys, - {datastore.Transaction transaction}) { + {datastore.Transaction transaction}) { var apiKeys = keys.map((key) { return _convertDatastore2ApiKey(key, enforceId: true); }).toList(); @@ -433,16 +428,14 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } - Future> query( - datastore.Query query, {datastore.Partition partition, - datastore.Transaction transaction}) { + Future> query(datastore.Query query, + {datastore.Partition partition, datastore.Transaction transaction}) { // NOTE: We explicitly do not set 'limit' here, since 
this is handled by // QueryPageImpl.runQuery. var apiQuery = new api.Query() - ..filter = _convertDatastore2ApiFilters(query.filters, - query.ancestorKey) - ..order = _convertDatastore2ApiOrders(query.orders) - ..offset = query.offset; + ..filter = _convertDatastore2ApiFilters(query.filters, query.ancestorKey) + ..order = _convertDatastore2ApiOrders(query.orders) + ..offset = query.offset; if (query.kind != null) { apiQuery.kind = [new api.KindExpression()..name = query.kind]; @@ -457,17 +450,18 @@ class DatastoreImpl implements datastore.Datastore { } if (partition != null) { request.partitionId = new api.PartitionId() - ..namespaceId = partition.namespace; + ..namespaceId = partition.namespace; } - return QueryPageImpl.runQuery(_api, _project, request, query.limit) + return QueryPageImpl + .runQuery(_api, _project, request, query.limit) .catchError(_handleError); } Future rollback(datastore.Transaction transaction) { // TODO: Handle [transaction] var request = new api.RollbackRequest() - ..transaction = (transaction as TransactionImpl).data; + ..transaction = (transaction as TransactionImpl).data; return _api.projects.rollback(request, _project).catchError(_handleError); } } @@ -484,15 +478,12 @@ class QueryPageImpl implements Page { // This might be `null` in which case we request as many as we can get. final int _remainingNumberOfEntities; - QueryPageImpl(this._api, this._project, - this._nextRequest, this._entities, - this._isLast, this._remainingNumberOfEntities); + QueryPageImpl(this._api, this._project, this._nextRequest, this._entities, + this._isLast, this._remainingNumberOfEntities); - static Future runQuery(api.DatastoreApi api, - String project, - api.RunQueryRequest request, - int limit, - {int batchSize}) { + static Future runQuery(api.DatastoreApi api, String project, + api.RunQueryRequest request, int limit, + {int batchSize}) { int batchLimit = batchSize; if (batchLimit == null) { batchLimit = MAX_ENTITIES_PER_RESPONSE; @@ -530,7 +521,6 @@ class QueryPageImpl implements Page { '(${request.query.limit}) was.'); } - // FIXME: TODO: Big hack! // It looks like Apiary/Atlas is currently broken. /* @@ -554,8 +544,7 @@ class QueryPageImpl implements Page { // If the server signals there are more entities and we either have no // limit or our limit has not been reached, we set `moreBatches` to // `true`. - bool moreBatches = - (remainingEntities == null || remainingEntities > 0) && + bool moreBatches = (remainingEntities == null || remainingEntities > 0) && response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT'; bool gotAll = limit != null && remainingEntities == 0; @@ -563,13 +552,13 @@ class QueryPageImpl implements Page { bool isLast = gotAll || noMore; // As a sanity check, we assert that `moreBatches XOR isLast`. - assert (isLast != moreBatches); + assert(isLast != moreBatches); // FIXME: TODO: Big hack! // It looks like Apiary/Atlas is currently broken. if (moreBatches && returnedEntities.length == 0) { print('Warning: Api to Google Cloud Datastore returned bogus response. 
' - 'Trying a workaround.'); + 'Trying a workaround.'); isLast = true; moreBatches = false; } @@ -614,8 +603,8 @@ class QueryPageImpl implements Page { }); } - return QueryPageImpl.runQuery( - _api, _project, _nextRequest, _remainingNumberOfEntities) + return QueryPageImpl + .runQuery(_api, _project, _nextRequest, _remainingNumberOfEntities) .catchError(DatastoreImpl._handleError); } } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 276f87f8..9dd06148 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -100,7 +100,8 @@ abstract class PrimitiveProperty extends Property { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) => value; + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) => + value; Object decodePrimitiveValue(ModelDB db, Object value) => value; } @@ -114,8 +115,8 @@ class BoolProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is bool); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is bool); } /// A integer [Property]. @@ -127,8 +128,8 @@ class IntProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is int); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is int); } /// A double [Property]. @@ -140,8 +141,8 @@ class DoubleProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is double); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is double); } /// A string [Property]. @@ -153,8 +154,8 @@ class StringProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is String); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is String); } /// A key [Property]. @@ -166,8 +167,8 @@ class ModelKeyProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is Key); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is Key); Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; @@ -187,18 +188,18 @@ class ModelKeyProperty extends PrimitiveProperty { /// List. 
class BlobProperty extends PrimitiveProperty { const BlobProperty({String propertyName, bool required: false}) - : super(propertyName: propertyName, required: required, indexed: false); + : super(propertyName: propertyName, required: required, indexed: false); // NOTE: We don't validate that the entries of the list are really integers // of the range 0..255! // If an untyped list was created the type check will always succeed. i.e. // "[1, true, 'bar'] is List" evaluates to `true` - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is List); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is List); Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { - if (value == null) return null; - return new datastore.BlobValue(value); + if (value == null) return null; + return new datastore.BlobValue(value); } Object decodePrimitiveValue(ModelDB db, Object value) { @@ -218,19 +219,18 @@ class DateTimeProperty extends PrimitiveProperty { {String propertyName, bool required: false, bool indexed: true}) : super(propertyName: propertyName, required: required, indexed: indexed); - bool validate(ModelDB db, Object value) - => super.validate(db, value) && (value == null || value is DateTime); + bool validate(ModelDB db, Object value) => + super.validate(db, value) && (value == null || value is DateTime); Object decodePrimitiveValue(ModelDB db, Object value) { if (value is int) { - return - new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); + return new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, + isUtc: true); } return value; } } - /// A composed list [Property], with a `subProperty` for the list elements. /// /// It will validate that values are List objects before writing them to the @@ -242,14 +242,14 @@ class ListProperty extends Property { // TODO: We want to support optional list properties as well. // Get rid of "required: true" here. const ListProperty(this.subProperty, - {String propertyName, bool indexed: true}) + {String propertyName, bool indexed: true}) : super(propertyName: propertyName, required: true, indexed: indexed); bool validate(ModelDB db, Object value) { if (!super.validate(db, value) || value is! 
List) return false; for (var entry in value) { - if (!subProperty.validate(db, entry)) return false; + if (!subProperty.validate(db, entry)) return false; } return true; } @@ -280,8 +280,7 @@ class ListProperty extends Property { List list = value; if (list.length == 0) return null; if (list.length == 1) return subProperty.encodeValue(db, list[0]); - return list.map( - (value) => subProperty.encodeValue(db, value)).toList(); + return list.map((value) => subProperty.encodeValue(db, value)).toList(); } Object decodePrimitiveValue(ModelDB db, Object value) { @@ -297,5 +296,5 @@ class ListProperty extends Property { class StringListProperty extends ListProperty { const StringListProperty({String propertyName, bool indexed: true}) : super(const StringProperty(), - propertyName: propertyName, indexed: indexed); + propertyName: propertyName, indexed: indexed); } diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 38b88bfb..aa738253 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -70,11 +70,10 @@ class Transaction { 'as the partition where the query executes in.'); } _checkSealed(); - return new Query(db, - kind, - partition: partition, - ancestorKey: ancestorKey, - datastoreTransaction: _datastoreTransaction); + return new Query(db, kind, + partition: partition, + ancestorKey: ancestorKey, + datastoreTransaction: _datastoreTransaction); } /** @@ -91,18 +90,16 @@ class Transaction { Future commit() { _checkSealed(changeState: _TRANSACTION_COMMITTED); return _commitHelper(db, - inserts: _inserts, - deletes: _deletes, - datastoreTransaction: _datastoreTransaction); + inserts: _inserts, + deletes: _deletes, + datastoreTransaction: _datastoreTransaction); } _checkSealed({int changeState}) { if (_transactionState == _TRANSACTION_COMMITTED) { - throw new StateError( - 'The transaction has already been committed.'); + throw new StateError('The transaction has already been committed.'); } else if (_transactionState == _TRANSACTION_ROLLED_BACK) { - throw new StateError( - 'The transaction has already been rolled back.'); + throw new StateError('The transaction has already been rolled back.'); } if (changeState != null) { _transactionState = changeState; @@ -111,7 +108,7 @@ class Transaction { } class Query { - final _relationMapping = const { + final _relationMapping = const { '<': datastore.FilterRelation.LessThan, '<=': datastore.FilterRelation.LessThanOrEqual, '>': datastore.FilterRelation.GreatherThan, @@ -132,12 +129,14 @@ class Query { int _limit; Query(DatastoreDB dbImpl, Type kind, - {Partition partition, Key ancestorKey, - datastore.Transaction datastoreTransaction}) + {Partition partition, + Key ancestorKey, + datastore.Transaction datastoreTransaction}) : _db = dbImpl, _kind = dbImpl.modelDB.kindName(kind), _partition = partition, - _ancestorKey = ancestorKey, _transaction = datastoreTransaction; + _ancestorKey = ancestorKey, + _transaction = datastoreTransaction; /** * Adds a filter to this [Query]. @@ -156,8 +155,7 @@ class Query { void filter(String filterString, Object comparisonObject) { var parts = filterString.split(' '); if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) { - throw new ArgumentError( - "Invalid filter string '$filterString'."); + throw new ArgumentError("Invalid filter string '$filterString'."); } var name = parts[0]; @@ -168,8 +166,8 @@ class Query { // TODO: We should remove the condition in a major version update of // `package:gcloud`. if (comparisonObject is! 
datastore.Key) { - comparisonObject = _db.modelDB.toDatastoreValue(_kind, name, - comparisonObject, forComparison: true); + comparisonObject = _db.modelDB + .toDatastoreValue(_kind, name, comparisonObject, forComparison: true); } _filters.add(new datastore.Filter( _relationMapping[comparison], propertyName, comparisonObject)); @@ -184,12 +182,10 @@ class Query { void order(String orderString) { // TODO: validate [orderString] (e.g. is name valid) if (orderString.startsWith('-')) { - _orders.add(new datastore.Order( - datastore.OrderDirection.Decending, + _orders.add(new datastore.Order(datastore.OrderDirection.Decending, _convertToDatastoreName(orderString.substring(1)))); } else { - _orders.add(new datastore.Order( - datastore.OrderDirection.Ascending, + _orders.add(new datastore.Order(datastore.OrderDirection.Ascending, _convertToDatastoreName(orderString))); } } @@ -225,9 +221,12 @@ class Query { ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); } var query = new datastore.Query( - ancestorKey: ancestorKey, kind: _kind, - filters: _filters, orders: _orders, - offset: _offset, limit: _limit); + ancestorKey: ancestorKey, + kind: _kind, + filters: _filters, + orders: _orders, + offset: _offset, + limit: _limit); var partition; if (_partition != null) { @@ -235,8 +234,8 @@ class Query { } return new StreamFromPages((int pageSize) { - return _db.datastore.query( - query, transaction: _transaction, partition: partition); + return _db.datastore + .query(query, transaction: _transaction, partition: partition); }).stream.map(_db.modelDB.fromDatastoreEntity); } @@ -246,11 +245,9 @@ class Query { // API. String _convertToDatastoreName(String name) { - var propertyName = - _db.modelDB.fieldNameToPropertyName(_kind, name); + var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name); if (propertyName == null) { - throw new ArgumentError( - "Field $name is not available for kind $_kind"); + throw new ArgumentError("Field $name is not available for kind $_kind"); } return propertyName; } @@ -261,10 +258,10 @@ class DatastoreDB { final ModelDB _modelDB; Partition _defaultPartition; - DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) : - _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { - _defaultPartition = - defaultPartition != null ? defaultPartition : new Partition(null); + DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) + : _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { + _defaultPartition = + defaultPartition != null ? defaultPartition : new Partition(null); } /** @@ -300,7 +297,8 @@ class DatastoreDB { */ // TODO: Add retries and/or auto commit/rollback. 
Future withTransaction(TransactionHandler transactionHandler) { - return datastore.beginTransaction(crossEntityGroup: true) + return datastore + .beginTransaction(crossEntityGroup: true) .then((datastoreTransaction) { var transaction = new Transaction(this, datastoreTransaction); return transactionHandler(transaction); @@ -326,10 +324,8 @@ class DatastoreDB { 'Ancestor queries must have the same partition in the ancestor key ' 'as the partition where the query executes in.'); } - return new Query(this, - kind, - partition: partition, - ancestorKey: ancestorKey); + return new Query(this, kind, + partition: partition, ancestorKey: ancestorKey); } /** @@ -358,9 +354,9 @@ class DatastoreDB { } Future _commitHelper(DatastoreDB db, - {List inserts, - List deletes, - datastore.Transaction datastoreTransaction}) { + {List inserts, + List deletes, + datastore.Transaction datastoreTransaction}) { var entityInserts, entityAutoIdInserts, entityDeletes; var autoIdModelInserts; if (inserts != null) { @@ -386,10 +382,12 @@ Future _commitHelper(DatastoreDB db, entityDeletes = deletes.map(db.modelDB.toDatastoreKey).toList(); } - return db.datastore.commit(inserts: entityInserts, - autoIdInserts: entityAutoIdInserts, - deletes: entityDeletes, - transaction: datastoreTransaction) + return db.datastore + .commit( + inserts: entityInserts, + autoIdInserts: entityAutoIdInserts, + deletes: entityDeletes, + transaction: datastoreTransaction) .then((datastore.CommitResult result) { if (entityAutoIdInserts != null && entityAutoIdInserts.length > 0) { for (var i = 0; i < result.autoIdInsertKeys.length; i++) { @@ -401,11 +399,11 @@ Future _commitHelper(DatastoreDB db, }); } -Future> _lookupHelper( - DatastoreDB db, List keys, +Future> _lookupHelper(DatastoreDB db, List keys, {datastore.Transaction datastoreTransaction}) { var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList(); - return db.datastore.lookup(entityKeys, transaction: datastoreTransaction) + return db.datastore + .lookup(entityKeys, transaction: datastoreTransaction) .then((List entities) { return entities.map(db.modelDB.fromDatastoreEntity).toList(); }); diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index efda613d..63fc3e71 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -23,7 +23,7 @@ abstract class ModelDB { /** * Converts a [Model] instance to a [datastore.Entity]. */ - datastore.Entity toDatastoreEntity(Model model) ; + datastore.Entity toDatastoreEntity(Model model); /** * Converts a [datastore.Entity] to a [Model] instance. diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index dc21c759..1b3c7077 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -26,7 +26,8 @@ part of gcloud.db; /// they must have an empty default constructor which can be used to construct /// model objects when doing lookups/queries from datastore. 
class ModelDBImpl implements ModelDB { - final Map<_ModelDescription, Map> _modelDesc2Properties = {}; + final Map<_ModelDescription, Map> _modelDesc2Properties = + {}; final Map _kind2ModelDesc = {}; final Map<_ModelDescription, mirrors.ClassMirror> _modelDesc2ClassMirror = {}; final Map<_ModelDescription, Type> _type2ModelDesc = {}; @@ -97,8 +98,7 @@ class ModelDBImpl implements ModelDB { currentKey = currentKey.parent; } Partition partition = currentKey._parent; - return new datastore.Key( - elements.reversed.toList(), + return new datastore.Key(elements.reversed.toList(), partition: new datastore.Partition(partition.namespace)); } @@ -108,8 +108,7 @@ class ModelDBImpl implements ModelDB { var modelDescription = _modelDescriptionForType(model.runtimeType); return modelDescription.encodeModel(this, model); } catch (error, stack) { - throw - new ArgumentError('Error while encoding entity ($error, $stack).'); + throw new ArgumentError('Error while encoding entity ($error, $stack).'); } } @@ -161,8 +160,8 @@ class ModelDBImpl implements ModelDB { if (modelDescription == null) { throw new ArgumentError('The kind "$kind" is unknown.'); } - return modelDescription.encodeField( - this, fieldName, value, forComparison: forComparison); + return modelDescription.encodeField(this, fieldName, value, + forComparison: forComparison); } Iterable<_ModelDescription> get _modelDescriptions { @@ -182,7 +181,6 @@ class ModelDBImpl implements ModelDB { return _modelDesc2ClassMirror[md]; } - void _initialize(Iterable libraries) { libraries.forEach((mirrors.LibraryMirror lm) { lm.declarations.values @@ -203,8 +201,7 @@ class ModelDBImpl implements ModelDB { for (var modelDescription in _modelDescriptions) { var kindName = modelDescription.kindName(this); if (_kind2ModelDesc.containsKey(kindName)) { - throw new StateError( - 'Cannot have two ModelDescriptions ' + throw new StateError('Cannot have two ModelDescriptions ' 'with the same kind ($kindName)'); } _kind2ModelDesc[kindName] = modelDescription; @@ -235,16 +232,15 @@ class ModelDBImpl implements ModelDB { } // This constraint should be guaranteed by the Kind() const constructor. - assert ((integerId && !stringId) || (!integerId && stringId)); + assert((integerId && !stringId) || (!integerId && stringId)); _tryLoadNewModelClassFull(classMirror, name, integerId); } } - void _tryLoadNewModelClassFull(mirrors.ClassMirror modelClass, - String name, - bool useIntegerId) { - assert (!_modelDesc2Type.containsKey(modelClass.reflectedType)); + void _tryLoadNewModelClassFull( + mirrors.ClassMirror modelClass, String name, bool useIntegerId) { + assert(!_modelDesc2Type.containsKey(modelClass.reflectedType)); var modelDesc; if (_isExpandoClass(modelClass)) { @@ -287,8 +283,8 @@ class ModelDBImpl implements ModelDB { while (modelClassMirror.superclass != null) { var memberMap = modelClassMirror.instanceMembers; // Loop over all declarations (which includes fields) - modelClassMirror.declarations.forEach((Symbol fieldSymbol, - mirrors.DeclarationMirror decl) { + modelClassMirror.declarations + .forEach((Symbol fieldSymbol, mirrors.DeclarationMirror decl) { // Look if the symbol is a getter and we have metadata attached to it. 
if (memberMap.containsKey(fieldSymbol) && memberMap[fieldSymbol].isGetter && @@ -397,17 +393,17 @@ class _ModelDescription { _encodeProperty(db, model, mirror, properties, fieldName, prop); }); - return new datastore.Entity( - key, properties, unIndexedProperties: _unIndexedProperties); + return new datastore.Entity(key, properties, + unIndexedProperties: _unIndexedProperties); } _encodeProperty(ModelDBImpl db, Model model, mirrors.InstanceMirror mirror, - Map properties, String fieldName, Property prop) { + Map properties, String fieldName, Property prop) { String propertyName = prop.propertyName; if (propertyName == null) propertyName = fieldName; - var value = mirror.getField( - mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; + var value = + mirror.getField(mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; if (!prop.validate(db, value)) { throw new StateError('Property validation failed for ' 'property $fieldName while trying to serialize entity of kind ' @@ -434,8 +430,7 @@ class _ModelDescription { } _decodeProperty(ModelDBImpl db, datastore.Entity entity, - mirrors.InstanceMirror mirror, String fieldName, - Property prop) { + mirrors.InstanceMirror mirror, String fieldName, Property prop) { String propertyName = fieldNameToPropertyName(fieldName); var rawValue = entity.properties[propertyName]; @@ -459,8 +454,7 @@ class _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true, - bool forComparison: false}) { + {bool enforceFieldExists: true, bool forComparison: false}) { Property property = db._propertiesForModel(this)[fieldName]; if (property != null) { return property.encodeValue(db, value, forComparison: forComparison); @@ -539,8 +533,7 @@ class _ExpandoModelDescription extends _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true, - bool forComparison: false}) { + {bool enforceFieldExists: true, bool forComparison: false}) { // The [enforceFieldExists] argument is intentionally ignored. Object primitiveValue = super.encodeField(db, fieldName, value, diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 0419b286..336b1af9 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -28,7 +28,9 @@ class Key { } Key.emptyKey(Partition partition) - : _parent = partition, type = null, id = null; + : _parent = partition, + type = null, + id = null; /** * Parent of this [Key]. 
@@ -57,9 +59,8 @@ class Key { bool get isEmpty => _parent is Partition; - operator==(Object other) { - return - other is Key && + operator ==(Object other) { + return other is Key && _parent == other._parent && type == other.type && id == other.id; @@ -79,8 +80,7 @@ class Partition { Partition(this.namespace) { if (namespace == '') { - throw new ArgumentError( - 'The namespace must not be an empty string'); + throw new ArgumentError('The namespace must not be an empty string'); } } @@ -92,7 +92,7 @@ class Partition { */ Key get emptyKey => new Key.emptyKey(this); - operator==(Object other) { + operator ==(Object other) { return other is Partition && namespace == other.namespace; } diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 5f95f884..077ad3f8 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -11,21 +11,19 @@ class _PubSubImpl implements PubSub { final String _topicPrefix; final String _subscriptionPrefix; - _PubSubImpl(client, project) : - this._client = client, - this.project = project, - _api = new pubsub.PubsubApi(client), - _topicPrefix = 'projects/$project/topics/', - _subscriptionPrefix = 'projects/$project/subscriptions/'; - + _PubSubImpl(client, project) + : this._client = client, + this.project = project, + _api = new pubsub.PubsubApi(client), + _topicPrefix = 'projects/$project/topics/', + _subscriptionPrefix = 'projects/$project/subscriptions/'; String _fullTopicName(String name) { return name.startsWith('projects/') ? name : '${_topicPrefix}$name'; } String _fullSubscriptionName(name) { - return name.startsWith('projects/') ? name - : '${_subscriptionPrefix}$name'; + return name.startsWith('projects/') ? name : '${_subscriptionPrefix}$name'; } Future _createTopic(String name) { @@ -43,18 +41,18 @@ class _PubSubImpl implements PubSub { Future _listTopics( int pageSize, String nextPageToken) { - return _api.projects.topics.list( - 'projects/$project', pageSize: pageSize, pageToken: nextPageToken); + return _api.projects.topics.list('projects/$project', + pageSize: pageSize, pageToken: nextPageToken); } Future _createSubscription( String name, String topic, Uri endpoint) { var subscription = new pubsub.Subscription() - ..name = name - ..topic = topic; + ..name = name + ..topic = topic; if (endpoint != null) { - var pushConfig = - new pubsub.PushConfig()..pushEndpoint = endpoint.toString(); + var pushConfig = new pubsub.PushConfig() + ..pushEndpoint = endpoint.toString(); subscription.pushConfig = pushConfig; } return _api.projects.subscriptions.create(subscription, name); @@ -62,7 +60,8 @@ class _PubSubImpl implements PubSub { Future _deleteSubscription(String name) { // The Pub/Sub delete API returns an instance of Empty. - return _api.projects.subscriptions.delete(_fullSubscriptionName(name)) + return _api.projects.subscriptions + .delete(_fullSubscriptionName(name)) .then((_) => null); } @@ -72,24 +71,25 @@ class _PubSubImpl implements PubSub { Future _listSubscriptions( String topic, int pageSize, String nextPageToken) { - return _api.projects.subscriptions.list( - 'projects/$project', pageSize: pageSize, pageToken: nextPageToken); + return _api.projects.subscriptions.list('projects/$project', + pageSize: pageSize, pageToken: nextPageToken); } Future _modifyPushConfig(String subscription, Uri endpoint) { var pushConfig = new pubsub.PushConfig() - ..pushEndpoint = endpoint != null ? 
endpoint.toString() : null; - var request = new pubsub.ModifyPushConfigRequest() - ..pushConfig = pushConfig; + ..pushEndpoint = endpoint != null ? endpoint.toString() : null; + var request = new pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; return _api.projects.subscriptions.modifyPushConfig(request, subscription); } Future _publish( String topic, List message, Map attributes) { var request = new pubsub.PublishRequest() - ..messages = [(new pubsub.PubsubMessage() - ..dataAsBytes = message - ..attributes = attributes)]; + ..messages = [ + (new pubsub.PubsubMessage() + ..dataAsBytes = message + ..attributes = attributes) + ]; // TODO(sgjesse): Handle PublishResponse containing message ids. return _api.projects.topics.publish(request, topic).then((_) => null); } @@ -97,17 +97,17 @@ class _PubSubImpl implements PubSub { Future _pull( String subscription, bool returnImmediately) { var request = new pubsub.PullRequest() - ..maxMessages = 1 - ..returnImmediately = returnImmediately; + ..maxMessages = 1 + ..returnImmediately = returnImmediately; return _api.projects.subscriptions.pull(request, subscription); } Future _ack(String ackId, String subscription) { - var request = new pubsub.AcknowledgeRequest() - ..ackIds = [ ackId ]; + var request = new pubsub.AcknowledgeRequest()..ackIds = [ackId]; // The Pub/Sub acknowledge API returns an instance of Empty. - return _api.projects.subscriptions.acknowledge( - request, subscription).then((_) => null); + return _api.projects.subscriptions + .acknowledge(request, subscription) + .then((_) => null); } void _checkTopicName(name) { @@ -155,8 +155,9 @@ class _PubSubImpl implements PubSub { Stream listTopics() { Future> firstPage(pageSize) { return _listTopics(pageSize, null) - .then((response) => new _TopicPageImpl(this, pageSize, response)); + .then((response) => new _TopicPageImpl(this, pageSize, response)); } + return new StreamFromPages(firstPage).stream; } @@ -166,13 +167,12 @@ class _PubSubImpl implements PubSub { }); } - Future createSubscription( - String name, String topic, {Uri endpoint}) { + Future createSubscription(String name, String topic, + {Uri endpoint}) { _checkSubscriptionName(name); _checkTopicName(topic); - return _createSubscription(_fullSubscriptionName(name), - _fullTopicName(topic), - endpoint) + return _createSubscription( + _fullSubscriptionName(name), _fullTopicName(topic), endpoint) .then((sub) => new _SubscriptionImpl(this, sub)); } @@ -189,10 +189,10 @@ class _PubSubImpl implements PubSub { Stream listSubscriptions([String query]) { Future> firstPage(pageSize) { - return _listSubscriptions(query, pageSize, null) - .then((response) => - new _SubscriptionPageImpl(this, query, pageSize, response)); + return _listSubscriptions(query, pageSize, null).then((response) => + new _SubscriptionPageImpl(this, query, pageSize, response)); } + return new StreamFromPages(firstPage).stream; } @@ -279,8 +279,10 @@ class _PushMessage implements Message { class _PullEventImpl implements PullEvent { /// Pub/Sub API object. final _PubSubImpl _api; + /// Subscription this was received from. final String _subscriptionName; + /// Low level response received from Pub/Sub. final pubsub.PullResponse _response; final Message message; @@ -293,7 +295,6 @@ class _PullEventImpl implements PullEvent { Future acknowledge() { return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); } - } /// Push event received from Pub/Sub push delivery. 
@@ -389,19 +390,17 @@ class _SubscriptionImpl implements Subscription { Future delete() => _api._deleteSubscription(_subscription.name); Future pull({bool wait: true}) { - return _api._pull(_subscription.name, !wait) - .then((response) { - // The documentation says 'Returns an empty list if there are no - // messages available in the backlog'. However the receivedMessages - // property can also be null in that case. - if (response.receivedMessages == null || - response.receivedMessages.length == 0) { - return null; - } - return new _PullEventImpl(_api, _subscription.name, response); - }).catchError((e) => null, - test: (e) => e is pubsub.DetailedApiRequestError && - e.status == 400); + return _api._pull(_subscription.name, !wait).then((response) { + // The documentation says 'Returns an empty list if there are no + // messages available in the backlog'. However the receivedMessages + // property can also be null in that case. + if (response.receivedMessages == null || + response.receivedMessages.length == 0) { + return null; + } + return new _PullEventImpl(_api, _subscription.name, response); + }).catchError((e) => null, + test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); } Uri get endpoint => null; @@ -421,9 +420,7 @@ class _TopicPageImpl implements Page { final String _nextPageToken; final List items; - _TopicPageImpl(this._api, - this._pageSize, - pubsub.ListTopicsResponse response) + _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) : items = new List(response.topics.length), _nextPageToken = response.nextPageToken { for (int i = 0; i < response.topics.length; i++) { @@ -450,14 +447,11 @@ class _SubscriptionPageImpl implements Page { final String _nextPageToken; final List items; - _SubscriptionPageImpl(this._api, - this._topic, - this._pageSize, - pubsub.ListSubscriptionsResponse response) - : items = new List(response.subscriptions != null - ? response.subscriptions.length - : 0), - _nextPageToken = response.nextPageToken{ + _SubscriptionPageImpl(this._api, this._topic, this._pageSize, + pubsub.ListSubscriptionsResponse response) + : items = new List( + response.subscriptions != null ? 
response.subscriptions.length : 0), + _nextPageToken = response.nextPageToken { if (response.subscriptions != null) { for (int i = 0; i < response.subscriptions.length; i++) { items[i] = new _SubscriptionImpl(_api, response.subscriptions[i]); @@ -471,9 +465,10 @@ class _SubscriptionPageImpl implements Page { if (_nextPageToken == null) return new Future.value(null); if (pageSize == null) pageSize = this._pageSize; - return _api._listSubscriptions( - _topic, pageSize, _nextPageToken).then((response) { + return _api + ._listSubscriptions(_topic, pageSize, _nextPageToken) + .then((response) { return new _SubscriptionPageImpl(_api, _topic, pageSize, response); }); } -} \ No newline at end of file +} diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 0be49fe9..d6ea7f79 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -16,16 +16,16 @@ class _AbsoluteName { _AbsoluteName.parse(String absoluteName) { if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) { throw new FormatException("Absolute name '$absoluteName' does not start " - "with '$_ABSOLUTE_PREFIX'"); + "with '$_ABSOLUTE_PREFIX'"); } int index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); if (index == -1 || index == _ABSOLUTE_PREFIX.length) { throw new FormatException("Absolute name '$absoluteName' does not have " - "a bucket name"); + "a bucket name"); } if (index == absoluteName.length - 1) { throw new FormatException("Absolute name '$absoluteName' does not have " - "an object name"); + "an object name"); } bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); objectName = absoluteName.substring(index + 1); @@ -41,15 +41,14 @@ class _StorageImpl implements Storage { : _api = new storage_api.StorageApi(client); Future createBucket(String bucketName, - {PredefinedAcl predefinedAcl, Acl acl}) { + {PredefinedAcl predefinedAcl, Acl acl}) { var bucket = new storage_api.Bucket()..name = bucketName; var predefinedName = predefinedAcl != null ? 
predefinedAcl._name : null; if (acl != null) { bucket.acl = acl._toBucketAccessControlList(); } - return _api.buckets.insert(bucket, - project, - predefinedAcl: predefinedName) + return _api.buckets + .insert(bucket, project, predefinedAcl: predefinedName) .then((bucket) => null); } @@ -58,8 +57,7 @@ class _StorageImpl implements Storage { } Bucket bucket(String bucketName, - {PredefinedAcl defaultPredefinedObjectAcl, - Acl defaultObjectAcl}) { + {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}) { return new _BucketImpl( this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); } @@ -69,14 +67,15 @@ class _StorageImpl implements Storage { return e is storage_api.DetailedApiRequestError && e.status == 404; } - return _api.buckets.get(bucketName) + return _api.buckets + .get(bucketName) .then((_) => true) .catchError((e) => false, test: notFoundError); - } Future bucketInfo(String bucketName) { - return _api.buckets.get(bucketName, projection: 'full') + return _api.buckets + .get(bucketName, projection: 'full') .then((bucket) => new _BucketInfoImpl(bucket)); } @@ -85,6 +84,7 @@ class _StorageImpl implements Storage { return _listBuckets(pageSize, null) .then((response) => new _BucketPageImpl(this, pageSize, response)); } + return new StreamFromPages(firstPage).stream; } @@ -97,17 +97,15 @@ class _StorageImpl implements Storage { Future copyObject(String src, String dest) { var srcName = new _AbsoluteName.parse(src); var destName = new _AbsoluteName.parse(dest); - return _api.objects.copy(null, - srcName.bucketName, srcName.objectName, - destName.bucketName, destName.objectName) + return _api.objects + .copy(null, srcName.bucketName, srcName.objectName, destName.bucketName, + destName.objectName) .then((_) => null); } Future _listBuckets(int pageSize, String nextPageToken) { - return _api.buckets.list( - project, - maxResults: pageSize, - pageToken: nextPageToken); + return _api.buckets + .list(project, maxResults: pageSize, pageToken: nextPageToken); } } @@ -134,20 +132,20 @@ class _BucketImpl implements Bucket { Acl _defaultObjectAcl; final String bucketName; - _BucketImpl(_StorageImpl storage, - this.bucketName, - this._defaultPredefinedObjectAcl, - this._defaultObjectAcl) : - this._api = storage._api; + _BucketImpl(_StorageImpl storage, this.bucketName, + this._defaultPredefinedObjectAcl, this._defaultObjectAcl) + : this._api = storage._api; String absoluteObjectName(String objectName) { return '${_ABSOLUTE_PREFIX}$bucketName/$objectName'; } - StreamSink> write( - String objectName, - {int length, ObjectMetadata metadata, - Acl acl, PredefinedAcl predefinedAcl, String contentType}) { + StreamSink> write(String objectName, + {int length, + ObjectMetadata metadata, + Acl acl, + PredefinedAcl predefinedAcl, + String contentType}) { storage_api.Object object; if (metadata == null) { metadata = new _ObjectMetadata(acl: acl, contentType: contentType); @@ -156,7 +154,7 @@ class _BucketImpl implements Bucket { metadata = metadata.replace(acl: acl); } if (contentType != null) { - metadata = metadata.replace(contentType: contentType); + metadata = metadata.replace(contentType: contentType); } } _ObjectMetadata objectMetadata = metadata; @@ -183,13 +181,17 @@ class _BucketImpl implements Bucket { return sink; } - Future writeBytes( - String objectName, List bytes, + Future writeBytes(String objectName, List bytes, {ObjectMetadata metadata, - Acl acl, PredefinedAcl predefinedAcl, String contentType}) { - var sink = write(objectName, length: bytes.length, - metadata: metadata, 
acl: acl, predefinedAcl: predefinedAcl, - contentType: contentType); + Acl acl, + PredefinedAcl predefinedAcl, + String contentType}) { + var sink = write(objectName, + length: bytes.length, + metadata: metadata, + acl: acl, + predefinedAcl: predefinedAcl, + contentType: contentType); sink.add(bytes); return sink.close(); } @@ -208,8 +210,8 @@ class _BucketImpl implements Bucket { if (length != null) { if (length <= 0) { - throw new ArgumentError.value(length, 'length', - 'If provided, length must greater than zero.'); + throw new ArgumentError.value( + length, 'length', 'If provided, length must greater than zero.'); } // For ByteRange, end is *inclusive*. var end = offset + length - 1; @@ -218,14 +220,15 @@ class _BucketImpl implements Bucket { options = new storage_api.PartialDownloadOptions(range); } - var media = await _api.objects.get(bucketName, objectName, - downloadOptions: options); + var media = await _api.objects + .get(bucketName, objectName, downloadOptions: options); yield* media.stream; } Future info(String objectName) { - return _api.objects.get(bucketName, objectName, projection: 'full') + return _api.objects + .get(bucketName, objectName, projection: 'full') .then((object) => new _ObjectInfoImpl(object)); } @@ -236,18 +239,19 @@ class _BucketImpl implements Bucket { Stream list({String prefix}) { Future> firstPage(pageSize) { return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) - .then((response) => new _ObjectPageImpl( - this, prefix, pageSize, response)); + .then((response) => + new _ObjectPageImpl(this, prefix, pageSize, response)); } + return new StreamFromPages(firstPage).stream; } Future> page({String prefix, int pageSize: 50}) { return _listObjects( - bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) + bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) .then((response) { - return new _ObjectPageImpl(this, prefix, pageSize, response); - }); + return new _ObjectPageImpl(this, prefix, pageSize, response); + }); } Future updateMetadata(String objectName, ObjectMetadata metadata) { @@ -266,11 +270,9 @@ class _BucketImpl implements Bucket { return _api.objects.update(object, bucketName, objectName); } - Future _listObjects( - String bucketName, String prefix, String delimiter, - int pageSize, String nextPageToken) { - return _api.objects.list( - bucketName, + Future _listObjects(String bucketName, String prefix, + String delimiter, int pageSize, String nextPageToken) { + return _api.objects.list(bucketName, prefix: prefix, delimiter: delimiter, maxResults: pageSize, @@ -312,10 +314,8 @@ class _ObjectPageImpl implements Page { final List items; _ObjectPageImpl( - this._bucket, this._prefix, this._pageSize, - storage_api.Objects response) - : items = new List( - (response.items != null ? response.items.length : 0) + + this._bucket, this._prefix, this._pageSize, storage_api.Objects response) + : items = new List((response.items != null ? response.items.length : 0) + (response.prefixes != null ? 
response.prefixes.length : 0)), _nextPageToken = response.nextPageToken { var prefixes = 0; @@ -338,14 +338,11 @@ class _ObjectPageImpl implements Page { if (isLast) return new Future.value(null); if (pageSize == null) pageSize = this._pageSize; - return _bucket._listObjects( - _bucket.bucketName, - _prefix, - _DIRECTORY_DELIMITER, - pageSize, - _nextPageToken).then((response) { - return new _ObjectPageImpl( - _bucket, _prefix, pageSize, response); + return _bucket + ._listObjects(_bucket.bucketName, _prefix, _DIRECTORY_DELIMITER, + pageSize, _nextPageToken) + .then((response) { + return new _ObjectPageImpl(_bucket, _prefix, pageSize, response); }); } } @@ -363,20 +360,21 @@ class _ObjectInfoImpl implements ObjectInfo { Uri _downloadLink; ObjectGeneration _generation; - _ObjectInfoImpl(storage_api.Object object) : - _object = object, _metadata = new _ObjectMetadata._(object); + _ObjectInfoImpl(storage_api.Object object) + : _object = object, + _metadata = new _ObjectMetadata._(object); String get name => _object.name; int get length => int.parse(_object.size); - DateTime get updated => _object.updated; + DateTime get updated => _object.updated; String get etag => _object.etag; List get md5Hash => BASE64.decode(_object.md5Hash); - int get crc32CChecksum { + int get crc32CChecksum { var list = BASE64.decode(_object.crc32c); return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; } @@ -406,13 +404,14 @@ class _ObjectMetadata implements ObjectMetadata { ObjectGeneration _cachedGeneration; Map _cachedCustom; - _ObjectMetadata({Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}) + _ObjectMetadata( + {Acl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}) : _object = new storage_api.Object() { _object.acl = acl != null ? acl._toObjectAccessControlList() : null; _object.contentType = contentType; @@ -458,23 +457,25 @@ class _ObjectMetadata implements ObjectMetadata { return _cachedCustom; } - ObjectMetadata replace({Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}) { + ObjectMetadata replace( + {Acl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, + Map custom}) { return new _ObjectMetadata( acl: acl != null ? acl : this.acl, contentType: contentType != null ? contentType : this.contentType, - contentEncoding: contentEncoding != null ? contentEncoding - : this.contentEncoding, + contentEncoding: + contentEncoding != null ? contentEncoding : this.contentEncoding, cacheControl: cacheControl != null ? cacheControl : this.cacheControl, - contentDisposition: contentDisposition != null ? contentDisposition - : this.contentEncoding, - contentLanguage: contentLanguage != null ? contentLanguage - : this.contentEncoding, + contentDisposition: contentDisposition != null + ? contentDisposition + : this.contentEncoding, + contentLanguage: + contentLanguage != null ? contentLanguage : this.contentEncoding, custom: custom != null ? 
new Map.from(custom) : this.custom); } } @@ -503,9 +504,8 @@ class _MediaUploadStreamSink implements StreamSink> { static const int _STATE_DECIDED_RESUMABLE = 2; int _state; - _MediaUploadStreamSink( - this._api, this._bucketName, this._objectName, this._object, - this._predefinedAcl, this._length, + _MediaUploadStreamSink(this._api, this._bucketName, this._objectName, + this._object, this._predefinedAcl, this._length, [this._maxNormalUploadLength = _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH]) { if (_length != null) { // If the length is known in advance decide on the upload strategy @@ -520,8 +520,8 @@ class _MediaUploadStreamSink implements StreamSink> { _state = _STATE_PROBING_LENGTH; // If the length is not known in advance decide on the upload strategy // later. Start buffering until enough data has been read to decide. - _subscription = _controller.stream.listen( - _onData, onDone: _onDone, onError: _onError); + _subscription = _controller.stream + .listen(_onData, onDone: _onDone, onError: _onError); } } @@ -593,31 +593,33 @@ class _MediaUploadStreamSink implements StreamSink> { void _startNormalUpload(Stream stream, int length) { var contentType = _object.contentType != null - ? _object.contentType : 'application/octet-stream'; + ? _object.contentType + : 'application/octet-stream'; var media = new storage_api.Media(stream, length, contentType: contentType); - _api.objects.insert(_object, - _bucketName, - name: _objectName, - predefinedAcl: _predefinedAcl, - uploadMedia: media, - uploadOptions: storage_api.UploadOptions.Default) + _api.objects + .insert(_object, _bucketName, + name: _objectName, + predefinedAcl: _predefinedAcl, + uploadMedia: media, + uploadOptions: storage_api.UploadOptions.Default) .then((response) { - _doneCompleter.complete(new _ObjectInfoImpl(response)); - }, onError: _completeError); + _doneCompleter.complete(new _ObjectInfoImpl(response)); + }, onError: _completeError); } void _startResumableUpload(Stream stream, int length) { var contentType = _object.contentType != null - ? _object.contentType : 'application/octet-stream'; + ? 
_object.contentType + : 'application/octet-stream'; var media = new storage_api.Media(stream, length, contentType: contentType); - _api.objects.insert(_object, - _bucketName, - name: _objectName, - predefinedAcl: _predefinedAcl, - uploadMedia: media, - uploadOptions: storage_api.UploadOptions.Resumable) + _api.objects + .insert(_object, _bucketName, + name: _objectName, + predefinedAcl: _predefinedAcl, + uploadMedia: media, + uploadOptions: storage_api.UploadOptions.Resumable) .then((response) { - _doneCompleter.complete(new _ObjectInfoImpl(response)); - }, onError: _completeError); + _doneCompleter.complete(new _ObjectInfoImpl(response)); + }, onError: _completeError); } } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index c525b9f6..dc9fec4d 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -122,7 +122,7 @@ class Acl { if (bucket.acl != null) { for (int i = 0; i < bucket.acl.length; i++) { _entries[i] = new AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), - _aclPermissionFromRole(bucket.acl[i].role)); + _aclPermissionFromRole(bucket.acl[i].role)); } } } @@ -132,7 +132,7 @@ class Acl { if (object.acl != null) { for (int i = 0; i < object.acl.length; i++) { _entries[i] = new AclEntry(_aclScopeFromEntity(object.acl[i].entity), - _aclPermissionFromRole(object.acl[i].role)); + _aclPermissionFromRole(object.acl[i].role)); } } } @@ -158,8 +158,8 @@ class Acl { String tmp = entity.substring(8); int dash = tmp.indexOf('-'); if (dash != -1) { - return new ProjectScope(tmp.substring(dash + 1), - tmp.substring(0, dash)); + return new ProjectScope( + tmp.substring(dash + 1), tmp.substring(0, dash)); } } return new OpaqueScope(entity); @@ -187,7 +187,7 @@ class Acl { : _cachedHashCode = _jenkinsHash(_entries); } - bool operator==(Object other) { + bool operator ==(Object other) { if (other is Acl) { List entries = _entries; List otherEntries = other._entries; @@ -234,10 +234,10 @@ class AclEntry { : _cachedHashCode = _jenkinsHash([scope, permission]); } - bool operator==(Object other) { + bool operator ==(Object other) { return other is AclEntry && - scope == other.scope && - permission == other.permission; + scope == other.scope && + permission == other.permission; } String toString() => 'AclEntry($scope, $permission)'; @@ -303,7 +303,7 @@ abstract class AclScope { : _cachedHashCode = _jenkinsHash([_type, _id]); } - bool operator==(Object other) { + bool operator ==(Object other) { return other is AclScope && _type == other._type && _id == other._id; } @@ -328,7 +328,7 @@ class StorageIdScope extends AclScope { /// An ACL scope for an entity identified by an individual email address. class AccountScope extends AclScope { - AccountScope(String email): super._(AclScope._TYPE_ACCOUNT, email); + AccountScope(String email) : super._(AclScope._TYPE_ACCOUNT, email); /// Email address. String get email => _id; @@ -338,7 +338,7 @@ class AccountScope extends AclScope { /// An ACL scope for an entity identified by an Google Groups email. class GroupScope extends AclScope { - GroupScope(String group): super._(AclScope._TYPE_GROUP, group); + GroupScope(String group) : super._(AclScope._TYPE_GROUP, group); /// Group name. String get group => _id; @@ -348,7 +348,7 @@ class GroupScope extends AclScope { /// An ACL scope for an entity identified by a domain name. class DomainScope extends AclScope { - DomainScope(String domain): super._(AclScope._TYPE_DOMAIN, domain); + DomainScope(String domain) : super._(AclScope._TYPE_DOMAIN, domain); /// Domain name. 
String get domain => _id; @@ -381,15 +381,14 @@ class OpaqueScope extends AclScope { /// ACL scope for a all authenticated users. class AllAuthenticatedScope extends AclScope { - AllAuthenticatedScope() - : super._(AclScope._TYPE_ALL_AUTHENTICATED, null); + AllAuthenticatedScope() : super._(AclScope._TYPE_ALL_AUTHENTICATED, null); String get _storageEntity => 'allAuthenticatedUsers'; } /// ACL scope for a all users. class AllUsersScope extends AclScope { - AllUsersScope(): super._(AclScope._TYPE_ALL_USERS, null); + AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, null); String get _storageEntity => 'allUsers'; } @@ -419,7 +418,7 @@ class AclPermission { int get hashCode => _id.hashCode; - bool operator==(Object other) { + bool operator ==(Object other) { return other is AclPermission && _id == other._id; } @@ -495,8 +494,9 @@ abstract class BucketInfo { /// Access to Cloud Storage abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. - static const List SCOPES = - const [storage_api.StorageApi.DevstorageFullControlScope]; + static const List SCOPES = const [ + storage_api.StorageApi.DevstorageFullControlScope + ]; /// Initializes access to cloud storage. factory Storage(http.Client client, String project) = _StorageImpl; @@ -511,7 +511,7 @@ abstract class Storage { /// /// Returns a [Future] which completes when the bucket has been created. Future createBucket(String bucketName, - {PredefinedAcl predefinedAcl, Acl acl}); + {PredefinedAcl predefinedAcl, Acl acl}); /// Delete a cloud storage bucket. /// @@ -539,8 +539,7 @@ abstract class Storage { /// /// Returns a `Bucket` instance. Bucket bucket(String bucketName, - {PredefinedAcl defaultPredefinedObjectAcl, - Acl defaultObjectAcl}); + {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}); /// Check whether a cloud storage bucket exists. /// @@ -629,9 +628,15 @@ class ObjectGeneration { /// Access to object metadata. abstract class ObjectMetadata { - factory ObjectMetadata({Acl acl, String contentType, String contentEncoding, - String cacheControl, String contentDisposition, String contentLanguage, + factory ObjectMetadata( + {Acl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, Map custom}) = _ObjectMetadata; + /// ACL. Acl get acl; @@ -658,8 +663,13 @@ abstract class ObjectMetadata { /// Create a copy of this object with some values replaced. /// // TODO: This cannot be used to set values to null. - ObjectMetadata replace({Acl acl, String contentType, String contentEncoding, - String cacheControl, String contentDisposition, String contentLanguage, + ObjectMetadata replace( + {Acl acl, + String contentType, + String contentEncoding, + String cacheControl, + String contentDisposition, + String contentLanguage, Map custom}); } @@ -720,8 +730,11 @@ abstract class Bucket { /// The object content has been written the `StreamSink` completes with /// an `ObjectInfo` instance with the information on the object created. StreamSink> write(String objectName, - {int length, ObjectMetadata metadata, - Acl acl, PredefinedAcl predefinedAcl, String contentType}); + {int length, + ObjectMetadata metadata, + Acl acl, + PredefinedAcl predefinedAcl, + String contentType}); /// Create an new object in the bucket with specified content. /// @@ -733,7 +746,9 @@ abstract class Bucket { /// the object is written. 
Future writeBytes(String name, List bytes, {ObjectMetadata metadata, - Acl acl, PredefinedAcl predefinedAcl, String contentType}); + Acl acl, + PredefinedAcl predefinedAcl, + String contentType}); /// Read object content as byte stream. /// diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index d9bcb277..4cf3bbd8 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -13,9 +13,7 @@ import 'package:unittest/unittest.dart'; const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; -const RESPONSE_HEADERS = const { - 'content-type': CONTENT_TYPE_JSON_UTF8 -}; +const RESPONSE_HEADERS = const {'content-type': CONTENT_TYPE_JSON_UTF8}; class MockClient extends http.BaseClient { static const bytes = const [1, 2, 3, 4, 5]; @@ -29,15 +27,15 @@ class MockClient extends http.BaseClient { Map> mocks = {}; http_testing.MockClient client; - MockClient(String hostname, String rootPath) : - hostname = hostname, - rootPath = rootPath, - rootUri = Uri.parse('https://$hostname$rootPath') { + MockClient(String hostname, String rootPath) + : hostname = hostname, + rootPath = rootPath, + rootUri = Uri.parse('https://$hostname$rootPath') { client = new http_testing.MockClient(handler); } - void register(String method, Pattern path, - http_testing.MockClientHandler handler) { + void register( + String method, Pattern path, http_testing.MockClientHandler handler) { var map = mocks.putIfAbsent(method, () => new Map()); if (path is RegExp) { map[new RegExp('$rootPath${path.pattern}')] = handler; @@ -46,14 +44,14 @@ class MockClient extends http.BaseClient { } } - void registerUpload(String method, Pattern path, - http_testing.MockClientHandler handler) { + void registerUpload( + String method, Pattern path, http_testing.MockClientHandler handler) { var map = mocks.putIfAbsent(method, () => new Map()); map['/upload$rootPath$path'] = handler; } - void registerResumableUpload(String method, Pattern path, - http_testing.MockClientHandler handler) { + void registerResumableUpload( + String method, Pattern path, http_testing.MockClientHandler handler) { var map = mocks.putIfAbsent(method, () => new Map()); map['/resumable/upload$rootPath$path'] = handler; } @@ -87,11 +85,9 @@ class MockClient extends http.BaseClient { } Future respond(response) { - return new Future.value( - new http.Response( - JSON.encode(response.toJson()), - 200, - headers: RESPONSE_HEADERS)); + return new Future.value(new http.Response( + JSON.encode(response.toJson()), 200, + headers: RESPONSE_HEADERS)); } Future respondEmpty() { @@ -101,13 +97,11 @@ class MockClient extends http.BaseClient { Future respondInitiateResumableUpload(project) { Map headers = new Map.from(RESPONSE_HEADERS); - headers['location'] = - 'https://www.googleapis.com/resumable/upload$rootPath' + headers['location'] = 'https://www.googleapis.com/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' 'Mu5bEoC9b4teJcJUKpqceCUeqKzuoP_jz2ps_dV0P0nT8OTuZQ'; - return new Future.value( - new http.Response('', 200, headers: headers)); + return new Future.value(new http.Response('', 200, headers: headers)); } Future respondContinueResumableUpload() { @@ -138,54 +132,51 @@ class MockClient extends http.BaseClient { Future respondError(statusCode) { var error = { - 'error': { - 'code': statusCode, - 'message': 'error' - } + 'error': {'code': statusCode, 'message': 'error'} }; - return new Future.value( - new http.Response( - JSON.encode(error), statusCode, 
headers: RESPONSE_HEADERS)); + return new Future.value(new http.Response(JSON.encode(error), statusCode, + headers: RESPONSE_HEADERS)); } Future processNormalMediaUpload(http.Request request) { var completer = new Completer(); - var contentType = new http_parser.MediaType.parse( - request.headers['content-type']); + var contentType = + new http_parser.MediaType.parse(request.headers['content-type']); expect(contentType.mimeType, 'multipart/related'); var boundary = contentType.parameters['boundary']; var partCount = 0; var json; - new Stream.fromIterable([request.bodyBytes, [13, 10]]) + new Stream.fromIterable([ + request.bodyBytes, + [13, 10] + ]) .transform(new mime.MimeMultipartTransformer(boundary)) - .listen( - ((mime.MimeMultipart mimeMultipart) { - var contentType = mimeMultipart.headers['content-type']; - partCount++; - if (partCount == 1) { - // First part in the object JSON. - expect(contentType, 'application/json; charset=utf-8'); - mimeMultipart - .transform(UTF8.decoder) - .fold('', (p, e) => '$p$e') - .then((j) => json = j); - } else if (partCount == 2) { - // Second part is the base64 encoded bytes. - mimeMultipart - .transform(ASCII.decoder) - .fold('', (p, e) => '$p$e') - .then(BASE64.decode) - .then((bytes) { - completer.complete( - new NormalMediaUpload(json, bytes, contentType)); - }); - } else { - // Exactly two parts expected. - throw 'Unexpected part count'; - } - })); + .listen(((mime.MimeMultipart mimeMultipart) { + var contentType = mimeMultipart.headers['content-type']; + partCount++; + if (partCount == 1) { + // First part in the object JSON. + expect(contentType, 'application/json; charset=utf-8'); + mimeMultipart + .transform(UTF8.decoder) + .fold('', (p, e) => '$p$e') + .then((j) => json = j); + } else if (partCount == 2) { + // Second part is the base64 encoded bytes. + mimeMultipart + .transform(ASCII.decoder) + .fold('', (p, e) => '$p$e') + .then(BASE64.decode) + .then((bytes) { + completer.complete(new NormalMediaUpload(json, bytes, contentType)); + }); + } else { + // Exactly two parts expected. + throw 'Unexpected part count'; + } + })); return completer.future; } @@ -219,10 +210,8 @@ class TraceClient extends http.BaseClient { print(UTF8.decode(body)); print('--- END RESPONSE ---'); return new http.StreamedResponse( - new http.ByteStream.fromBytes(body), - rr.statusCode, + new http.ByteStream.fromBytes(body), rr.statusCode, headers: rr.headers); - }); }); }); @@ -237,8 +226,7 @@ class TraceClient extends http.BaseClient { class RequestImpl extends http.BaseRequest { final List _body; - RequestImpl(String method, Uri url, this._body) - : super(method, url); + RequestImpl(String method, Uri url, this._body) : super(method, url); http.ByteStream finalize() { super.finalize(); diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index bbc56f18..c3ab8d8c 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -36,7 +36,6 @@ const String DEFAULT_KEY_LOCATION = // attempt to account for that. const STORAGE_LIST_DELAY = const Duration(seconds: 5); - bool onBot() { // When running on the package-bot the current user is chrome-bot. 
var envName; @@ -72,9 +71,8 @@ Future serviceKeyJson(String serviceKeyLocation) { typedef Future AuthCallback(String project, http.Client client); -Future withAuthClient(List scopes, - AuthCallback callback, - {bool trace: false}) { +Future withAuthClient(List scopes, AuthCallback callback, + {bool trace: false}) { String project = Platform.environment[PROJECT_ENV]; String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; @@ -114,7 +112,8 @@ class E2EConfiguration extends SimpleConfiguration { onDone(success) { new Future.sync(() { super.onDone(success); - }).then((_) => _completer.complete(_)) - .catchError((error, stack) => _completer.completeError(error, stack)); + }) + .then((_) => _completer.complete(_)) + .catchError((error, stack) => _completer.completeError(error, stack)); } } diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 8704adf9..ed99a246 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -26,7 +26,6 @@ library datastore_test; /// 02:19 PM Host: appengine.google.com /// 02:19 PM Uploading index definitions. - import 'dart:async'; import 'package:gcloud/datastore.dart'; @@ -56,37 +55,40 @@ runTests(Datastore datastore, String namespace) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } - Future> insert(List entities, - List autoIdEntities, - {bool transactional: true}) { + Future> insert(List entities, List autoIdEntities, + {bool transactional: true}) { if (transactional) { return withTransaction((Transaction transaction) { - return datastore.commit(inserts: entities, - autoIdInserts: autoIdEntities, - transaction: transaction).then((result) { + return datastore + .commit( + inserts: entities, + autoIdInserts: autoIdEntities, + transaction: transaction) + .then((result) { if (autoIdEntities != null && autoIdEntities.length > 0) { - expect(result.autoIdInsertKeys.length, - equals(autoIdEntities.length)); + expect( + result.autoIdInsertKeys.length, equals(autoIdEntities.length)); } return result.autoIdInsertKeys; }); }, xg: true); } else { - return datastore.commit(inserts: entities, autoIdInserts: autoIdEntities) + return datastore + .commit(inserts: entities, autoIdInserts: autoIdEntities) .then((result) { - if (autoIdEntities != null && autoIdEntities.length > 0) { - expect(result.autoIdInsertKeys.length, - equals(autoIdEntities.length)); - } - return result.autoIdInsertKeys; - }); + if (autoIdEntities != null && autoIdEntities.length > 0) { + expect(result.autoIdInsertKeys.length, equals(autoIdEntities.length)); + } + return result.autoIdInsertKeys; + }); } } Future delete(List keys, {bool transactional: true}) { if (transactional) { return withTransaction((Transaction t) { - return datastore.commit(deletes: keys, transaction: t) + return datastore + .commit(deletes: keys, transaction: t) .then((result) => null); }, xg: true); } else { @@ -166,8 +168,8 @@ runTests(Datastore datastore, String namespace) { Future> testInsert(List entities, {bool transactional: false, bool xg: false, bool unnamed: true}) { Future> test(Transaction transaction) { - return datastore.commit(autoIdInserts: entities, - transaction: transaction) + return datastore + .commit(autoIdInserts: entities, transaction: transaction) .then((CommitResult result) { expect(result.autoIdInsertKeys.length, equals(entities.length)); @@ -175,8 +177,8 @@ runTests(Datastore datastore, String namespace) { var key = 
result.autoIdInsertKeys[i]; expect(isValidKey(key), isTrue); if (unnamed) { - expect(compareKey(key, entities[i].key, ignoreIds: true), - isTrue); + expect( + compareKey(key, entities[i].key, ignoreIds: true), isTrue); } else { expect(compareKey(key, entities[i].key), isTrue); } @@ -194,9 +196,10 @@ runTests(Datastore datastore, String namespace) { Future> testInsertNegative(List entities, {bool transactional: false, bool xg: false}) { test(Transaction transaction) { - expect(datastore.commit(autoIdInserts: entities, - transaction: transaction), - throwsA(isApplicationError)); + expect( + datastore.commit( + autoIdInserts: entities, transaction: transaction), + throwsA(isApplicationError)); } if (transactional) { @@ -208,9 +211,8 @@ runTests(Datastore datastore, String namespace) { var unnamedEntities1 = buildEntities(42, 43, partition: partition); var unnamedEntities5 = buildEntities(1, 6, partition: partition); var unnamedEntities26 = buildEntities(6, 32, partition: partition); - var named20000 = buildEntities( - 1000, 21001, idFunction: (i) => 'named_${i}_of_10000', - partition: partition); + var named20000 = buildEntities(1000, 21001, + idFunction: (i) => 'named_${i}_of_10000', partition: partition); test('insert', () { return testInsert(unnamedEntities5, transactional: false).then((keys) { @@ -233,8 +235,8 @@ runTests(Datastore datastore, String namespace) { }); test('insert_transactional_xg', () { - return testInsert( - unnamedEntities5, transactional: true, xg: true).then((keys) { + return testInsert(unnamedEntities5, transactional: true, xg: true) + .then((keys) { return delete(keys).then((_) { return lookup(keys).then((List entities) { entities.forEach((Entity e) => expect(e, isNull)); @@ -245,19 +247,18 @@ runTests(Datastore datastore, String namespace) { test('negative_insert__incomplete_path', () { expect(() => datastore.commit(inserts: unnamedEntities1), - throwsA(isApplicationError)); + throwsA(isApplicationError)); }); test('negative_insert_transactional_xg', () { - return testInsertNegative( - unnamedEntities26, transactional: true, xg: true); + return testInsertNegative(unnamedEntities26, + transactional: true, xg: true); }); test('negative_insert_20000_entities', () { // Maybe it should not be a [DataStoreError] here? // FIXME/TODO: This was adapted - expect(datastore.commit(inserts: named20000), - throws); + expect(datastore.commit(inserts: named20000), throws); }); // TODO: test invalid inserts (like entities without key, ...) 
@@ -272,15 +273,15 @@ runTests(Datastore datastore, String namespace) { var completedKey = completedKeys[i]; expect(completedKey.elements.length, - equals(insertedKey.elements.length)); + equals(insertedKey.elements.length)); for (int j = 0; j < insertedKey.elements.length - 1; j++) { expect(completedKey.elements[j], equals(insertedKey.elements[j])); } for (int j = insertedKey.elements.length - 1; - j < insertedKey.elements.length; - j++) { + j < insertedKey.elements.length; + j++) { expect(completedKey.elements[j].kind, - equals(insertedKey.elements[j].kind)); + equals(insertedKey.elements[j].kind)); expect(completedKey.elements[j].id, isNotNull); expect(completedKey.elements[j].id, isInt); } @@ -297,22 +298,21 @@ runTests(Datastore datastore, String namespace) { }); group('lookup', () { - Future testLookup(List keysToLookup, - List entitiesToLookup, - {bool transactional: false, - bool xg: false, - bool negative: false, - bool named: false}) { + Future testLookup(List keysToLookup, List entitiesToLookup, + {bool transactional: false, + bool xg: false, + bool negative: false, + bool named: false}) { expect(keysToLookup.length, equals(entitiesToLookup.length)); for (var i = 0; i < keysToLookup.length; i++) { - expect(compareKey(keysToLookup[i], - entitiesToLookup[i].key, - ignoreIds: !named), isTrue); + expect( + compareKey(keysToLookup[i], entitiesToLookup[i].key, + ignoreIds: !named), + isTrue); } Future test(Transaction transaction) { - return datastore.lookup(keysToLookup) - .then((List entities) { + return datastore.lookup(keysToLookup).then((List entities) { expect(entities.length, equals(keysToLookup.length)); if (negative) { for (int i = 0; i < entities.length; i++) { @@ -321,14 +321,16 @@ runTests(Datastore datastore, String namespace) { } else { for (var i = 0; i < entities.length; i++) { expect(compareKey(entities[i].key, keysToLookup[i]), isTrue); - expect(compareEntity(entities[i], - entitiesToLookup[i], - ignoreIds: !named), isTrue); + expect( + compareEntity(entities[i], entitiesToLookup[i], + ignoreIds: !named), + isTrue); } } if (transaction != null) { - return - datastore.commit(transaction: transaction).then((_) => null); + return datastore + .commit(transaction: transaction) + .then((_) => null); } }); } @@ -368,15 +370,15 @@ runTests(Datastore datastore, String namespace) { return insert([], unnamedEntities1).then((keys) { keys.forEach((key) => expect(isValidKey(key), isTrue)); return testLookup(keys, unnamedEntities1, transactional: true) - .then((_) => delete(keys)); + .then((_) => delete(keys)); }); }); test('lookup_transactional_xg', () { return insert([], unnamedEntities5).then((keys) { keys.forEach((key) => expect(isValidKey(key), isTrue)); - return testLookup( - keys, unnamedEntities5, transactional: true, xg: true).then((_) { + return testLookup(keys, unnamedEntities5, + transactional: true, xg: true).then((_) { return delete(keys); }); }); @@ -387,7 +389,7 @@ runTests(Datastore datastore, String namespace) { group('delete', () { Future testDelete(List keys, - {bool transactional: false, bool xg: false}) { + {bool transactional: false, bool xg: false}) { Future test(Transaction transaction) { return datastore.commit(deletes: keys).then((_) { if (transaction != null) { @@ -411,7 +413,7 @@ runTests(Datastore datastore, String namespace) { entities.forEach((e) => expect(e, isNotNull)); return testDelete(keys).then((_) { return lookup(keys, transactional: false).then((entities) { - entities.forEach((e) => expect(e, isNull)); + entities.forEach((e) => expect(e, 
isNull)); }); }); }); @@ -456,7 +458,8 @@ runTests(Datastore datastore, String namespace) { group('rollback', () { Future testRollback(List keys, {bool xg: false}) { return withTransaction((Transaction transaction) { - return datastore.lookup(keys, transaction: transaction) + return datastore + .lookup(keys, transaction: transaction) .then((List entities) { return datastore.rollback(transaction); }); @@ -481,10 +484,11 @@ runTests(Datastore datastore, String namespace) { }); group('empty_commit', () { - Future testEmptyCommit( - List keys, {bool transactional: false, bool xg: false}) { + Future testEmptyCommit(List keys, + {bool transactional: false, bool xg: false}) { Future test(Transaction transaction) { - return datastore.lookup(keys, transaction: transaction) + return datastore + .lookup(keys, transaction: transaction) .then((List entities) { return datastore.commit(transaction: transaction); }); @@ -531,11 +535,9 @@ runTests(Datastore datastore, String namespace) { }); group('conflicting_transaction', () { - Future testConflictingTransaction( - List entities, {bool xg: false}) { - Future test( - List entities, Transaction transaction, value) { - + Future testConflictingTransaction(List entities, + {bool xg: false}) { + Future test(List entities, Transaction transaction, value) { // Change entities: var changedEntities = new List(entities.length); for (int i = 0; i < entities.length; i++) { @@ -544,11 +546,10 @@ runTests(Datastore datastore, String namespace) { for (var prop in newProperties.keys) { newProperties[prop] = "${newProperties[prop]}conflict$value"; } - changedEntities[i] = - new Entity(entity.key, newProperties); + changedEntities[i] = new Entity(entity.key, newProperties); } - return datastore.commit(inserts: changedEntities, - transaction: transaction); + return datastore.commit( + inserts: changedEntities, transaction: transaction); } // Insert first @@ -562,13 +563,13 @@ runTests(Datastore datastore, String namespace) { for (var i = 0; i < NUM_TRANSACTIONS; i++) { transactions.add(datastore.beginTransaction(crossEntityGroup: xg)); } - return Future.wait(transactions) + return Future + .wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction var lookups = []; for (var transaction in transactions) { - lookups.add( - datastore.lookup(keys, transaction: transaction)); + lookups.add(datastore.lookup(keys, transaction: transaction)); } return Future.wait(lookups).then((List> results) { // Do a conflicting commit in every transaction. 
@@ -590,31 +591,35 @@ runTests(Datastore datastore, String namespace) { test('conflicting_transaction', () { expect(testConflictingTransaction(namedEntities1), - throwsA(isTransactionAbortedError)); + throwsA(isTransactionAbortedError)); }); test('conflicting_transaction_xg', () { expect(testConflictingTransaction(namedEntities5, xg: true), - throwsA(isTransactionAbortedError)); + throwsA(isTransactionAbortedError)); }); }); group('query', () { Future testQuery(String kind, - {List filters, - List orders, - bool transactional: false, - bool xg: false, - int offset, - int limit}) { + {List filters, + List orders, + bool transactional: false, + bool xg: false, + int offset, + int limit}) { Future> test(Transaction transaction) { var query = new Query( - kind: kind, filters: filters, orders: orders, - offset: offset, limit: limit); + kind: kind, + filters: filters, + orders: orders, + offset: offset, + limit: limit); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List entities) { if (transaction != null) { - return datastore.commit(transaction: transaction) + return datastore + .commit(transaction: transaction) .then((_) => entities); } return entities; @@ -627,22 +632,21 @@ runTests(Datastore datastore, String namespace) { return test(null); } - Future testQueryAndCompare(String kind, - List expectedEntities, - {List filters, - List orders, - bool transactional: false, - bool xg: false, - bool correctOrder: true, - int offset, - int limit}) { + Future testQueryAndCompare(String kind, List expectedEntities, + {List filters, + List orders, + bool transactional: false, + bool xg: false, + bool correctOrder: true, + int offset, + int limit}) { return testQuery(kind, - filters: filters, - orders: orders, - transactional: transactional, - xg: xg, - offset: offset, - limit: limit).then((List entities) { + filters: filters, + orders: orders, + transactional: transactional, + xg: xg, + offset: offset, + limit: limit).then((List entities) { expect(entities.length, equals(expectedEntities.length)); if (correctOrder) { @@ -653,20 +657,18 @@ runTests(Datastore datastore, String namespace) { for (int i = 0; i < entities.length; i++) { bool found = false; for (int j = 0; j < expectedEntities.length; j++) { - if (compareEntity(entities[i], expectedEntities[i])) { - found = true; - } + if (compareEntity(entities[i], expectedEntities[i])) { + found = true; + } } expect(found, isTrue); } } }); } - Future testOffsetLimitQuery(String kind, - List expectedEntities, - {List orders, - bool transactional: false, - bool xg: false}) { + + Future testOffsetLimitQuery(String kind, List expectedEntities, + {List orders, bool transactional: false, bool xg: false}) { // We query for all subsets of expectedEntities // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. List queryTests = []; @@ -676,27 +678,32 @@ runTests(Datastore datastore, String namespace) { int limit = end - start; var entities = expectedEntities.sublist(offset, offset + limit); queryTests.add(() { - return testQueryAndCompare( - kind, entities, transactional: transactional, - xg: xg, orders: orders, - offset: offset, limit: limit); + return testQueryAndCompare(kind, entities, + transactional: transactional, + xg: xg, + orders: orders, + offset: offset, + limit: limit); }); } } // Query with limit higher than the number of results. 
queryTests.add(() { - return testQueryAndCompare( - kind, expectedEntities, transactional: transactional, - xg: xg, orders: orders, - offset: 0, limit: expectedEntities.length * 10); + return testQueryAndCompare(kind, expectedEntities, + transactional: transactional, + xg: xg, + orders: orders, + offset: 0, + limit: expectedEntities.length * 10); }); return Future.forEach(queryTests, (f) => f()); } const TEST_QUERY_KIND = 'TestQueryKind'; - var stringNamedEntities = buildEntities( - 1, 6, idFunction: (i) => 'str$i', kind: TEST_QUERY_KIND, + var stringNamedEntities = buildEntities(1, 6, + idFunction: (i) => 'str$i', + kind: TEST_QUERY_KIND, partition: partition); var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList(); @@ -708,14 +715,15 @@ runTests(Datastore datastore, String namespace) { var reverseOrderFunction = (Entity a, Entity b) { // Reverse the order - return -1 * (a.properties[QUERY_KEY] as String) - .compareTo(b.properties[QUERY_KEY]); + return -1 * + (a.properties[QUERY_KEY] as String) + .compareTo(b.properties[QUERY_KEY]); }; var filterFunction = (Entity entity) { var value = entity.properties[QUERY_KEY]; return value.compareTo(QUERY_UPPER_BOUND) == -1 && - value.compareTo(QUERY_LOWER_BOUND) == 1; + value.compareTo(QUERY_LOWER_BOUND) == 1; }; var listFilterFunction = (Entity entity) { var values = entity.properties[TEST_LIST_PROPERTY]; @@ -733,137 +741,120 @@ runTests(Datastore datastore, String namespace) { expect(indexedEntity.length, equals(1)); var filters = [ - new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), - new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), + new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), + new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), ]; var listFilters = [ - new Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) + new Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) ]; var indexedPropertyFilter = [ - new Filter(FilterRelation.Equal, - TEST_INDEXED_PROPERTY, - QUERY_INDEX_VALUE), - new Filter(FilterRelation.Equal, - TEST_BLOB_INDEXED_PROPERTY, - TEST_BLOB_INDEXED_VALUE) + new Filter( + FilterRelation.Equal, TEST_INDEXED_PROPERTY, QUERY_INDEX_VALUE), + new Filter(FilterRelation.Equal, TEST_BLOB_INDEXED_PROPERTY, + TEST_BLOB_INDEXED_VALUE) ]; var unIndexedPropertyFilter = [ - new Filter(FilterRelation.Equal, - TEST_UNINDEXED_PROPERTY, - QUERY_INDEX_VALUE) + new Filter( + FilterRelation.Equal, TEST_UNINDEXED_PROPERTY, QUERY_INDEX_VALUE) ]; var orders = [new Order(OrderDirection.Decending, QUERY_KEY)]; test('query', () { return insert(stringNamedEntities, []).then((keys) { - return waitUntilEntitiesReady( - datastore, stringNamedKeys, partition).then((_) { + return waitUntilEntitiesReady(datastore, stringNamedKeys, partition) + .then((_) { var tests = [ // EntityKind query - () => testQueryAndCompare( - TEST_QUERY_KIND, stringNamedEntities, transactional: false, - correctOrder: false), - () => testQueryAndCompare( - TEST_QUERY_KIND, stringNamedEntities, transactional: true, - correctOrder: false), - () => testQueryAndCompare( - TEST_QUERY_KIND, stringNamedEntities, transactional: true, - correctOrder: false, xg: true), + () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + transactional: false, correctOrder: false), + () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + transactional: true, correctOrder: false), + () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + transactional: true, 
correctOrder: false, xg: true), // EntityKind query with order - () => testQueryAndCompare( - TEST_QUERY_KIND, sorted, transactional: false, - orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sorted, transactional: true, - orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sorted, transactional: false, xg: true, - orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + transactional: false, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + transactional: true, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + transactional: false, xg: true, orders: orders), // EntityKind query with filter - () => testQueryAndCompare( - TEST_QUERY_KIND, filtered, transactional: false, - filters: filters), - () => testQueryAndCompare( - TEST_QUERY_KIND, filtered, transactional: true, - filters: filters), - () => testQueryAndCompare( - TEST_QUERY_KIND, filtered, transactional: false, xg: true, - filters: filters), + () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + transactional: false, filters: filters), + () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + transactional: true, filters: filters), + () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + transactional: false, xg: true, filters: filters), // EntityKind query with filter + order - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndFiltered, transactional: false, - filters: filters, orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndFiltered, transactional: true, - filters: filters, orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndFiltered, transactional: false, - xg: true, filters: filters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + transactional: false, filters: filters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + transactional: true, filters: filters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + transactional: false, + xg: true, + filters: filters, + orders: orders), // EntityKind query with IN filter + order - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndListFiltered, transactional: false, - filters: listFilters, orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndListFiltered, transactional: true, - filters: listFilters, orders: orders), - () => testQueryAndCompare( - TEST_QUERY_KIND, sortedAndListFiltered, transactional: false, - xg: true, filters: listFilters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + transactional: false, filters: listFilters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + transactional: true, filters: listFilters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + transactional: false, + xg: true, + filters: listFilters, + orders: orders), // Limit & Offset test - () => testOffsetLimitQuery( - TEST_QUERY_KIND, sorted, transactional: false, - orders: orders), - () => testOffsetLimitQuery( - TEST_QUERY_KIND, sorted, transactional: true, orders: orders), - () => testOffsetLimitQuery( - TEST_QUERY_KIND, sorted, transactional: false, - xg: true, orders: orders), + () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + transactional: false, orders: orders), + () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + transactional: true, orders: orders), + () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + 
transactional: false, xg: true, orders: orders), // Query for indexed property - () => testQueryAndCompare( - TEST_QUERY_KIND, indexedEntity, transactional: false, + () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + transactional: false, filters: indexedPropertyFilter), + () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + transactional: true, filters: indexedPropertyFilter), + () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + transactional: false, + xg: true, filters: indexedPropertyFilter), - () => testQueryAndCompare( - TEST_QUERY_KIND, indexedEntity, transactional: true, - filters: indexedPropertyFilter), - () => testQueryAndCompare( - TEST_QUERY_KIND, indexedEntity, transactional: false, - xg: true, filters: indexedPropertyFilter), // Query for un-indexed property - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: false, - filters: unIndexedPropertyFilter), - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: true, + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: false, filters: unIndexedPropertyFilter), + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: true, filters: unIndexedPropertyFilter), + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: false, + xg: true, filters: unIndexedPropertyFilter), - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: false, - xg: true, filters: unIndexedPropertyFilter), // Delete results () => delete(stringNamedKeys, transactional: true), // Wait until the entity deletes are reflected in the indices. - () => waitUntilEntitiesGone( - datastore, stringNamedKeys, partition), + () => + waitUntilEntitiesGone(datastore, stringNamedKeys, partition), // Make sure queries don't return results - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: false), - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: true), - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: true, xg: true), - () => testQueryAndCompare( - TEST_QUERY_KIND, [], transactional: false, - filters: filters, orders: orders), + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: false), + () => + testQueryAndCompare(TEST_QUERY_KIND, [], transactional: true), + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: true, xg: true), + () => testQueryAndCompare(TEST_QUERY_KIND, [], + transactional: false, filters: filters, orders: orders), ]; return Future.forEach(tests, (f) => f()); }); @@ -885,7 +876,7 @@ runTests(Datastore datastore, String namespace) { var subKey = new Key.fromParent('SubKind', 1, parent: rootKey); var subSubKey = new Key.fromParent('SubSubKind', 1, parent: subKey); var subSubKey2 = new Key.fromParent('SubSubKind2', 1, parent: subKey); - var properties = { 'foo' : 'bar' }; + var properties = {'foo': 'bar'}; var entity = new Entity(subSubKey, properties); var entity2 = new Entity(subSubKey2, properties); @@ -902,8 +893,9 @@ runTests(Datastore datastore, String namespace) { }, // Test that lookup only returns inserted entities. 
() { - return datastore.lookup([rootKey, subKey, subSubKey, subSubKey2]) - .then((List entities) { + return datastore + .lookup([rootKey, subKey, subSubKey, subSubKey2]).then( + (List entities) { expect(entities.length, 4); expect(entities[0], isNull); expect(entities[1], isNull); @@ -919,8 +911,8 @@ runTests(Datastore datastore, String namespace) { () { var ancestorQuery = new Query(ancestorKey: rootKey, orders: orders); - return consumePages( - (_) => datastore.query(ancestorQuery, partition: partition)) + return consumePages((_) => + datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); @@ -931,8 +923,8 @@ runTests(Datastore datastore, String namespace) { () { var ancestorQuery = new Query(ancestorKey: subKey, orders: orders); - return consumePages( - (_) => datastore.query(ancestorQuery, partition: partition)) + return consumePages((_) => + datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 2); expect(compareEntity(entity, results[0]), isTrue); @@ -942,8 +934,8 @@ runTests(Datastore datastore, String namespace) { // - by [subSubKey] () { var ancestorQuery = new Query(ancestorKey: subSubKey); - return consumePages( - (_) => datastore.query(ancestorQuery, partition: partition)) + return consumePages((_) => + datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -952,8 +944,8 @@ runTests(Datastore datastore, String namespace) { // - by [subSubKey2] () { var ancestorQuery = new Query(ancestorKey: subSubKey2); - return consumePages( - (_) => datastore.query(ancestorQuery, partition: partition)) + return consumePages((_) => + datastore.query(ancestorQuery, partition: partition)) .then((results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -965,7 +957,7 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -975,7 +967,7 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind2'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -985,7 +977,7 @@ runTests(Datastore datastore, String namespace) { () { var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity, results[0]), isTrue); @@ -996,7 +988,7 @@ runTests(Datastore datastore, String namespace) { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 1); expect(compareEntity(entity2, results[0]), isTrue); @@ -1007,7 +999,7 @@ runTests(Datastore datastore, String namespace) { var query = new Query(ancestorKey: subSubKey, kind: 
'SubSubKind2'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 0); }); @@ -1017,7 +1009,7 @@ runTests(Datastore datastore, String namespace) { var query = new Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); return consumePages( - (_) => datastore.query(query, partition: partition)) + (_) => datastore.query(query, partition: partition)) .then((List results) { expect(results.length, 0); }); @@ -1077,10 +1069,8 @@ Future waitUntilEntitiesGone(Datastore db, List keys, Partition p) { return waitUntilEntitiesHelper(db, keys, false, p); } -Future waitUntilEntitiesHelper(Datastore db, - List keys, - bool positive, - Partition p) { +Future waitUntilEntitiesHelper( + Datastore db, List keys, bool positive, Partition p) { var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 39a787c3..e0221d82 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -18,29 +18,28 @@ const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; final TEST_BLOB_INDEXED_VALUE = new BlobValue([0xaa, 0xaa, 0xff, 0xff]); -buildKey(int i, {Function idFunction, String kind : TEST_KIND, Partition p}) { +buildKey(int i, {Function idFunction, String kind: TEST_KIND, Partition p}) { var path = [new KeyElement(kind, idFunction == null ? null : idFunction(i))]; return new Key(path, partition: p); } Map buildProperties(int i) { var listValues = [ - 'foo', - '$TEST_LIST_VALUE$i', + 'foo', + '$TEST_LIST_VALUE$i', ]; return { - TEST_PROPERTY_KEY_PREFIX : '$TEST_PROPERTY_VALUE_PREFIX$i', - TEST_LIST_PROPERTY : listValues, - TEST_INDEXED_PROPERTY : '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', - TEST_UNINDEXED_PROPERTY : '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', - TEST_BLOB_INDEXED_PROPERTY : TEST_BLOB_INDEXED_VALUE, + TEST_PROPERTY_KEY_PREFIX: '$TEST_PROPERTY_VALUE_PREFIX$i', + TEST_LIST_PROPERTY: listValues, + TEST_INDEXED_PROPERTY: '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', + TEST_UNINDEXED_PROPERTY: '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', + TEST_BLOB_INDEXED_PROPERTY: TEST_BLOB_INDEXED_VALUE, }; } -List buildKeys( - int from, int to, {Function idFunction, String kind : TEST_KIND, - Partition partition}) { +List buildKeys(int from, int to, + {Function idFunction, String kind: TEST_KIND, Partition partition}) { var keys = []; for (var i = from; i < to; i++) { keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); @@ -48,9 +47,8 @@ List buildKeys( return keys; } -List buildEntities( - int from, int to, {Function idFunction, String kind : TEST_KIND, - Partition partition}) { +List buildEntities(int from, int to, + {Function idFunction, String kind: TEST_KIND, Partition partition}) { var entities = []; var unIndexedProperties = new Set(); for (var i = from; i < to; i++) { @@ -63,24 +61,24 @@ List buildEntities( return entities; } -List buildEntityWithAllProperties( - int from, int to, {String kind : TEST_KIND, Partition partition}) { +List buildEntityWithAllProperties(int from, int to, + {String kind: TEST_KIND, Partition partition}) { var us42 = const Duration(microseconds: 42); var unIndexed = new Set.from(['blobProperty']); Map buildProperties(int i) { return { - 'nullValue' : null, - 'boolProperty' : true, - 'intProperty' : 42, - 
'doubleProperty' : 4.2, - 'stringProperty' : 'foobar', - 'blobProperty' : new BlobValue([0xff, 0xff, 0xaa, 0xaa]), - 'blobPropertyIndexed' : new BlobValue([0xaa, 0xaa, 0xff, 0xff]), - 'dateProperty' : + 'nullValue': null, + 'boolProperty': true, + 'intProperty': 42, + 'doubleProperty': 4.2, + 'stringProperty': 'foobar', + 'blobProperty': new BlobValue([0xff, 0xff, 0xaa, 0xaa]), + 'blobPropertyIndexed': new BlobValue([0xaa, 0xaa, 0xff, 0xff]), + 'dateProperty': new DateTime.fromMillisecondsSinceEpoch(1, isUtc: true).add(us42), - 'keyProperty' : buildKey(1, idFunction: (i) => 's$i', kind: kind), - 'listProperty' : [ + 'keyProperty': buildKey(1, idFunction: (i) => 's$i', kind: kind), + 'listProperty': [ 42, 4.2, 'foobar', @@ -91,8 +89,8 @@ List buildEntityWithAllProperties( var entities = []; for (var i = from; i < to; i++) { - var key = buildKey( - i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); + var key = + buildKey(i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); var properties = buildProperties(i); entities.add(new Entity(key, properties, unIndexedProperties: unIndexed)); } diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 582ec1aa..7509ed73 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -12,7 +12,6 @@ class _ApplicationError extends TypeMatcher { bool matches(item, Map matchState) => item is ApplicationError; } - class _DataStoreError extends TypeMatcher { const _DataStoreError() : super("DataStoreError"); bool matches(item, Map matchState) => item is DatastoreError; @@ -33,7 +32,6 @@ class _TimeoutError extends TypeMatcher { bool matches(item, Map matchState) => item is TimeoutError; } - class _IntMatcher extends TypeMatcher { const _IntMatcher() : super("IntMatcher"); bool matches(item, Map matchState) => item is int; diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index 70320d7a..587cf480 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -30,8 +30,8 @@ main() { }); test('non-default-partition', () { - var nsDb = new DatastoreDB( - null, defaultPartition: new Partition('foobar-namespace')); + var nsDb = new DatastoreDB(null, + defaultPartition: new Partition('foobar-namespace')); // Test defaultPartition expect(nsDb.defaultPartition.namespace, 'foobar-namespace'); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 8757f67a..0c80738c 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -64,7 +64,7 @@ class Person extends db.Model { @db.ModelKeyProperty(propertyName: 'mangledWife') db.Key wife; - operator==(Object other) => sameAs(other); + operator ==(Object other) => sameAs(other); sameAs(Object other) { return other is Person && @@ -78,7 +78,6 @@ class Person extends db.Model { String toString() => 'Person(id: $id, name: $name, age: $age)'; } - @db.Kind() class User extends Person { @db.StringProperty() @@ -112,7 +111,6 @@ class User extends Person { 'User(${super.toString()}, nickname: $nickname, languages: $languages'; } - @db.Kind() class ExpandoPerson extends db.ExpandoModel { @db.StringProperty() @@ -121,7 +119,7 @@ class ExpandoPerson extends db.ExpandoModel { @db.StringProperty(propertyName: 'NN') String nickname; - operator==(Object other) { + operator ==(Object other) { if (other is ExpandoPerson && id == other.id && name == other.name) { if 
(additionalProperties.length != other.additionalProperties.length) { return false; @@ -137,15 +135,13 @@ class ExpandoPerson extends db.ExpandoModel { } } - Future sleep(Duration duration) => new Future.delayed(duration); runTests(db.DatastoreDB store, String namespace) { var partition = store.newPartition(namespace); - void compareModels(List expectedModels, - List models, - {bool anyOrder: false}) { + void compareModels(List expectedModels, List models, + {bool anyOrder: false}) { expect(models.length, equals(expectedModels.length)); if (anyOrder) { // Do expensive O(n^2) search. @@ -166,8 +162,8 @@ runTests(db.DatastoreDB store, String namespace) { } } - Future testInsertLookupDelete( - List objects, {bool transactional: false}) { + Future testInsertLookupDelete(List objects, + {bool transactional: false}) { var keys = objects.map((db.Model obj) => obj.key).toList(); if (transactional) { @@ -202,8 +198,11 @@ runTests(db.DatastoreDB store, String namespace) { group('key', () { test('equal_and_hashcode', () { var k1 = store.emptyKey.append(User, id: 10).append(Person, id: 12); - var k2 = store.newPartition(null) - .emptyKey.append(User, id: 10).append(Person, id: 12); + var k2 = store + .newPartition(null) + .emptyKey + .append(User, id: 10) + .append(Person, id: 12); expect(k1, equals(k2)); expect(k1.hashCode, equals(k2.hashCode)); }); @@ -216,10 +215,10 @@ runTests(db.DatastoreDB store, String namespace) { var persons = []; for (var i = 1; i <= 10; i++) { persons.add(new Person() - ..id = i - ..parentKey = root - ..age = 42 + i - ..name = 'user$i'); + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i'); } persons.first.wife = persons.last.key; return testInsertLookupDelete(persons); @@ -229,11 +228,11 @@ runTests(db.DatastoreDB store, String namespace) { var users = []; for (var i = 1; i <= 10; i++) { users.add(new User() - ..id = i - ..parentKey = root - ..age = 42 + i - ..name = 'user$i' - ..nickname = 'nickname${i%3}'); + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); } return testInsertLookupDelete(users); }); @@ -242,9 +241,9 @@ runTests(db.DatastoreDB store, String namespace) { var expandoPersons = []; for (var i = 1; i <= 10; i++) { var expandoPerson = new ExpandoPerson() - ..parentKey = root - ..id = i - ..name = 'user$i'; + ..parentKey = root + ..id = i + ..name = 'user$i'; expandoPerson.foo = 'foo$i'; expandoPerson.bar = i; expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); @@ -258,20 +257,20 @@ runTests(db.DatastoreDB store, String namespace) { var models = []; models.add(new Person() - ..id = 1 - ..parentKey = root - ..age = 1 - ..name = 'user1'); + ..id = 1 + ..parentKey = root + ..age = 1 + ..name = 'user1'); models.add(new User() - ..id = 2 - ..parentKey = root - ..age = 2 - ..name = 'user2' - ..nickname = 'nickname2'); + ..id = 2 + ..parentKey = root + ..age = 2 + ..name = 'user2' + ..nickname = 'nickname2'); var expandoPerson = new ExpandoPerson() - ..parentKey = root - ..id = 3 - ..name = 'user1'; + ..parentKey = root + ..id = 3 + ..name = 'user1'; expandoPerson.foo = 'foo1'; expandoPerson.bar = 2; @@ -283,19 +282,19 @@ runTests(db.DatastoreDB store, String namespace) { var users = []; for (var i = 333; i <= 334; i++) { users.add(new User() - ..id = i - ..parentKey = root - ..age = 42 + i - ..name = 'user$i' - ..nickname = 'nickname${i%3}'); + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}'); } var persons = []; for (var i = 335; i 
<= 336; i++) { persons.add(new Person() - ..id = i - ..parentKey = root - ..age = 42 + i - ..name = 'person$i'); + ..id = i + ..parentKey = root + ..age = 42 + i + ..name = 'person$i'); } // We test that we can insert + lookup @@ -311,26 +310,26 @@ runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var persons = []; persons.add(new Person() - ..id = 42 - ..parentKey = root - ..age = 80 - ..name = 'user80'); + ..id = 42 + ..parentKey = root + ..age = 80 + ..name = 'user80'); // Auto id person with parentKey persons.add(new Person() - ..parentKey = root - ..age = 81 - ..name = 'user81'); + ..parentKey = root + ..age = 81 + ..name = 'user81'); // Auto id person with non-root parentKey var fatherKey = persons.first.parentKey; persons.add(new Person() - ..parentKey = fatherKey - ..age = 82 - ..name = 'user82'); + ..parentKey = fatherKey + ..age = 82 + ..name = 'user82'); persons.add(new Person() - ..id = 43 - ..parentKey = root - ..age = 83 - ..name = 'user83'); + ..id = 43 + ..parentKey = root + ..age = 83 + ..name = 'user83'); return store.commit(inserts: persons).then(expectAsync((_) { // At this point, autoIds are allocated and are reflected in the // models (as well as parentKey if it was empty). @@ -389,22 +388,22 @@ runTests(db.DatastoreDB store, String namespace) { languages = ['foo', 'bar']; } users.add(new User() - ..id = i - ..parentKey = root - ..wife = root.append(User, id: 42 + i) - ..age = 42 + i - ..name = 'user$i' - ..nickname = 'nickname${i%3}' - ..languages = languages); + ..id = i + ..parentKey = root + ..wife = root.append(User, id: 42 + i) + ..age = 42 + i + ..name = 'user$i' + ..nickname = 'nickname${i%3}' + ..languages = languages); } var expandoPersons = []; for (var i = 1; i <= 3; i++) { var expandoPerson = new ExpandoPerson() - ..parentKey = root - ..id = i - ..name = 'user$i' - ..nickname = 'nickuser$i'; + ..parentKey = root + ..id = i + ..name = 'user$i' + ..nickname = 'nickuser$i'; expandoPerson.foo = 'foo$i'; expandoPerson.bar = i; expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); @@ -438,16 +437,15 @@ runTests(db.DatastoreDB store, String namespace) { return LOWER_BOUND.compareTo(u.name) <= 0; }).toList(); - var fooUsers = users.where( - (User u) => u.languages.contains('foo')).toList(); - var barUsers = users.where( - (User u) => u.languages.contains('bar')).toList(); - var usersWithWife = users.where( - (User u) => u.wife == root.append(User, id: 42 + 3)).toList(); + var fooUsers = + users.where((User u) => u.languages.contains('foo')).toList(); + var barUsers = + users.where((User u) => u.languages.contains('bar')).toList(); + var usersWithWife = users + .where((User u) => u.wife == root.append(User, id: 42 + 3)) + .toList(); - var allInserts = [] - ..addAll(users) - ..addAll(expandoPersons); + var allInserts = []..addAll(users)..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { return waitUntilEntitiesReady(store, allKeys, partition).then((_) { @@ -455,7 +453,10 @@ runTests(db.DatastoreDB store, String namespace) { // Queries for [Person] return no results, we only have [User] // objects. 
() { - return store.query(Person, partition: partition).run().toList() + return store + .query(Person, partition: partition) + .run() + .toList() .then((List models) { compareModels([], models); }); @@ -463,7 +464,10 @@ runTests(db.DatastoreDB store, String namespace) { // All users query () { - return store.query(User, partition: partition).run().toList() + return store + .query(User, partition: partition) + .run() + .toList() .then((List models) { compareModels(users, models, anyOrder: true); }); @@ -472,64 +476,60 @@ runTests(db.DatastoreDB store, String namespace) { // Sorted query () async { var query = store.query(User, partition: partition) - ..order('-name') - ..order('nickname'); + ..order('-name') + ..order('nickname'); var models = await runQueryWithExponentialBackoff( query, usersSortedNameDescNicknameAsc.length); - compareModels( - usersSortedNameDescNicknameAsc, models); + compareModels(usersSortedNameDescNicknameAsc, models); }, () async { var query = store.query(User, partition: partition) - ..order('-name') - ..order('-nickname') - ..run(); + ..order('-name') + ..order('-nickname') + ..run(); var models = await runQueryWithExponentialBackoff( query, usersSortedNameDescNicknameDesc.length); - compareModels( - usersSortedNameDescNicknameDesc, models); + compareModels(usersSortedNameDescNicknameDesc, models); }, // Sorted query with filter () async { var query = store.query(User, partition: partition) - ..filter('name >=', LOWER_BOUND) - ..order('-name') - ..order('nickname'); + ..filter('name >=', LOWER_BOUND) + ..order('-name') + ..order('nickname'); var models = await runQueryWithExponentialBackoff( query, usersSortedAndFilteredNameDescNicknameAsc.length); - compareModels(usersSortedAndFilteredNameDescNicknameAsc, - models); + compareModels(usersSortedAndFilteredNameDescNicknameAsc, models); }, () async { var query = store.query(User, partition: partition) - ..filter('name >=', LOWER_BOUND) - ..order('-name') - ..order('-nickname') - ..run(); + ..filter('name >=', LOWER_BOUND) + ..order('-name') + ..order('-nickname') + ..run(); var models = await runQueryWithExponentialBackoff( query, usersSortedAndFilteredNameDescNicknameDesc.length); - compareModels(usersSortedAndFilteredNameDescNicknameDesc, - models); + compareModels(usersSortedAndFilteredNameDescNicknameDesc, models); }, // Filter lists () async { var query = store.query(User, partition: partition) - ..filter('languages =', 'foo') - ..order('name') - ..run(); - var models = await runQueryWithExponentialBackoff( - query, fooUsers.length); + ..filter('languages =', 'foo') + ..order('name') + ..run(); + var models = + await runQueryWithExponentialBackoff(query, fooUsers.length); compareModels(fooUsers, models, anyOrder: true); }, () async { var query = store.query(User, partition: partition) - ..filter('languages =', 'bar') - ..order('name') - ..run(); - var models = await runQueryWithExponentialBackoff( - query, barUsers.length); + ..filter('languages =', 'bar') + ..order('name') + ..run(); + var models = + await runQueryWithExponentialBackoff(query, barUsers.length); compareModels(barUsers, models, anyOrder: true); }, @@ -537,8 +537,8 @@ runTests(db.DatastoreDB store, String namespace) { () async { var wifeKey = root.append(User, id: usersWithWife.first.wife.id); var query = store.query(User, partition: partition) - ..filter('wife =', wifeKey) - ..run(); + ..filter('wife =', wifeKey) + ..run(); var models = await runQueryWithExponentialBackoff( query, usersWithWife.length); compareModels(usersWithWife, models, 
anyOrder: true); @@ -547,10 +547,10 @@ runTests(db.DatastoreDB store, String namespace) { // Simple limit/offset test. () async { var query = store.query(User, partition: partition) - ..order('-name') - ..order('nickname') - ..offset(3) - ..limit(4); + ..order('-name') + ..order('nickname') + ..offset(3) + ..limit(4); var expectedModels = usersSortedAndFilteredNameDescNicknameAsc.sublist(3, 7); var models = await runQueryWithExponentialBackoff( @@ -561,24 +561,24 @@ runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter on normal property. () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('name =', expandoPersons.last.name) - ..run(); + ..filter('name =', expandoPersons.last.name) + ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); }, // Expando queries: Filter on expanded String property () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('foo =', expandoPersons.last.foo) - ..run(); + ..filter('foo =', expandoPersons.last.foo) + ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); }, // Expando queries: Filter on expanded int property () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('bar =', expandoPersons.last.bar) - ..run(); + ..filter('bar =', expandoPersons.last.bar) + ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); }, @@ -586,8 +586,8 @@ runTests(db.DatastoreDB store, String namespace) { // propertyName (datastore name is 'NN'). () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('nickname =', expandoPersons.last.nickname) - ..run(); + ..filter('nickname =', expandoPersons.last.nickname) + ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); }, @@ -600,11 +600,11 @@ runTests(db.DatastoreDB store, String namespace) { // Make sure queries don't return results () => store.lookup(allKeys).then((List models) { - expect(models.length, equals(allKeys.length)); - for (var model in models) { - expect(model, isNull); - } - }), + expect(models.length, equals(allKeys.length)); + for (var model in models) { + expect(model, isNull); + } + }), ]; return Future.forEach(tests, (f) => f()); }); @@ -620,7 +620,7 @@ Future> runQueryWithExponentialBackoff( // Wait for 0.1s, 0.2s, ..., 12.8s var duration = new Duration(milliseconds: 100 * (2 << i)); print("Running query did return less results than expected." 
- "Using exponential backoff: Sleeping for $duration."); + "Using exponential backoff: Sleeping for $duration."); await sleep(duration); } @@ -634,30 +634,29 @@ Future> runQueryWithExponentialBackoff( "Tried running a query with exponential backoff, giving up now."); } -Future waitUntilEntitiesReady(db.DatastoreDB mdb, - List keys, - db.Partition partition) { +Future waitUntilEntitiesReady( + db.DatastoreDB mdb, List keys, db.Partition partition) { return waitUntilEntitiesHelper(mdb, keys, true, partition); } -Future waitUntilEntitiesGone(db.DatastoreDB mdb, - List keys, - db.Partition partition) { +Future waitUntilEntitiesGone( + db.DatastoreDB mdb, List keys, db.Partition partition) { return waitUntilEntitiesHelper(mdb, keys, false, partition); } -Future waitUntilEntitiesHelper(db.DatastoreDB mdb, - List keys, - bool positive, - db.Partition partition) { +Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, + bool positive, db.Partition partition) { var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.type, () => []).add(key); } Future waitForKeys(Type kind, List keys) { - return mdb.query(kind, partition: partition) - .run().toList().then((List models) { + return mdb + .query(kind, partition: partition) + .run() + .toList() + .then((List models) { for (var key in keys) { bool found = false; for (var model in models) { diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 40e8f49a..fccf53f0 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -28,12 +28,10 @@ List buildEntitiesWithDifferentNamespaces() { newEntity(null, 'NullKind', id: 2), newEntity(null, 'NullKind2', id: 1), newEntity(null, 'NullKind2', id: 2), - newEntity('FooNamespace', 'FooKind', id: 1), newEntity('FooNamespace', 'FooKind', id: 2), newEntity('FooNamespace', 'FooKind2', id: 1), newEntity('FooNamespace', 'FooKind2', id: 2), - newEntity('BarNamespace', 'BarKind', id: 1), newEntity('BarNamespace', 'BarKind', id: 2), newEntity('BarNamespace', 'BarKind2', id: 1), @@ -61,20 +59,22 @@ runTests(datastore, db.DatastoreDB store) { return datastore.commit(inserts: entities).then((_) { return sleep(const Duration(seconds: 10)).then((_) { var namespaceQuery = store.query(Namespace); - return namespaceQuery.run().toList() + return namespaceQuery + .run() + .toList() .then((List namespaces) { expect(namespaces.length, greaterThanOrEqualTo(3)); expect(namespaces, contains(cond((ns) => ns.name == null))); - expect(namespaces, - contains(cond((ns) => ns.name == 'FooNamespace'))); - expect(namespaces, - contains(cond((ns) => ns.name == 'BarNamespace'))); + expect( + namespaces, contains(cond((ns) => ns.name == 'FooNamespace'))); + expect( + namespaces, contains(cond((ns) => ns.name == 'BarNamespace'))); var futures = []; for (var namespace in namespaces) { if (!(namespace == null || - namespace == 'FooNamespace' || - namespace == 'BarNamespace')) { + namespace == 'FooNamespace' || + namespace == 'BarNamespace')) { continue; } var partition = store.newPartition(namespace.name); @@ -102,4 +102,3 @@ runTests(datastore, db.DatastoreDB store) { }); }); } - diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index bf8be819..b9707b86 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -9,7 +9,6 @@ import 'dart:async'; import 'package:gcloud/db.dart'; import 'package:unittest/unittest.dart'; - // 
These unused imports make sure that [ModelDBImpl.fromLibrary()] will find // all the Model/ModelDescription classes. import 'model_dbs/duplicate_kind.dart' as test1; @@ -19,7 +18,7 @@ import 'model_dbs/duplicate_fieldname.dart' as test4; import 'model_dbs/no_default_constructor.dart' as test5; main() { - newModelDB(Symbol symbol)=> new ModelDBImpl.fromLibrary(symbol); + newModelDB(Symbol symbol) => new ModelDBImpl.fromLibrary(symbol); group('model_db', () { group('from_library', () { diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index 89ae7eb0..1859fdf9 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -7,7 +7,7 @@ library gcloud.db.model_test.duplicate_kind; import 'package:gcloud/db.dart' as db; @db.Kind() -class A extends db.Model { } +class A extends db.Model {} @db.Kind(name: 'A') -class B extends db.Model { } +class B extends db.Model {} diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 30e11a91..3ffd27ca 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -8,4 +8,4 @@ import 'package:gcloud/db.dart' as db; @db.Kind() @db.Kind() -class A extends db.Model { } +class A extends db.Model {} diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 52217b60..7b05e29b 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -73,20 +73,21 @@ main() { prop = const BlobProperty(required: false); expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, [1,2]), isTrue); + expect(prop.validate(null, [1, 2]), isTrue); expect(prop.encodeValue(null, null), equals(null)); expect(prop.encodeValue(null, []).bytes, equals([])); - expect(prop.encodeValue(null, [1,2]).bytes, equals([1,2])); - expect(prop.encodeValue(null, new Uint8List.fromList([1,2])).bytes, - equals([1,2])); + expect(prop.encodeValue(null, [1, 2]).bytes, equals([1, 2])); + expect(prop.encodeValue(null, new Uint8List.fromList([1, 2])).bytes, + equals([1, 2])); expect(prop.decodePrimitiveValue(null, null), equals(null)); expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([])), equals([])); - expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([5,6])), - equals([5,6])); - expect(prop.decodePrimitiveValue( - null, new datastore.BlobValue(new Uint8List.fromList([5,6]))), - equals([5,6])); + expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([5, 6])), + equals([5, 6])); + expect( + prop.decodePrimitiveValue( + null, new datastore.BlobValue(new Uint8List.fromList([5, 6]))), + equals([5, 6])); }); test('datetime_property', () { @@ -101,8 +102,8 @@ main() { expect(prop.encodeValue(null, null), equals(null)); expect(prop.encodeValue(null, utc99), equals(utc99)); expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, 99*1000), equals(utc99)); - expect(prop.decodePrimitiveValue(null, 99*1000 + 1), equals(utc99)); + expect(prop.decodePrimitiveValue(null, 99 * 1000), equals(utc99)); + expect(prop.decodePrimitiveValue(null, 99 * 1000 + 1), equals(utc99)); expect(prop.decodePrimitiveValue(null, utc99), equals(utc99)); }); @@ -124,7 +125,7 @@ main() { expect(prop.decodePrimitiveValue(null, []), equals([])); expect(prop.decodePrimitiveValue(null, true), 
equals([true])); expect(prop.decodePrimitiveValue(null, [true, false]), - equals([true, false])); + equals([true, false])); }); test('composed_list_property', () { @@ -141,12 +142,12 @@ main() { expect(prop.encodeValue(null, []), equals(null)); expect(prop.encodeValue(null, [c1]), equals(c1.customValue)); expect(prop.encodeValue(null, [c1, c2]), - equals([c1.customValue, c2.customValue])); + equals([c1.customValue, c2.customValue])); expect(prop.decodePrimitiveValue(null, null), equals([])); expect(prop.decodePrimitiveValue(null, []), equals([])); expect(prop.decodePrimitiveValue(null, c1.customValue), equals([c1])); expect(prop.decodePrimitiveValue(null, [c1.customValue, c2.customValue]), - equals([c1, c2])); + equals([c1, c2])); }); test('modelkey_property', () { @@ -166,8 +167,8 @@ main() { expect(prop.encodeValue(modelDBMock, null), equals(null)); expect(prop.encodeValue(modelDBMock, dbKey), equals(datastoreKey)); expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); - expect(prop.decodePrimitiveValue(modelDBMock, datastoreKey), - equals(dbKey)); + expect( + prop.decodePrimitiveValue(modelDBMock, datastoreKey), equals(dbKey)); }); }); } @@ -177,7 +178,7 @@ class Custom { int get hashCode => customValue.hashCode; - bool operator==(other) { + bool operator ==(other) { return other is Custom && other.customValue == customValue; } } @@ -231,7 +232,7 @@ class ModelDBMock implements ModelDB { datastore.Key toDatastoreKey(Key key) { if (!identical(_dbKey, key)) { - throw "Broken test"; + throw "Broken test"; } return _datastoreKey; } @@ -241,5 +242,7 @@ class ModelDBMock implements ModelDB { datastore.Entity toDatastoreEntity(Model model) => null; String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; - Object toDatastoreValue(String kind, String fieldName, Object value, {bool forComparison: false}) => null; + Object toDatastoreValue(String kind, String fieldName, Object value, + {bool forComparison: false}) => + null; } diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 434f2a7d..75bde888 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -15,7 +15,6 @@ import 'db/e2e/db_test_impl.dart' as db_test; import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; import 'datastore/e2e/datastore_test_impl.dart' as datastore_test; - import 'common_e2e.dart'; main() { diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index c32b384a..c422c952 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -60,7 +60,7 @@ runTests(PubSub pubsub, String project, String prefix) { expect(subscription.name, subscriptionName); expect(subscription.project, project); expect(subscription.absoluteName, - 'projects/$project/subscriptions/$subscriptionName'); + 'projects/$project/subscriptions/$subscriptionName'); expect(subscription.isPull, isTrue); expect(subscription.isPush, isFalse); expect(await pubsub.deleteSubscription(subscriptionName), isNull); @@ -131,12 +131,12 @@ main() { if (subscription.name.startsWith(prefix)) { try { print('WARNING: Removing leftover subscription ' - '${subscription.name}'); + '${subscription.name}'); leftovers = true; await pubsub.deleteSubscription(subscription.name); } catch (e) { print('Error during test cleanup of subscription ' - '${subscription.name} ($e)'); + '${subscription.name} ($e)'); cleanupErrors = true; } } 
diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 9953231a..8a0bc14f 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -23,14 +23,22 @@ http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); main() { group('api', () { var badTopicNames = [ - 'projects/', 'projects/topics', 'projects/$PROJECT', - 'projects/$PROJECT/', 'projects/${PROJECT}/topics', - 'projects/${PROJECT}/topics/']; + 'projects/', + 'projects/topics', + 'projects/$PROJECT', + 'projects/$PROJECT/', + 'projects/${PROJECT}/topics', + 'projects/${PROJECT}/topics/' + ]; var badSubscriptionNames = [ - 'projects/', 'projects/subscriptions', 'projects/$PROJECT', - 'projects/$PROJECT/', 'projects/${PROJECT}/subscriptions', - 'projects/${PROJECT}/subscriptions/']; + 'projects/', + 'projects/subscriptions', + 'projects/$PROJECT', + 'projects/$PROJECT/', + 'projects/${PROJECT}/subscriptions', + 'projects/${PROJECT}/subscriptions/' + ]; group('topic', () { var name = 'test-topic'; @@ -39,7 +47,8 @@ main() { test('create', () { var mock = mockClient(); mock.register( - 'PUT', 'projects/$PROJECT/topics/test-topic', + 'PUT', + 'projects/$PROJECT/topics/test-topic', expectAsync((request) { var requestTopic = new pubsub.Topic.fromJson(JSON.decode(request.body)); @@ -73,10 +82,12 @@ main() { test('delete', () { var mock = mockClient(); mock.register( - 'DELETE', 'projects/test-project/topics/test-topic', expectAsync((request) { - expect(request.body.length, 0); - return mock.respondEmpty(); - }, count: 2)); + 'DELETE', + 'projects/test-project/topics/test-topic', + expectAsync((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); var api = new PubSub(mock, PROJECT); return api.deleteTopic(name).then(expectAsync((result) { @@ -101,10 +112,12 @@ main() { test('lookup', () { var mock = mockClient(); mock.register( - 'GET', 'projects/test-project/topics/test-topic', expectAsync((request) { - expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); - }, count: 2)); + 'GET', + 'projects/test-project/topics/test-topic', + expectAsync((request) { + expect(request.body.length, 0); + return mock.respond(new pubsub.Topic()..name = absoluteName); + }, count: 2)); var api = new PubSub(mock, PROJECT); return api.lookupTopic(name).then(expectAsync((topic) { @@ -133,7 +146,8 @@ main() { addTopics(pubsub.ListTopicsResponse response, int first, int count) { response.topics = []; for (int i = 0; i < count; i++) { - response.topics.add(new pubsub.Topic()..name = 'topic-${first + i}'); + response.topics + .add(new pubsub.Topic()..name = 'topic-${first + i}'); } } @@ -149,24 +163,27 @@ main() { totalCalls = totalPages; } var pageCount = 0; - mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { - pageCount++; - expect(request.url.queryParameters['pageSize'], '$pageSize'); - expect(request.body.length, 0); - if (pageCount > 1) { - expect(request.url.queryParameters['pageToken'], 'next-page'); - } + mock.register( + 'GET', + 'projects/$PROJECT/topics', + expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['pageSize'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } - var response = new pubsub.ListTopicsResponse(); - var first = (pageCount - 1) * pageSize + 1; - if (pageCount < totalPages) { - response.nextPageToken = 'next-page'; - 
addTopics(response, first, pageSize); - } else { - addTopics(response, first, n - (totalPages - 1) * pageSize); - } - return mock.respond(response); - }, count: totalCalls)); + var response = new pubsub.ListTopicsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addTopics(response, first, pageSize); + } else { + addTopics(response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); } group('list', () { @@ -175,8 +192,10 @@ main() { registerQueryMock(mock, count, 50); var api = new PubSub(mock, PROJECT); - return api.listTopics().listen( - expectAsync((_) => null, count: count)).asFuture(); + return api + .listTopics() + .listen(expectAsync((_) => null, count: count)) + .asFuture(); } test('simple', () { @@ -197,13 +216,12 @@ main() { registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); - api.listTopics().listen( - expectAsync(((_) => null), count: 70), + api.listTopics().listen(expectAsync(((_) => null), count: 70), onDone: expectAsync(() => null)) - ..pause() - ..resume() - ..pause() - ..resume(); + ..pause() + ..resume() + ..pause() + ..resume(); }); test('pause-resume', () { @@ -215,7 +233,10 @@ main() { var subscription; subscription = api.listTopics().listen( expectAsync(((_) { - subscription..pause()..resume()..pause(); + subscription + ..pause() + ..resume() + ..pause(); if ((count % 2) == 0) { subscription.resume(); } else { @@ -224,7 +245,7 @@ main() { return null; }), count: 70), onDone: expectAsync(() => null)) - ..pause(); + ..pause(); scheduleMicrotask(() => subscription.resume()); }); @@ -233,10 +254,9 @@ main() { registerQueryMock(mock, 70, 50, 1); var api = new PubSub(mock, PROJECT); - api.listTopics().listen( - (_) => throw 'Unexpected', + api.listTopics().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') - ..cancel(); + ..cancel(); }); test('cancel', () { @@ -254,16 +274,16 @@ main() { runTest(bool withPause) { // Test error on first GET request. var mock = mockClient(); - mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/topics', + expectAsync((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); var subscription; - subscription = api.listTopics().listen( - (_) => throw 'Unexpected', + subscription = api.listTopics().listen((_) => throw 'Unexpected', onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); + onError: + expectAsync((e) => e is pubsub.DetailedApiRequestError)); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); @@ -293,15 +313,16 @@ main() { scheduleMicrotask(() => subscription.resume()); } mock.clear(); - mock.register('GET', 'projects/$PROJECT/topics', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/topics', + expectAsync((request) { return mock.respondError(500); })); } return null; }), count: 50), onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); + onError: + expectAsync((e) => e is pubsub.DetailedApiRequestError)); } runTest(false); @@ -363,8 +384,7 @@ main() { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, - page.isLast ? n - (totalPages - 1) * pageSize - : pageSize ); + page.isLast ? 
n - (totalPages - 1) * pageSize : pageSize); page.next().then(expectAsync((page) { if (page != null) { handlePage(page); @@ -404,25 +424,31 @@ main() { test('create', () { var mock = mockClient(); - mock.register('PUT', 'projects/$PROJECT/subscriptions', expectAsync((request) { - var requestSubscription = - new pubsub.Subscription.fromJson(JSON.decode(request.body)); - expect(requestSubscription.name, absoluteName); - return mock.respond(new pubsub.Subscription()..name = absoluteName); - }, count: 2)); + mock.register( + 'PUT', + 'projects/$PROJECT/subscriptions', + expectAsync((request) { + var requestSubscription = + new pubsub.Subscription.fromJson(JSON.decode(request.body)); + expect(requestSubscription.name, absoluteName); + return mock + .respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.createSubscription(name, topicName) + return api + .createSubscription(name, topicName) .then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.absoluteName, absoluteName); - return api.createSubscription(absoluteName, absoluteTopicName) - .then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.project, PROJECT); - expect(subscription.absoluteName, absoluteName); - })); - })); + expect(subscription.name, name); + expect(subscription.absoluteName, absoluteName); + return api + .createSubscription(absoluteName, absoluteTopicName) + .then(expectAsync((subscription) { + expect(subscription.name, name); + expect(subscription.project, PROJECT); + expect(subscription.absoluteName, absoluteName); + })); + })); }); test('create-error', () { @@ -430,11 +456,11 @@ main() { var api = new PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.createSubscription(name, 'test-topic'), - throwsArgumentError); + throwsArgumentError); }); badTopicNames.forEach((name) { expect(() => api.createSubscription('test-subscription', name), - throwsArgumentError); + throwsArgumentError); }); }); @@ -442,10 +468,11 @@ main() { var mock = mockClient(); mock.register( 'DELETE', - 'projects/$PROJECT/subscriptions', expectAsync((request) { - expect(request.body.length, 0); - return mock.respondEmpty(); - }, count: 2)); + 'projects/$PROJECT/subscriptions', + expectAsync((request) { + expect(request.body.length, 0); + return mock.respondEmpty(); + }, count: 2)); var api = new PubSub(mock, PROJECT); return api.deleteSubscription(name).then(expectAsync((result) { @@ -471,21 +498,24 @@ main() { var mock = mockClient(); mock.register( 'GET', - new RegExp('projects/$PROJECT/subscriptions'), expectAsync((request) { - expect(request.body.length, 0); - return mock.respond(new pubsub.Subscription()..name = absoluteName); - }, count: 2)); + new RegExp('projects/$PROJECT/subscriptions'), + expectAsync((request) { + expect(request.body.length, 0); + return mock + .respond(new pubsub.Subscription()..name = absoluteName); + }, count: 2)); var api = new PubSub(mock, PROJECT); return api.lookupSubscription(name).then(expectAsync((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); - return api.lookupSubscription(absoluteName) + return api + .lookupSubscription(absoluteName) .then(expectAsync((subscription) { - expect(subscription.name, name); - expect(subscription.project, PROJECT); - expect(subscription.absoluteName, absoluteName); - })); + expect(subscription.name, name); + expect(subscription.project, PROJECT); + 
expect(subscription.absoluteName, absoluteName); + })); })); }); @@ -510,7 +540,6 @@ main() { } } - // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. registerQueryMock(mock, n, pageSize, {String topic, int totalCalls}) { @@ -523,25 +552,28 @@ main() { totalCalls = totalPages; } var pageCount = 0; - mock.register('GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { - pageCount++; - expect(request.url.queryParameters['pageSize'], '$pageSize'); - expect(request.body.length, 0); - if (pageCount > 1) { - expect(request.url.queryParameters['pageToken'], 'next-page'); - } + mock.register( + 'GET', + 'projects/$PROJECT/subscriptions', + expectAsync((request) { + pageCount++; + expect(request.url.queryParameters['pageSize'], '$pageSize'); + expect(request.body.length, 0); + if (pageCount > 1) { + expect(request.url.queryParameters['pageToken'], 'next-page'); + } - var response = new pubsub.ListSubscriptionsResponse(); - var first = (pageCount - 1) * pageSize + 1; - if (pageCount < totalPages) { - response.nextPageToken = 'next-page'; - addSubscriptions(response, first, pageSize); - } else { - addSubscriptions( - response, first, n - (totalPages - 1) * pageSize); - } - return mock.respond(response); - }, count: totalCalls)); + var response = new pubsub.ListSubscriptionsResponse(); + var first = (pageCount - 1) * pageSize + 1; + if (pageCount < totalPages) { + response.nextPageToken = 'next-page'; + addSubscriptions(response, first, pageSize); + } else { + addSubscriptions( + response, first, n - (totalPages - 1) * pageSize); + } + return mock.respond(response); + }, count: totalCalls)); } group('list', () { @@ -550,8 +582,10 @@ main() { registerQueryMock(mock, count, 50, topic: topic); var api = new PubSub(mock, PROJECT); - return api.listSubscriptions(topic).listen( - expectAsync((_) => null, count: count)).asFuture(); + return api + .listSubscriptions(topic) + .listen(expectAsync((_) => null, count: count)) + .asFuture(); } test('simple', () { @@ -585,10 +619,10 @@ main() { api.listSubscriptions().listen( expectAsync(((_) => null), count: 70), onDone: expectAsync(() => null)) - ..pause() - ..resume() - ..pause() - ..resume(); + ..pause() + ..resume() + ..pause() + ..resume(); }); test('pause-resume', () { @@ -600,7 +634,10 @@ main() { var subscription; subscription = api.listSubscriptions().listen( expectAsync(((_) { - subscription..pause()..resume()..pause(); + subscription + ..pause() + ..resume() + ..pause(); if ((count % 2) == 0) { subscription.resume(); } else { @@ -609,7 +646,7 @@ main() { return null; }), count: 70), onDone: expectAsync(() => null)) - ..pause(); + ..pause(); scheduleMicrotask(() => subscription.resume()); }); @@ -618,10 +655,9 @@ main() { registerQueryMock(mock, 70, 50, totalCalls: 1); var api = new PubSub(mock, PROJECT); - api.listSubscriptions().listen( - (_) => throw 'Unexpected', + api.listSubscriptions().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') - ..cancel(); + ..cancel(); }); test('cancel', () { @@ -639,7 +675,8 @@ main() { runTest(bool withPause) { // Test error on first GET request. 
var mock = mockClient(); - mock.register('GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/subscriptions', + expectAsync((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); @@ -647,8 +684,8 @@ main() { subscription = api.listSubscriptions().listen( (_) => throw 'Unexpected', onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); + onError: + expectAsync((e) => e is pubsub.DetailedApiRequestError)); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); @@ -678,16 +715,16 @@ main() { scheduleMicrotask(() => subscription.resume()); } mock.clear(); - mock.register( - 'GET', 'projects/$PROJECT/subscriptions', expectAsync((request) { + mock.register('GET', 'projects/$PROJECT/subscriptions', + expectAsync((request) { return mock.respondError(500); })); } return null; }), count: 50), onDone: expectAsync(() => null), - onError: expectAsync( - (e) => e is pubsub.DetailedApiRequestError)); + onError: + expectAsync((e) => e is pubsub.DetailedApiRequestError)); } runTest(false); @@ -708,12 +745,13 @@ main() { mock.clear(); registerQueryMock(mock, 0, 20, topic: topic); - return api.pageSubscriptions(topic: topic, pageSize: 20) + return api + .pageSubscriptions(topic: topic, pageSize: 20) .then(expectAsync((page) { - expect(page.items.length, 0); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); + expect(page.items.length, 0); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); })); } @@ -734,12 +772,13 @@ main() { mock.clear(); registerQueryMock(mock, 20, 20, topic: topic); - return api.pageSubscriptions(topic: topic, pageSize: 20) + return api + .pageSubscriptions(topic: topic, pageSize: 20) .then(expectAsync((page) { - expect(page.items.length, 20); - expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); - })); + expect(page.items.length, 20); + expect(page.isLast, isTrue); + expect(page.next(), completion(isNull)); + })); })); } @@ -760,8 +799,7 @@ main() { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, - page.isLast ? n - (totalPages - 1) * pageSize - : pageSize ); + page.isLast ? 
n - (totalPages - 1) * pageSize : pageSize); page.next().then((page) { if (page != null) { handlingPage(page); @@ -773,7 +811,8 @@ main() { } var api = new PubSub(mock, PROJECT); - api.pageSubscriptions(topic: topic, pageSize: pageSize) + api + .pageSubscriptions(topic: topic, pageSize: pageSize) .then(handlingPage); return completer.future; @@ -815,15 +854,16 @@ main() { var attributes = {'a': '1', 'b': 'text'}; registerLookup(mock) { - mock.register( - 'GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); } registerPublish(mock, count, fn) { - mock.register('POST', 'projects/test-project/topics/test-topic:publish', + mock.register( + 'POST', + 'projects/test-project/topics/test-topic:publish', expectAsync((request) { var publishRequest = new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); @@ -849,12 +889,13 @@ main() { expect(result, isNull); return topic.publishBytes(messageBytes).then(expectAsync((result) { expect(result, isNull); - return topic.publish( - new Message.withString(message)).then(expectAsync((result) { + return topic + .publish(new Message.withString(message)) + .then(expectAsync((result) { expect(result, isNull); - return topic.publish( - new Message.withBytes( - messageBytes)).then(expectAsync((result) { + return topic + .publish(new Message.withBytes(messageBytes)) + .then(expectAsync((result) { expect(result, isNull); })); })); @@ -879,18 +920,22 @@ main() { return mock.respond(new pubsub.PublishResponse()..messageIds = [0]); })); - return topic.publishString(message, attributes: attributes) + return topic + .publishString(message, attributes: attributes) .then(expectAsync((result) { expect(result, isNull); - return topic.publishBytes(messageBytes, attributes: attributes) + return topic + .publishBytes(messageBytes, attributes: attributes) .then(expectAsync((result) { expect(result, isNull); - return topic.publish( - new Message.withString(message, attributes: attributes)) + return topic + .publish( + new Message.withString(message, attributes: attributes)) .then(expectAsync((result) { expect(result, isNull); - return topic.publish( - new Message.withBytes(messageBytes, attributes: attributes)) + return topic + .publish(new Message.withBytes(messageBytes, + attributes: attributes)) .then(expectAsync((result) { expect(result, isNull); })); @@ -902,8 +947,7 @@ main() { test('delete', () { var mock = mockClient(); - mock.register( - 'GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); @@ -913,8 +957,7 @@ main() { expect(topic.name, name); expect(topic.absoluteName, absoluteName); - mock.register( - 'DELETE', absoluteName, expectAsync((request) { + mock.register('DELETE', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respondEmpty(); })); @@ -932,8 +975,7 @@ main() { test('delete', () { var mock = mockClient(); - mock.register( - 'GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); @@ -960,8 +1002,7 @@ main() { var absoluteSubscriptionName = '/subscriptions/$relativeSubscriptionName'; test('event', () { - var requestBody = -''' + var requestBody = ''' { "message": { 
"data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", @@ -987,8 +1028,7 @@ main() { }); test('event-short-subscription-name', () { - var requestBody = - ''' + var requestBody = ''' { "message": { "data":"SGVsbG8sIHdvcmxkIDMwIG9mIDUwIQ==", diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index afaa3be3..753677d3 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -39,22 +39,21 @@ main() { test('fork-callback-returns-non-future', () { // The closure passed to fork() must return a future. - expect(() => ss.fork(expectAsync(() => null)), - throwsA(isArgumentError)); + expect(() => ss.fork(expectAsync(() => null)), throwsA(isArgumentError)); }); test('error-on-double-insert', () { // Ensure that inserting twice with the same key results in an error. return ss.fork(expectAsync(() => new Future.sync(() { - ss.register(1, 'firstValue'); - expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError)); - }))); + ss.register(1, 'firstValue'); + expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError)); + }))); }); test('only-cleanup', () { return ss.fork(expectAsync(() => new Future.sync(() { - ss.registerScopeExitCallback(expectAsync(() {})); - }))); + ss.registerScopeExitCallback(expectAsync(() {})); + }))); }); test('correct-insertion-and-cleanup-order', () { @@ -62,27 +61,27 @@ main() { // their entries. int insertions = 0; return ss.fork(expectAsync(() => new Future.value(() { - int NUM = 10; - - for (int i = 0; i < NUM; i++) { - var key = i; - - insertions++; - ss.register(key, 'value$i'); - ss.registerScopeExitCallback(expectAsync(() { - expect(insertions, equals(i + 1)); - insertions--; - })); - - for (int j = 0; j <= NUM; j++) { - if (j <= i) { - expect(ss.lookup(key), 'value$i'); - } else { - expect(ss.lookup(key), isNull); + int NUM = 10; + + for (int i = 0; i < NUM; i++) { + var key = i; + + insertions++; + ss.register(key, 'value$i'); + ss.registerScopeExitCallback(expectAsync(() { + expect(insertions, equals(i + 1)); + insertions--; + })); + + for (int j = 0; j <= NUM; j++) { + if (j <= i) { + expect(ss.lookup(key), 'value$i'); + } else { + expect(ss.lookup(key), isNull); + } + } } - } - } - }))); + }))); }); test('onion-cleanup', () { @@ -115,17 +114,19 @@ main() { // Ensure the fork() error message contains all error messages from the // failed cleanup() calls. int insertions = 0; - return ss.fork(() => new Future.sync(() { - for (int i = 0; i < 10; i++) { - insertions++; - ss.register(i, 'value$i'); - ss.registerScopeExitCallback(() { - expect(insertions, equals(i + 1)); - insertions--; - if (i.isEven) throw 'xx${i}yy'; - }); - } - })).catchError(expectAsync((e, _) { + return ss + .fork(() => new Future.sync(() { + for (int i = 0; i < 10; i++) { + insertions++; + ss.register(i, 'value$i'); + ss.registerScopeExitCallback(() { + expect(insertions, equals(i + 1)); + insertions--; + if (i.isEven) throw 'xx${i}yy'; + }); + } + })) + .catchError(expectAsync((e, _) { for (int i = 0; i < 10; i++) { expect('$e'.contains('xx${i}yy'), equals(i.isEven)); } @@ -136,35 +137,35 @@ main() { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. return ss.fork(expectAsync(() => new Future.sync(() { - var key = 1; - ss.register(key, 'firstValue'); - ss.registerScopeExitCallback(Zone.current.bindCallback(() { - // Spawn an async task which will be run after the cleanups to ensure - // the service scope got destroyed. 
- Timer.run(expectAsync(() { - expect(() => ss.lookup(key), throwsA(isStateError)); - expect(() => ss.register(2, 'value'), throwsA(isStateError)); - expect(() => ss.registerScopeExitCallback(() {}), - throwsA(isStateError)); - })); - })); - expect(ss.lookup(key), equals('firstValue')); - }))); + var key = 1; + ss.register(key, 'firstValue'); + ss.registerScopeExitCallback(Zone.current.bindCallback(() { + // Spawn an async task which will be run after the cleanups to ensure + // the service scope got destroyed. + Timer.run(expectAsync(() { + expect(() => ss.lookup(key), throwsA(isStateError)); + expect(() => ss.register(2, 'value'), throwsA(isStateError)); + expect(() => ss.registerScopeExitCallback(() {}), + throwsA(isStateError)); + })); + })); + expect(ss.lookup(key), equals('firstValue')); + }))); }); test('override-parent-value', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. return ss.fork(expectAsync(() => new Future.sync(() { - var key = 1; - ss.register(key, 'firstValue'); - expect(ss.lookup(key), equals('firstValue')); - - return ss.fork(expectAsync(() => new Future.sync(() { - ss.register(key, 'secondValue'); - expect(ss.lookup(key), equals('secondValue')); - }))); - }))); + var key = 1; + ss.register(key, 'firstValue'); + expect(ss.lookup(key), equals('firstValue')); + + return ss.fork(expectAsync(() => new Future.sync(() { + ss.register(key, 'secondValue'); + expect(ss.lookup(key), equals('secondValue')); + }))); + }))); }); test('fork-onError-handler', () { @@ -200,21 +201,21 @@ main() { Future spawnChild(ownSubKey, otherSubKey, int i, cleanup) { return ss.fork(expectAsync(() => new Future.sync(() { - ss.register(subKey, 'fork$i'); - ss.registerScopeExitCallback(cleanup); - ss.register(ownSubKey, 'sub$i'); - ss.registerScopeExitCallback(cleanup); - - expect(ss.lookup(rootKey), equals('root')); - expect(ss.lookup(subKey), equals('fork$i')); - expect(ss.lookup(ownSubKey), equals('sub$i')); - expect(ss.lookup(otherSubKey), isNull); - }))); + ss.register(subKey, 'fork$i'); + ss.registerScopeExitCallback(cleanup); + ss.register(ownSubKey, 'sub$i'); + ss.registerScopeExitCallback(cleanup); + + expect(ss.lookup(rootKey), equals('root')); + expect(ss.lookup(subKey), equals('fork$i')); + expect(ss.lookup(ownSubKey), equals('sub$i')); + expect(ss.lookup(otherSubKey), isNull); + }))); } return Future.wait([ - spawnChild(subKey1, subKey2, 1, () => cleanupFork1++), - spawnChild(subKey2, subKey1, 2, () => cleanupFork2++), + spawnChild(subKey1, subKey2, 1, () => cleanupFork1++), + spawnChild(subKey2, subKey1, 2, () => cleanupFork2++), ]); })); }); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index ef3b77b7..88e52c35 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -37,10 +37,9 @@ runTests(Storage storage, Bucket testBucket) { expect(info.etag, isNotNull); expect(info.created is DateTime, isTrue); expect(info.id, isNotNull); - return storage.deleteBucket(bucketName) - .then(expectAsync((result) { - expect(result, isNull); - })); + return storage.deleteBucket(bucketName).then(expectAsync((result) { + expect(result, isNull); + })); })); })); }); @@ -48,28 +47,28 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-predefined-acl-delete', () { Future test(predefinedAcl, expectedLength) { var bucketName = generateBucketName(); - return storage.createBucket(bucketName, predefinedAcl: predefinedAcl) + return storage + 
.createBucket(bucketName, predefinedAcl: predefinedAcl) .then(expectAsync((result) { + expect(result, isNull); + return storage.bucketInfo(bucketName).then(expectAsync((info) { + var acl = info.acl; + expect(info.bucketName, bucketName); + expect(acl.entries.length, expectedLength); + return storage.deleteBucket(bucketName).then(expectAsync((result) { expect(result, isNull); - return storage.bucketInfo(bucketName).then(expectAsync((info) { - var acl = info.acl; - expect(info.bucketName, bucketName); - expect(acl.entries.length, expectedLength); - return storage.deleteBucket(bucketName) - .then(expectAsync((result) { - expect(result, isNull); - })); - })); + })); + })); })); } return Future.forEach([ - // TODO: Figure out why some returned ACLs are empty. - () => test(PredefinedAcl.authenticatedRead, 0), - // [test, [PredefinedAcl.private, 0]], // TODO: Cannot delete. - () => test(PredefinedAcl.projectPrivate, 3), - () => test(PredefinedAcl.publicRead, 0), - () => test(PredefinedAcl.publicReadWrite, 0) + // TODO: Figure out why some returned ACLs are empty. + () => test(PredefinedAcl.authenticatedRead, 0), + // [test, [PredefinedAcl.private, 0]], // TODO: Cannot delete. + () => test(PredefinedAcl.projectPrivate, 3), + () => test(PredefinedAcl.publicRead, 0), + () => test(PredefinedAcl.publicReadWrite, 0) ], (f) => f().then(expectAsync((_) {}))); }); @@ -91,53 +90,53 @@ runTests(Storage storage, Bucket testBucket) { test('create-read-delete', () { Future test(name, bytes) { - return withTestBucket((Bucket bucket) { - return bucket.writeBytes('test', bytes).then(expectAsync((info) { - expect(info, isNotNull); - return bucket.read('test') - .fold([], (p, e) => p..addAll(e)) - .then(expectAsync((result) { - expect(result, bytes); - return bucket.delete('test').then(expectAsync((result) { - expect(result, isNull); + return withTestBucket((Bucket bucket) { + return bucket.writeBytes('test', bytes).then(expectAsync((info) { + expect(info, isNotNull); + return bucket + .read('test') + .fold([], (p, e) => p..addAll(e)).then(expectAsync((result) { + expect(result, bytes); + return bucket.delete('test').then(expectAsync((result) { + expect(result, isNull); })); + })); })); - })); - }); + }); } return Future.forEach([ - () => test('test-1', [1, 2, 3]), - () => test('test-2', bytesResumableUpload) - ], (f) => f().then(expectAsync((_) {}))); + () => test('test-1', [1, 2, 3]), + () => test('test-2', bytesResumableUpload) + ], (f) => f().then(expectAsync((_) {}))); }); test('create-with-predefined-acl-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, predefinedAcl, expectedLength) { - return bucket.writeBytes( - objectName, [1, 2, 3], predefinedAcl: predefinedAcl) + return bucket + .writeBytes(objectName, [1, 2, 3], predefinedAcl: predefinedAcl) .then(expectAsync((result) { - expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { - var acl = info.metadata.acl; - expect(info.name, objectName); - expect(info.etag, isNotNull); - expect(acl.entries.length, expectedLength); - return bucket.delete(objectName).then(expectAsync((result) { - expect(result, isNull); - })); - })); + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + var acl = info.metadata.acl; + expect(info.name, objectName); + expect(info.etag, isNotNull); + expect(acl.entries.length, expectedLength); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); })); } return Future.forEach([ - () => 
test('test-1', PredefinedAcl.authenticatedRead, 2), - () => test('test-2', PredefinedAcl.private, 1), - () => test('test-3', PredefinedAcl.projectPrivate, 4), - () => test('test-4', PredefinedAcl.publicRead, 2), - () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2), - () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) + () => test('test-1', PredefinedAcl.authenticatedRead, 2), + () => test('test-2', PredefinedAcl.private, 1), + () => test('test-3', PredefinedAcl.projectPrivate, 4), + () => test('test-4', PredefinedAcl.publicRead, 2), + () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2), + () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) ], (f) => f().then(expectAsync((_) {}))); }); }); @@ -145,50 +144,52 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-acl-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, acl, expectedLength) { - return bucket.writeBytes(objectName, [1, 2, 3], acl: acl) + return bucket + .writeBytes(objectName, [1, 2, 3], acl: acl) .then(expectAsync((result) { - expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { - var acl = info.metadata.acl; - expect(info.name, objectName); - expect(info.etag, isNotNull); - expect(acl.entries.length, expectedLength); - return bucket.delete(objectName).then(expectAsync((result) { - expect(result, isNull); - })); - })); + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + var acl = info.metadata.acl; + expect(info.name, objectName); + expect(info.etag, isNotNull); + expect(acl.entries.length, expectedLength); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); })); } Acl acl1 = new Acl( [new AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]); - Acl acl2 = new Acl( - [new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry(new AccountScope('sgjesse@google.com'), - AclPermission.WRITE)]); - Acl acl3 = new Acl( - [new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry(new AccountScope('sgjesse@google.com'), - AclPermission.WRITE), - new AclEntry(new GroupScope('misc@dartlang.org'), - AclPermission.READ)]); - Acl acl4 = new Acl( - [new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry(new AccountScope('sgjesse@google.com'), - AclPermission.WRITE), - new AclEntry(new GroupScope('misc@dartlang.org'), - AclPermission.READ), - new AclEntry(new DomainScope('dartlang.org'), - AclPermission.FULL_CONTROL)]); + Acl acl2 = new Acl([ + new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry( + new AccountScope('sgjesse@google.com'), AclPermission.WRITE) + ]); + Acl acl3 = new Acl([ + new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry( + new AccountScope('sgjesse@google.com'), AclPermission.WRITE), + new AclEntry(new GroupScope('misc@dartlang.org'), AclPermission.READ) + ]); + Acl acl4 = new Acl([ + new AclEntry(AclScope.allUsers, AclPermission.WRITE), + new AclEntry( + new AccountScope('sgjesse@google.com'), AclPermission.WRITE), + new AclEntry(new GroupScope('misc@dartlang.org'), AclPermission.READ), + new AclEntry( + new DomainScope('dartlang.org'), AclPermission.FULL_CONTROL) + ]); // The expected length of the returned ACL is one longer than the one // use during creation as an additional 'used-ID' ACL entry is added // by cloud storage during creation. 
return Future.forEach([ - () => test('test-1', acl1, acl1.entries.length + 1), - () => test('test-2', acl2, acl2.entries.length + 1), - () => test('test-3', acl3, acl3.entries.length + 1), - () => test('test-4', acl4, acl4.entries.length + 1) + () => test('test-1', acl1, acl1.entries.length + 1), + () => test('test-2', acl2, acl2.entries.length + 1), + () => test('test-3', acl3, acl3.entries.length + 1), + () => test('test-4', acl4, acl4.entries.length + 1) ], (f) => f().then(expectAsync((_) {}))); }); }); @@ -196,31 +197,30 @@ runTests(Storage storage, Bucket testBucket) { test('create-with-metadata-delete', () { return withTestBucket((Bucket bucket) { Future test(objectName, metadata, bytes) { - return bucket.writeBytes(objectName, bytes, metadata: metadata) + return bucket + .writeBytes(objectName, bytes, metadata: metadata) .then(expectAsync((result) { - expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { - expect(info.name, objectName); - expect(info.length, bytes.length); - expect(info.updated is DateTime, isTrue); - expect(info.md5Hash, isNotNull); - expect(info.crc32CChecksum, isNotNull); - expect(info.downloadLink is Uri, isTrue); - expect(info.generation.objectGeneration, isNotNull); - expect(info.generation.metaGeneration, 1); - expect(info.metadata.contentType, metadata.contentType); - expect(info.metadata.cacheControl, metadata.cacheControl); - expect(info.metadata.contentDisposition, - metadata.contentDisposition); - expect(info.metadata.contentEncoding, - metadata.contentEncoding); - expect(info.metadata.contentLanguage, - metadata.contentLanguage); - expect(info.metadata.custom, metadata.custom); - return bucket.delete(objectName).then(expectAsync((result) { - expect(result, isNull); - })); - })); + expect(result, isNotNull); + return bucket.info(objectName).then(expectAsync((info) { + expect(info.name, objectName); + expect(info.length, bytes.length); + expect(info.updated is DateTime, isTrue); + expect(info.md5Hash, isNotNull); + expect(info.crc32CChecksum, isNotNull); + expect(info.downloadLink is Uri, isTrue); + expect(info.generation.objectGeneration, isNotNull); + expect(info.generation.metaGeneration, 1); + expect(info.metadata.contentType, metadata.contentType); + expect(info.metadata.cacheControl, metadata.cacheControl); + expect(info.metadata.contentDisposition, + metadata.contentDisposition); + expect(info.metadata.contentEncoding, metadata.contentEncoding); + expect(info.metadata.contentLanguage, metadata.contentLanguage); + expect(info.metadata.custom, metadata.custom); + return bucket.delete(objectName).then(expectAsync((result) { + expect(result, isNull); + })); + })); })); } @@ -234,10 +234,10 @@ runTests(Storage storage, Bucket testBucket) { custom: {'a': 'b', 'c': 'd'}); return Future.forEach([ - () => test('test-1', metadata1, [65, 66, 67]), - () => test('test-2', metadata2, [65, 66, 67]), - () => test('test-3', metadata1, bytesResumableUpload), - () => test('test-4', metadata2, bytesResumableUpload) + () => test('test-1', metadata1, [65, 66, 67]), + () => test('test-2', metadata2, [65, 66, 67]), + () => test('test-3', metadata1, bytesResumableUpload), + () => test('test-4', metadata2, bytesResumableUpload) ], (f) => f().then(expectAsync((_) {}))); }); }); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 9185e9ba..c3142af2 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -17,11 +17,9 @@ import 
'package:googleapis/storage/v1.dart' as storage; import '../common.dart'; import '../common_e2e.dart'; - const String HOSTNAME = 'www.googleapis.com'; const String ROOT_PATH = '/storage/v1/'; - http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); withMockClient(function(MockClient client, Storage storage)) { @@ -53,30 +51,34 @@ main() { }); test('create-with-predefined-acl', () { - var predefined = - [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], - [PredefinedAcl.private, 'private'], - [PredefinedAcl.projectPrivate, 'projectPrivate'], - [PredefinedAcl.publicRead, 'publicRead'], - [PredefinedAcl.publicReadWrite, 'publicReadWrite']]; + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite'] + ]; withMockClient((mock, api) { int count = 0; - mock.register('POST', 'b', expectAsync((request) { - var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); - expect(requestBucket.name, bucketName); - expect(requestBucket.acl, isNull); - expect(request.url.queryParameters['predefinedAcl'], - predefined[count++][1]); - return mock.respond(new storage.Bucket()..name = bucketName); - }, count: predefined.length)); + mock.register( + 'POST', + 'b', + expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + expect(requestBucket.name, bucketName); + expect(requestBucket.acl, isNull); + expect(request.url.queryParameters['predefinedAcl'], + predefined[count++][1]); + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: predefined.length)); var futures = []; for (int i = 0; i < predefined.length; i++) { - futures.add(api.createBucket(bucketName, - predefinedAcl: predefined[i][0])); + futures.add( + api.createBucket(bucketName, predefinedAcl: predefined[i][0])); } return Future.wait(futures); }); @@ -84,49 +86,49 @@ main() { test('create-with-acl', () { var acl1 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); var acl2 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + ]); var acl3 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), - AclPermission.READ), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), AclPermission.READ), + ]); var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { int count = 0; - mock.register('POST', 'b', expectAsync((request) { - var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); - expect(requestBucket.name, bucketName); - expect(request.url.queryParameters['predefinedAcl'], isNull); - expect(requestBucket.acl, isNotNull); - expect(requestBucket.acl.length, count + 1); - 
expect(requestBucket.acl[0].entity, 'user-user@example.com'); - expect(requestBucket.acl[0].role, 'OWNER'); - if (count > 0) { - expect(requestBucket.acl[1].entity, 'group-group@example.com'); - expect(requestBucket.acl[1].role, 'WRITER'); - } - if (count > 2) { - expect(requestBucket.acl[2].entity, 'domain-example.com'); - expect(requestBucket.acl[2].role, 'READER'); - } - count++; - return mock.respond(new storage.Bucket()..name = bucketName); - }, count: acls.length)); + mock.register( + 'POST', + 'b', + expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], isNull); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl.length, count + 1); + expect(requestBucket.acl[0].entity, 'user-user@example.com'); + expect(requestBucket.acl[0].role, 'OWNER'); + if (count > 0) { + expect(requestBucket.acl[1].entity, 'group-group@example.com'); + expect(requestBucket.acl[1].role, 'WRITER'); + } + if (count > 2) { + expect(requestBucket.acl[2].entity, 'domain-example.com'); + expect(requestBucket.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: acls.length)); var futures = []; for (int i = 0; i < acls.length; i++) { @@ -137,66 +139,67 @@ main() { }); test('create-with-acl-and-predefined-acl', () { - var predefined = - [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], - [PredefinedAcl.private, 'private'], - [PredefinedAcl.projectPrivate, 'projectPrivate'], - [PredefinedAcl.publicRead, 'publicRead'], - [PredefinedAcl.publicReadWrite, 'publicReadWrite']]; + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.publicReadWrite, 'publicReadWrite'] + ]; var acl1 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); var acl2 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + ]); var acl3 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), - AclPermission.READ), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), AclPermission.READ), + ]); var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { int count = 0; - mock.register('POST', 'b', expectAsync((request) { - var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); - int predefinedIndex = count ~/ acls.length; - int aclIndex = count % acls.length; - expect(requestBucket.name, bucketName); - expect(request.url.queryParameters['predefinedAcl'], - predefined[predefinedIndex][1]); - expect(requestBucket.acl, isNotNull); - expect(requestBucket.acl.length, aclIndex + 1); - 
expect(requestBucket.acl[0].entity, 'user-user@example.com'); - expect(requestBucket.acl[0].role, 'OWNER'); - if (aclIndex > 0) { - expect(requestBucket.acl[1].entity, 'group-group@example.com'); - expect(requestBucket.acl[1].role, 'WRITER'); - } - if (aclIndex > 2) { - expect(requestBucket.acl[2].entity, 'domain-example.com'); - expect(requestBucket.acl[2].role, 'READER'); - } - count++; - return mock.respond(new storage.Bucket()..name = bucketName); - }, count: predefined.length * acls.length)); + mock.register( + 'POST', + 'b', + expectAsync((request) { + var requestBucket = + new storage.Bucket.fromJson(JSON.decode(request.body)); + int predefinedIndex = count ~/ acls.length; + int aclIndex = count % acls.length; + expect(requestBucket.name, bucketName); + expect(request.url.queryParameters['predefinedAcl'], + predefined[predefinedIndex][1]); + expect(requestBucket.acl, isNotNull); + expect(requestBucket.acl.length, aclIndex + 1); + expect(requestBucket.acl[0].entity, 'user-user@example.com'); + expect(requestBucket.acl[0].role, 'OWNER'); + if (aclIndex > 0) { + expect(requestBucket.acl[1].entity, 'group-group@example.com'); + expect(requestBucket.acl[1].role, 'WRITER'); + } + if (aclIndex > 2) { + expect(requestBucket.acl[2].entity, 'domain-example.com'); + expect(requestBucket.acl[2].role, 'READER'); + } + count++; + return mock.respond(new storage.Bucket()..name = bucketName); + }, count: predefined.length * acls.length)); var futures = []; for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { - futures.add(api.createBucket( - bucketName, predefinedAcl: predefined[i][0], acl: acls[j])); + futures.add(api.createBucket(bucketName, + predefinedAcl: predefined[i][0], acl: acls[j])); } } return Future.wait(futures); @@ -205,8 +208,8 @@ main() { test('delete', () { withMockClient((mock, api) { - mock.register( - 'DELETE', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + mock.register('DELETE', new RegExp(r'b/[a-z/-]*$'), + expectAsync((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); return mock.respond(new storage.Bucket()..name = bucketName); @@ -221,15 +224,17 @@ main() { withMockClient((mock, api) { mock.register( - 'GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { - expect(request.url.path, '${ROOT_PATH}b/$bucketName'); - expect(request.body.length, 0); - if (exists) { - return mock.respond(new storage.Bucket()..name = bucketName); - } else { - return mock.respondError(404); - } - }, count: 2)); + 'GET', + new RegExp(r'b/[a-z/-]*$'), + expectAsync((request) { + expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.body.length, 0); + if (exists) { + return mock.respond(new storage.Bucket()..name = bucketName); + } else { + return mock.respondError(404); + } + }, count: 2)); return api.bucketExists(bucketName).then(expectAsync((result) { expect(result, isTrue); @@ -241,13 +246,12 @@ main() { test('stat', () { withMockClient((mock, api) { - mock.register( - 'GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + mock.register('GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); return mock.respond(new storage.Bucket() - ..name = bucketName - ..timeCreated = new DateTime(2014)); + ..name = bucketName + ..timeCreated = new DateTime(2014)); })); return api.bucketInfo(bucketName).then(expectAsync((result) { @@ -265,18 +269,16 @@ main() { return mock.respond(new storage.Buckets()); 
})); - api.listBucketNames().listen( - (_) => throw 'Unexpected', + api.listBucketNames().listen((_) => throw 'Unexpected', onDone: expectAsync(() => null)); }); }); test('immediate-cancel', () { withMockClient((mock, api) { - api.listBucketNames().listen( - (_) => throw 'Unexpected', + api.listBucketNames().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') - ..cancel(); + ..cancel(); }); }); @@ -292,14 +294,14 @@ main() { test('copy', () { withMockClient((mock, api) { mock.register( - 'POST', - 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', + 'POST', 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', expectAsync((request) { return mock.respond(new storage.Object()..name = 'destObject'); })); - expect(api.copyObject('gs://srcBucket/srcObject', - 'gs://destBucket/destObject'), - completion(isNull)); + expect( + api.copyObject( + 'gs://srcBucket/srcObject', 'gs://destBucket/destObject'), + completion(isNull)); }); }); @@ -308,9 +310,9 @@ main() { expect(() => api.copyObject('a', 'b'), throwsA(isFormatException)); expect(() => api.copyObject('a/b', 'c/d'), throwsA(isFormatException)); expect(() => api.copyObject('gs://a/b', 'gs://c/'), - throwsA(isFormatException)); + throwsA(isFormatException)); expect(() => api.copyObject('gs://a/b', 'gs:///c'), - throwsA(isFormatException)); + throwsA(isFormatException)); }); }); }); @@ -334,16 +336,16 @@ main() { expectNormalUpload(mock, data, objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); - mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - return mock.processNormalMediaUpload(request) + mock.registerUpload('POST', 'b/$bucketName/o', expectAsync((request) { + return mock + .processNormalMediaUpload(request) .then(expectAsync((mediaUpload) { - var object = - new storage.Object.fromJson(JSON.decode(mediaUpload.json)); - expect(object.name, objectName); - expect(mediaUpload.bytes, bytes); - expect(mediaUpload.contentType, 'application/octet-stream'); - return mock.respond(new storage.Object()..name = objectName); + var object = + new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + expect(object.name, objectName); + expect(mediaUpload.bytes, bytes); + expect(mediaUpload.contentType, 'application/octet-stream'); + return mock.respond(new storage.Object()..name = objectName); })); })); } @@ -352,24 +354,26 @@ main() { var bytes = data.fold([], (p, e) => p..addAll(e)); expect(bytes.length, bytesResumableUpload.length); int count = 0; - mock.registerResumableUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync((request) { var requestObject = new storage.Object.fromJson(JSON.decode(request.body)); expect(requestObject.name, objectName); return mock.respondInitiateResumableUpload(PROJECT); })); mock.registerResumableUpload( - 'PUT', 'b/$PROJECT/o', expectAsync((request) { - count++; - if (count == 1) { - expect(request.bodyBytes.length, MB); - return mock.respondContinueResumableUpload(); - } else { - expect(request.bodyBytes.length, 1); - return mock.respond(new storage.Object()..name = objectName); - } - }, count: 2)); + 'PUT', + 'b/$PROJECT/o', + expectAsync((request) { + count++; + if (count == 1) { + expect(request.bodyBytes.length, MB); + return mock.respondContinueResumableUpload(); + } else { + expect(request.bodyBytes.length, 1); + return mock.respond(new storage.Object()..name = objectName); + } + }, count: 2)); } checkResult(result) { @@ -388,7 +392,8 @@ main() { Future 
addStreamToSink(sink, List> data) { sink.done.then(expectAsync(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); - return sink.addStream(new Stream.fromIterable(data)) + return sink + .addStream(new Stream.fromIterable(data)) .then((_) => sink.close()) .then(expectAsync(checkResult)) .catchError((e) => throw 'Unexpected $e'); @@ -398,7 +403,8 @@ main() { sink.done.then(expectAsync(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); data.forEach((bytes) => sink.add(bytes)); - return sink.close() + return sink + .close() .then(expectAsync(checkResult)) .catchError((e) => throw 'Unexpected $e'); } @@ -428,7 +434,9 @@ main() { .then(expectAsync((_) => upload(addStreamToSink, false))) .then(expectAsync((_) => upload(addToSink, true))) .then(expectAsync((_) => upload(addToSink, false))); - }; + } + + ; test('write-short-1', () { withMockClient((mock, api) { @@ -438,10 +446,8 @@ main() { test('write-short-2', () { withMockClient((mock, api) { - runTest(mock, - api, - [bytesNormalUpload, bytesNormalUpload], - bytesNormalUpload.length * 2); + runTest(mock, api, [bytesNormalUpload, bytesNormalUpload], + bytesNormalUpload.length * 2); }); }); @@ -453,31 +459,27 @@ main() { test('write-short-error', () { withMockClient((mock, api) { - Future test(length) { mock.clear(); - mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerUpload('POST', 'b/$bucketName/o', expectAsync((request) { return mock.respondError(500); })); var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done - .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), - test: testDetailedApiError); - sink.done - .catchError(expectAsync(expectNotNull), - test: testDetailedApiError); + sink.done.then((_) => throw 'Unexpected').catchError( + expectAsync(expectNotNull), + test: testDetailedApiError); + sink.done.catchError(expectAsync(expectNotNull), + test: testDetailedApiError); return new Stream.fromIterable([bytesNormalUpload]) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync(expectNotNull), - test: testDetailedApiError); + test: testDetailedApiError); } - test(null) // Unknown length. + test(null) // Unknown length. .then(expectAsync((_) => test(1))) .then(expectAsync((_) => test(10))) .then(expectAsync((_) => test(maxNormalUpload))); @@ -487,69 +489,66 @@ main() { // TODO: Mock the resumable upload timeout. test('write-long-error', () { withMockClient((mock, api) { - Future test(length) { mock.clear(); - mock.registerResumableUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync((request) { return mock.respondInitiateResumableUpload(PROJECT); })); mock.registerResumableUpload( - 'PUT', 'b/$PROJECT/o', expectAsync((request) { - return mock.respondError(502); - }, count: 3)); // Default 3 retries in googleapis library. - + 'PUT', + 'b/$PROJECT/o', + expectAsync((request) { + return mock.respondError(502); + }, count: 3)); // Default 3 retries in googleapis library. 
var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); - sink.done - .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), - test: testDetailedApiError); + sink.done.then((_) => throw 'Unexpected').catchError( + expectAsync(expectNotNull), + test: testDetailedApiError); return new Stream.fromIterable([bytesResumableUpload]) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync(expectNotNull), - test: testDetailedApiError); + test: testDetailedApiError); } - test(null) // Unknown length. + test(null) // Unknown length. .then(expectAsync((_) => test(minResumableUpload))); }); }); test('write-long-wrong-length', () { withMockClient((mock, api) { - Future test(data, length) { mock.clear(); - mock.registerResumableUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync((request) { return mock.respondInitiateResumableUpload(PROJECT); })); - mock.registerResumableUpload( - 'PUT', 'b/$PROJECT/o', expectAsync((request) { + mock.registerResumableUpload('PUT', 'b/$PROJECT/o', + expectAsync((request) { return mock.respondContinueResumableUpload(); - })); // Default 3 retries in googleapis library. + })); // Default 3 retries in googleapis library. var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done - .then((_) => throw 'Unexpected') - .catchError( - expectAsync(expectNotNull), - test: (e) => e is String || e is storage.ApiRequestError); + sink.done.then((_) => throw 'Unexpected').catchError( + expectAsync(expectNotNull), + test: (e) => e is String || e is storage.ApiRequestError); return new Stream.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') - .catchError( - expectAsync(expectNotNull), + .catchError(expectAsync(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); } test([bytesResumableUpload], bytesResumableUpload.length + 1) - .then(expectAsync((_) => test([bytesResumableUpload, [1, 2]], - bytesResumableUpload.length + 1))); + .then(expectAsync((_) => test([ + bytesResumableUpload, + [1, 2] + ], bytesResumableUpload.length + 1))); }); }); @@ -560,10 +559,13 @@ main() { sink.done .then((_) => throw 'Unexpected') .catchError(expectAsync(expectNotNull), test: testArgumentError); - var stream = new Stream.fromIterable([[1, 2, 3]]); + var stream = new Stream.fromIterable([ + [1, 2, 3] + ]); sink.addStream(stream).then((_) { sink.addError(new ArgumentError()); - sink.close() + sink + .close() .catchError(expectAsync(expectNotNull), test: testArgumentError); }); }); @@ -571,15 +573,15 @@ main() { test('write-long-add-error', () { withMockClient((mock, api) { - mock.registerResumableUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerResumableUpload('POST', 'b/$bucketName/o', + expectAsync((request) { return mock.respondInitiateResumableUpload(PROJECT); })); // The resumable upload will buffer until either close or a full chunk, // so when we add an error the last byte is never sent. Therefore this // PUT is only called once. 
- mock.registerResumableUpload( - 'PUT', 'b/$PROJECT/o', expectAsync((request) { + mock.registerResumableUpload('PUT', 'b/$PROJECT/o', + expectAsync((request) { expect(request.bodyBytes.length, 1024 * 1024); return mock.respondContinueResumableUpload(); })); @@ -592,42 +594,48 @@ main() { var stream = new Stream.fromIterable([bytesResumableUpload]); sink.addStream(stream).then((_) { sink.addError(new ArgumentError()); - sink.close() + sink + .close() .catchError(expectAsync(expectNotNull), test: testArgumentError); }); }); }); test('write-with-metadata-short', () { - var metadata = - [new ObjectMetadata(contentType: 'mime/type'), - new ObjectMetadata(contentType: 'type/mime', - cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache', - contentDisposition: 'disp-content'), - new ObjectMetadata(contentDisposition: 'disp-content', - contentEncoding: 'encoding', - contentLanguage: 'language'), - new ObjectMetadata(custom: {'x': 'y'}), - new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) - ]; + var metadata = [ + new ObjectMetadata(contentType: 'mime/type'), + new ObjectMetadata( + contentType: 'type/mime', cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache'), + new ObjectMetadata( + cacheControl: 'control-cache', contentDisposition: 'disp-content'), + new ObjectMetadata( + contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + new ObjectMetadata(custom: {'x': 'y'}), + new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; withMockClient((mock, api) { int count = 0; var bytes = [1, 2, 3]; mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - return mock.processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + 'POST', + 'b/$bucketName/o', + expectAsync((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); ObjectMetadata m = metadata[count]; expect(object.name, objectName); expect(mediaUpload.bytes, bytes); var contentType = m.contentType != null - ? m.contentType : 'application/octet-stream'; + ? 
m.contentType + : 'application/octet-stream'; expect(mediaUpload.contentType, contentType); expect(object.cacheControl, m.cacheControl); expect(object.contentDisposition, m.contentDisposition); @@ -637,111 +645,122 @@ main() { count++; return mock.respond(new storage.Object()..name = objectName); })); - }, count: metadata.length)); + }, count: metadata.length)); var bucket = api.bucket(bucketName); var futures = []; for (int i = 0; i < metadata.length; i++) { - futures.add(bucket.writeBytes(objectName, bytes, - metadata: metadata[i])); + futures + .add(bucket.writeBytes(objectName, bytes, metadata: metadata[i])); } return Future.wait(futures); }); }); test('write-with-metadata-long', () { - var metadata = - [new ObjectMetadata(contentType: 'mime/type'), - new ObjectMetadata(contentType: 'type/mime', - cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache', - contentDisposition: 'disp-content'), - new ObjectMetadata(contentDisposition: 'disp-content', - contentEncoding: 'encoding', - contentLanguage: 'language'), - new ObjectMetadata(custom: {'x': 'y'}), - new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) - ]; + var metadata = [ + new ObjectMetadata(contentType: 'mime/type'), + new ObjectMetadata( + contentType: 'type/mime', cacheControl: 'control-cache'), + new ObjectMetadata(cacheControl: 'control-cache'), + new ObjectMetadata( + cacheControl: 'control-cache', contentDisposition: 'disp-content'), + new ObjectMetadata( + contentDisposition: 'disp-content', + contentEncoding: 'encoding', + contentLanguage: 'language'), + new ObjectMetadata(custom: {'x': 'y'}), + new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ]; withMockClient((mock, api) { int countInitial = 0; int countData = 0; mock.registerResumableUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - var object = new storage.Object.fromJson(JSON.decode(request.body)); - ObjectMetadata m = metadata[countInitial]; - expect(object.name, objectName); - expect(object.cacheControl, m.cacheControl); - expect(object.contentDisposition, m.contentDisposition); - expect(object.contentEncoding, m.contentEncoding); - expect(object.contentLanguage, m.contentLanguage); - expect(object.metadata, m.custom); - countInitial++; - return mock.respondInitiateResumableUpload(PROJECT); - }, count: metadata.length)); + 'POST', + 'b/$bucketName/o', + expectAsync((request) { + var object = + new storage.Object.fromJson(JSON.decode(request.body)); + ObjectMetadata m = metadata[countInitial]; + expect(object.name, objectName); + expect(object.cacheControl, m.cacheControl); + expect(object.contentDisposition, m.contentDisposition); + expect(object.contentEncoding, m.contentEncoding); + expect(object.contentLanguage, m.contentLanguage); + expect(object.metadata, m.custom); + countInitial++; + return mock.respondInitiateResumableUpload(PROJECT); + }, count: metadata.length)); mock.registerResumableUpload( - 'PUT', 'b/$PROJECT/o', expectAsync((request) { - ObjectMetadata m = metadata[countData % metadata.length]; - var contentType = m.contentType != null - ? 
m.contentType : 'application/octet-stream'; - expect(request.headers['content-type'], contentType); - bool firstPart = countData < metadata.length; - countData++; - if (firstPart) { - expect(request.bodyBytes.length, MB); - return mock.respondContinueResumableUpload(); - } else { - expect(request.bodyBytes.length, 1); - return mock.respond(new storage.Object()..name = objectName); - } - }, count: metadata.length * 2)); + 'PUT', + 'b/$PROJECT/o', + expectAsync((request) { + ObjectMetadata m = metadata[countData % metadata.length]; + var contentType = m.contentType != null + ? m.contentType + : 'application/octet-stream'; + expect(request.headers['content-type'], contentType); + bool firstPart = countData < metadata.length; + countData++; + if (firstPart) { + expect(request.bodyBytes.length, MB); + return mock.respondContinueResumableUpload(); + } else { + expect(request.bodyBytes.length, 1); + return mock.respond(new storage.Object()..name = objectName); + } + }, count: metadata.length * 2)); var bucket = api.bucket(bucketName); var futures = []; for (int i = 0; i < metadata.length; i++) { futures.add(bucket.writeBytes(objectName, bytesResumableUpload, - metadata: metadata[i])); + metadata: metadata[i])); } return Future.wait(futures); }); }); test('write-with-predefined-acl', () { - var predefined = - [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], - [PredefinedAcl.private, 'private'], - [PredefinedAcl.projectPrivate, 'projectPrivate'], - [PredefinedAcl.publicRead, 'publicRead'], - [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], - [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead']]; + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead'] + ]; withMockClient((mock, api) { int count = 0; - var bytes = [1,2,3]; + var bytes = [1, 2, 3]; mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - return mock.processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + 'POST', + 'b/$bucketName/o', + expectAsync((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], - predefined[count++][1]); + predefined[count++][1]); expect(object.acl, isNull); return mock.respond(new storage.Object()..name = objectName); })); - }, count: predefined.length)); + }, count: predefined.length)); var bucket = api.bucket(bucketName); var futures = []; for (int i = 0; i < predefined.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, - predefinedAcl: predefined[i][0])); + predefinedAcl: predefined[i][0])); } return Future.wait(futures); }); @@ -749,34 +768,34 @@ main() { test('write-with-acl', () { var acl1 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); var acl2 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - ]); + new 
AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + ]); var acl3 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), - AclPermission.READ), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), AclPermission.READ), + ]); var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { int count = 0; - var bytes = [1,2,3]; + var bytes = [1, 2, 3]; mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - return mock.processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + 'POST', + 'b/$bucketName/o', + expectAsync((request) { + return mock + .processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); @@ -798,7 +817,7 @@ main() { count++; return mock.respond(new storage.Object()..name = objectName); })); - }, count: acls.length)); + }, count: acls.length)); var bucket = api.bucket(bucketName); var futures = []; @@ -810,43 +829,44 @@ main() { }); test('write-with-acl-and-predefined-acl', () { - var predefined = - [[PredefinedAcl.authenticatedRead, 'authenticatedRead'], - [PredefinedAcl.private, 'private'], - [PredefinedAcl.projectPrivate, 'projectPrivate'], - [PredefinedAcl.publicRead, 'publicRead'], - [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], - [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead']]; + var predefined = [ + [PredefinedAcl.authenticatedRead, 'authenticatedRead'], + [PredefinedAcl.private, 'private'], + [PredefinedAcl.projectPrivate, 'projectPrivate'], + [PredefinedAcl.publicRead, 'publicRead'], + [PredefinedAcl.bucketOwnerFullControl, 'bucketOwnerFullControl'], + [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead'] + ]; var acl1 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + ]); var acl2 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + ]); var acl3 = new Acl([ - new AclEntry(new AccountScope('user@example.com'), - AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), - AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), - AclPermission.READ), - ]); + new AclEntry( + new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + new AclEntry(new DomainScope('example.com'), AclPermission.READ), + ]); var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { int count = 0; - var bytes = [1,2,3]; + var bytes = [1, 2, 3]; mock.registerUpload( - 'POST', 'b/$bucketName/o', expectAsync((request) { - return mock.processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + 'POST', + 'b/$bucketName/o', + expectAsync((request) { + return mock + 
.processNormalMediaUpload(request) + .then(expectAsync((mediaUpload) { int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; var object = @@ -855,7 +875,7 @@ main() { expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], - predefined[predefinedIndex][1]); + predefined[predefinedIndex][1]); expect(object.acl, isNotNull); expect(object.acl.length, aclIndex + 1); expect(object.acl[0].entity, 'user-user@example.com'); @@ -871,14 +891,13 @@ main() { count++; return mock.respond(new storage.Object()..name = objectName); })); - }, count: predefined.length * acls.length)); + }, count: predefined.length * acls.length)); var bucket = api.bucket(bucketName); var futures = []; for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { - futures.add(bucket.writeBytes( - objectName, bytes, + futures.add(bucket.writeBytes(objectName, bytes, acl: acls[j], predefinedAcl: predefined[i][0])); } } @@ -889,8 +908,7 @@ main() { group('read', () { test('success', () async { await withMockClientAsync((MockClient mock, Storage api) async { - mock.register('GET', - 'b/$bucketName/o/$objectName', + mock.register('GET', 'b/$bucketName/o/$objectName', expectAsync(mock.respondBytes)); var bucket = api.bucket(bucketName); @@ -909,8 +927,8 @@ main() { await bucket.read(objectName, offset: 1).toList(); fail('An exception should be thrown'); } on ArgumentError catch (e) { - expect(e.message, - "length must have a value if offset is non-zero."); + expect( + e.message, "length must have a value if offset is non-zero."); } }); }); @@ -943,8 +961,7 @@ main() { test('with length', () async { await withMockClientAsync((MockClient mock, Storage api) async { - mock.register('GET', - 'b/$bucketName/o/$objectName', + mock.register('GET', 'b/$bucketName/o/$objectName', expectAsync(mock.respondBytes)); var bucket = api.bucket(bucketName); @@ -957,26 +974,26 @@ main() { test('with offset and length', () async { await withMockClientAsync((MockClient mock, Storage api) async { - mock.register('GET', - 'b/$bucketName/o/$objectName', + mock.register('GET', 'b/$bucketName/o/$objectName', expectAsync(mock.respondBytes)); var bucket = api.bucket(bucketName); var data = []; - await bucket.read(objectName, offset: 1, length: 3) - .forEach(data.addAll); + await bucket + .read(objectName, offset: 1, length: 3) + .forEach(data.addAll); expect(data, MockClient.bytes.sublist(1, 4)); }); }); test('file does not exist', () async { await withMockClientAsync((MockClient mock, Storage api) async { - mock.register( - 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { - expect(request.url.queryParameters['alt'], 'media'); - return mock.respondError(404); - })); + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync((request) { + expect(request.url.queryParameters['alt'], 'media'); + return mock.respondError(404); + })); var bucket = api.bucket(bucketName); @@ -992,13 +1009,13 @@ main() { test('stat', () { withMockClient((mock, api) { - mock.register( - 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync((request) { expect(request.url.queryParameters['alt'], 'json'); return mock.respond(new storage.Object() - ..name = objectName - ..updated = new DateTime(2014) - ..contentType = 'mime/type'); + ..name = objectName + ..updated = new DateTime(2014) + ..contentType = 'mime/type'); })); var api = new Storage(mock, 
PROJECT); @@ -1013,8 +1030,8 @@ main() { test('stat-acl', () { withMockClient((mock, api) { - mock.register( - 'GET', 'b/$bucketName/o/$objectName', expectAsync((request) { + mock.register('GET', 'b/$bucketName/o/$objectName', + expectAsync((request) { expect(request.url.queryParameters['alt'], 'json'); var acl1 = new storage.ObjectAccessControl(); acl1.entity = 'user-1234567890'; @@ -1026,8 +1043,8 @@ main() { acl3.entity = 'xxx-1234567890'; acl3.role = 'OWNER'; return mock.respond(new storage.Object() - ..name = objectName - ..acl = [acl1, acl2, acl3]); + ..name = objectName + ..acl = [acl1, acl2, acl3]); })); var api = new Storage(mock, PROJECT); @@ -1054,8 +1071,7 @@ main() { })); var bucket = api.bucket(bucketName); - bucket.list().listen( - (_) => throw 'Unexpected', + bucket.list().listen((_) => throw 'Unexpected', onDone: expectAsync(() => null)); }); }); @@ -1063,10 +1079,9 @@ main() { test('immediate-cancel', () { withMockClient((mock, api) { var bucket = api.bucket(bucketName); - bucket.list().listen( - (_) => throw 'Unexpected', + bucket.list().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') - ..cancel(); + ..cancel(); }); }); @@ -1088,8 +1103,7 @@ main() { var userRead = new AclEntry(user, AclPermission.READ); var groupWrite = new AclEntry(group, AclPermission.WRITE); - var domainFullControl = - new AclEntry(domain, AclPermission.FULL_CONTROL); + var domainFullControl = new AclEntry(domain, AclPermission.FULL_CONTROL); test('compare-scope', () { expect(id, new StorageIdScope('1234567890')); @@ -1103,22 +1117,28 @@ main() { test('compare-entries', () { expect(userRead, new AclEntry(user, AclPermission.READ)); expect(groupWrite, new AclEntry(group, AclPermission.WRITE)); - expect(domainFullControl, - new AclEntry(domain, AclPermission.FULL_CONTROL)); + expect( + domainFullControl, new AclEntry(domain, AclPermission.FULL_CONTROL)); }); test('compare-acls', () { var acl = new Acl([userRead, groupWrite, domainFullControl]); - expect(acl, new Acl([new AclEntry(user, AclPermission.READ), - new AclEntry(group, AclPermission.WRITE), - new AclEntry(domain, AclPermission.FULL_CONTROL)])); - expect(acl, - isNot(equals(new Acl([new AclEntry(group, AclPermission.WRITE), - new AclEntry(user, AclPermission.READ), - new AclEntry(domain, AclPermission.FULL_CONTROL)])))); + expect( + acl, + new Acl([ + new AclEntry(user, AclPermission.READ), + new AclEntry(group, AclPermission.WRITE), + new AclEntry(domain, AclPermission.FULL_CONTROL) + ])); + expect( + acl, + isNot(equals(new Acl([ + new AclEntry(group, AclPermission.WRITE), + new AclEntry(user, AclPermission.READ), + new AclEntry(domain, AclPermission.FULL_CONTROL) + ])))); }); - test('compare-predefined-acls', () { expect(PredefinedAcl.private, PredefinedAcl.private); expect(PredefinedAcl.private, isNot(equals(PredefinedAcl.publicRead))); From 5802e83b74d123c1d6fd4f785e3b755e4e69c4fd Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 6 Oct 2017 13:06:54 -0700 Subject: [PATCH 105/239] Cleanup (dart-lang/gcloud#50) * Make everything strong-mode clean * Enable and fix some lints * Type cleanup * Support the latest version of googleapis packages * oops on strong-mode clean * Prepare for release --- pkgs/gcloud/CHANGELOG.md | 6 ++++ pkgs/gcloud/analysis_options.yaml | 33 +++++++++++++++++++ pkgs/gcloud/lib/db.dart | 8 ++++- pkgs/gcloud/lib/pubsub.dart | 2 +- pkgs/gcloud/lib/service_scope.dart | 3 +- pkgs/gcloud/lib/src/datastore_impl.dart | 6 ++-- pkgs/gcloud/lib/src/db/annotations.dart | 11 ++++--- 
pkgs/gcloud/lib/src/db/model_db_impl.dart | 8 ++--- pkgs/gcloud/lib/src/db/models.dart | 2 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 7 ++-- pkgs/gcloud/lib/src/storage_impl.dart | 12 +++---- pkgs/gcloud/lib/storage.dart | 6 ++-- pkgs/gcloud/pubspec.yaml | 6 ++-- pkgs/gcloud/test/common_e2e.dart | 4 ++- .../datastore/e2e/datastore_test_impl.dart | 23 +++++++------ pkgs/gcloud/test/datastore/e2e/utils.dart | 4 +-- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 20 +++++------ .../test/db/e2e/metamodel_test_impl.dart | 7 ++-- pkgs/gcloud/test/db/model_db_test.dart | 8 +++-- pkgs/gcloud/test/db/properties_test.dart | 17 ++++++---- pkgs/gcloud/test/db_all_e2e_test.dart | 7 ++-- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 4 +-- pkgs/gcloud/test/pubsub/pubsub_test.dart | 7 ++-- pkgs/gcloud/test/storage/e2e_test.dart | 4 +-- pkgs/gcloud/test/storage/storage_test.dart | 18 +++++----- 25 files changed, 141 insertions(+), 92 deletions(-) create mode 100644 pkgs/gcloud/analysis_options.yaml diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 50b1d281..f18707b9 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,9 @@ +## 0.4.0+1 + +* Made a number of strong-mode improvements. + +* Updated dependency on `googleapis` and `googleapis_beta`. + ## 0.4.0 * Remove support for `FilterRelation.In` and "propertyname IN" for queries: diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml new file mode 100644 index 00000000..aeabf856 --- /dev/null +++ b/pkgs/gcloud/analysis_options.yaml @@ -0,0 +1,33 @@ +analyzer: + strong-mode: true +linter: + rules: + - avoid_empty_else + - avoid_init_to_null + - avoid_null_checks_in_equality_operators + - await_only_futures + - camel_case_types + - cancel_subscriptions + - control_flow_in_finally + - directives_ordering + - empty_catches + - empty_constructor_bodies + - empty_statements + - iterable_contains_unrelated_type + - library_names + - library_prefixes + - list_remove_unrelated_type + - package_api_docs + - package_names + - package_prefixed_library_names + - prefer_final_fields + - prefer_is_not_empty + - super_goes_last + - test_types_in_equals + - throw_in_finally + - type_init_formals + - unawaited_futures + #- unnecessary_brace_in_string_interps + # Need to debug usage in metamodel_test_impl – might reveal a bug + - unrelated_type_equality_checks + - valid_regexps diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 50540238..b020d3f7 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -6,11 +6,17 @@ library gcloud.db; import 'dart:async'; import 'dart:collection'; +// dart:core is imported explicitly so it is available at top-level without +// the `core` prefix defined below. +import 'dart:core'; +// Importing `dart:core` as `core` to allow access to `String` in `IdType` +// without conflicts. 
+import 'dart:core' as core; import 'dart:mirrors' as mirrors; import 'common.dart' show StreamFromPages; -import 'service_scope.dart' as ss; import 'datastore.dart' as datastore; +import 'service_scope.dart' as ss; part 'src/db/annotations.dart'; part 'src/db/db.dart'; diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 0e4bb10a..e12119ec 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -11,9 +11,9 @@ import 'package:http/http.dart' as http; import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; +import 'common.dart'; import 'service_scope.dart' as ss; -import 'common.dart'; export 'common.dart'; part 'src/pubsub_impl.dart'; diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index a5a58af6..754944f5 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -144,7 +144,8 @@ Object lookup(Object key) { /// Represents a global service scope of values stored via zones. class _ServiceScope { /// A mapping of keys to values stored inside the service scope. - final Map _key2Values = new Map(); + final Map _key2Values = + new Map(); /// A set which indicates whether an object was copied from it's parent. final Set _parentCopies = new Set(); diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 8f28a361..b5844e4a 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -6,11 +6,11 @@ library gcloud.datastore_impl; import 'dart:async'; +import 'package:googleapis/datastore/v1.dart' as api; import 'package:http/http.dart' as http; -import '../datastore.dart' as datastore; import '../common.dart' show Page; -import 'package:googleapis/datastore/v1.dart' as api; +import '../datastore.dart' as datastore; class TransactionImpl implements datastore.Transaction { final String data; @@ -390,7 +390,7 @@ class DatastoreImpl implements datastore.Datastore { // // A list of keys that were not looked up due to resource constraints. // repeated Key deferred = 3; // } - var entities = new List(apiKeys.length); + var entities = new List(apiKeys.length); for (int i = 0; i < apiKeys.length; i++) { var apiKey = apiKeys[i]; diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 9dd06148..92459d35 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -49,14 +49,16 @@ class Kind { /// The type used for id's of an entity. class IdType { /// Use integer ids for identifying entities. - static const IdType Integer = const IdType(1); + static const IdType Integer = const IdType('Integer'); /// Use string ids for identifying entities. - static const IdType String = const IdType(2); + static const IdType String = const IdType('String'); - final int _type; + final core.String _type; const IdType(this._type); + + core.String toString() => "IdType: $_type"; } /// Describes a property of an Entity. 
@@ -205,8 +207,7 @@ class BlobProperty extends PrimitiveProperty { Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; - datastore.BlobValue blobValue = value; - return blobValue.bytes; + return (value as datastore.BlobValue).bytes; } } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 1b3c7077..124b3d69 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -185,7 +185,7 @@ class ModelDBImpl implements ModelDB { libraries.forEach((mirrors.LibraryMirror lm) { lm.declarations.values .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) - .forEach((mirrors.ClassMirror declaration) { + .forEach((declaration) { _tryLoadNewModelClass(declaration); }); }); @@ -343,7 +343,7 @@ class ModelDBImpl implements ModelDB { } } -class _ModelDescription { +class _ModelDescription { final HashMap _property2FieldName = new HashMap(); final HashMap _field2PropertyName = @@ -383,7 +383,7 @@ class _ModelDescription { String kindName(ModelDBImpl db) => kind; - datastore.Entity encodeModel(ModelDBImpl db, Model model) { + datastore.Entity encodeModel(ModelDBImpl db, T model) { var key = db.toDatastoreKey(model.key); var properties = {}; @@ -475,7 +475,7 @@ class _ModelDescription { // - we may end up removing properties after a read-write cycle // - we may end up dropping added properties in a write // ([usedNames] := [realFieldNames] + [realPropertyNames]) -class _ExpandoModelDescription extends _ModelDescription { +class _ExpandoModelDescription extends _ModelDescription { Set realFieldNames; Set realPropertyNames; Set usedNames; diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 336b1af9..84546b93 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -48,7 +48,7 @@ class Key { Partition get partition { var obj = _parent; while (obj is! 
Partition) { - obj = obj._parent; + obj = (obj as Key)._parent; } return obj; } diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 077ad3f8..9db0f73e 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -5,16 +5,13 @@ part of gcloud.pubsub; class _PubSubImpl implements PubSub { - final http.Client _client; final String project; final pubsub.PubsubApi _api; final String _topicPrefix; final String _subscriptionPrefix; - _PubSubImpl(client, project) - : this._client = client, - this.project = project, - _api = new pubsub.PubsubApi(client), + _PubSubImpl(http.Client client, this.project) + : _api = new pubsub.PubsubApi(client), _topicPrefix = 'projects/$project/topics/', _subscriptionPrefix = 'projects/$project/subscriptions/'; diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index d6ea7f79..0acffd48 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -37,7 +37,7 @@ class _StorageImpl implements Storage { final String project; final storage_api.StorageApi _api; - _StorageImpl(client, this.project) + _StorageImpl(http.Client client, this.project) : _api = new storage_api.StorageApi(client); Future createBucket(String bucketName, @@ -80,7 +80,7 @@ class _StorageImpl implements Storage { } Stream listBucketNames() { - Future> firstPage(pageSize) { + Future<_BucketPageImpl> firstPage(pageSize) { return _listBuckets(pageSize, null) .then((response) => new _BucketPageImpl(this, pageSize, response)); } @@ -181,7 +181,7 @@ class _BucketImpl implements Bucket { return sink; } - Future writeBytes(String objectName, List bytes, + Future writeBytes(String objectName, List bytes, {ObjectMetadata metadata, Acl acl, PredefinedAcl predefinedAcl, @@ -237,7 +237,7 @@ class _BucketImpl implements Bucket { } Stream list({String prefix}) { - Future> firstPage(pageSize) { + Future<_ObjectPageImpl> firstPage(pageSize) { return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) .then((response) => new _ObjectPageImpl(this, prefix, pageSize, response)); @@ -494,10 +494,10 @@ class _MediaUploadStreamSink implements StreamSink> { final int _maxNormalUploadLength; int _bufferLength = 0; final List> buffer = new List>(); - final StreamController _controller = new StreamController(sync: true); + final _controller = new StreamController>(sync: true); StreamSubscription _subscription; StreamController _resumableController; - final _doneCompleter = new Completer(); + final _doneCompleter = new Completer<_ObjectInfoImpl>(); static const int _STATE_LENGTH_KNOWN = 0; static const int _STATE_PROBING_LENGTH = 1; diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index dc9fec4d..90b96808 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -49,16 +49,16 @@ library gcloud.storage; import 'dart:async'; -import 'dart:convert'; import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; +import 'dart:convert'; import 'package:http/http.dart' as http; import 'package:googleapis/storage/v1.dart' as storage_api; +import 'common.dart'; import 'service_scope.dart' as ss; -import 'common.dart'; export 'common.dart'; part 'src/storage_impl.dart'; @@ -363,7 +363,7 @@ class ProjectScope extends AclScope { /// Possible values are `owners`, `editors` and `viewers`. 
final String role; - ProjectScope(String project, String this.role) + ProjectScope(String project, this.role) : super._(AclScope._TYPE_PROJECT, project); /// Project ID. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e3e0b325..10e54658 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,13 +1,13 @@ name: gcloud -version: 0.4.0 +version: 0.4.0+1 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.13.0 <2.0.0' dependencies: - googleapis: '>=0.2.0 <0.37.0' - googleapis_beta: '>=0.10.0 <0.35.0' + googleapis: '>=0.2.0 <0.45.0' + googleapis_beta: '>=0.10.0 <0.40.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index c3ab8d8c..0899bc23 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -88,7 +88,9 @@ Future withAuthClient(List scopes, AuthCallback callback, return serviceKeyJson(serviceKeyLocation).then((keyJson) { var creds = new auth.ServiceAccountCredentials.fromJson(keyJson); - return auth.clientViaServiceAccount(creds, scopes).then((client) { + return auth + .clientViaServiceAccount(creds, scopes) + .then((http.Client client) { if (trace) client = new TraceClient(client); return callback(project, client).whenComplete(() => client.close()); }); diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index ed99a246..7185c1d4 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -33,11 +33,10 @@ import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import 'package:gcloud/common.dart'; import 'package:unittest/unittest.dart'; +import '../../common_e2e.dart'; import '../error_matchers.dart'; import 'utils.dart'; -import '../../common_e2e.dart'; - Future sleep(Duration duration) { var completer = new Completer(); new Timer(duration, completer.complete); @@ -137,8 +136,8 @@ runTests(Datastore datastore, String namespace) { for (var key in a.properties.keys) { if (!b.properties.containsKey(key)) return false; if (a.properties[key] != null && a.properties[key] is List) { - var aList = a.properties[key]; - var bList = b.properties[key]; + var aList = a.properties[key] as List; + var bList = b.properties[key] as List; if (aList.length != bList.length) return false; for (var i = 0; i < aList.length; i++) { if (aList[i] != bList[i]) return false; @@ -559,7 +558,7 @@ runTests(Datastore datastore, String namespace) { var NUM_TRANSACTIONS = 10; // Start transactions - var transactions = []; + var transactions = >[]; for (var i = 0; i < NUM_TRANSACTIONS; i++) { transactions.add(datastore.beginTransaction(crossEntityGroup: xg)); } @@ -567,13 +566,13 @@ runTests(Datastore datastore, String namespace) { .wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction - var lookups = []; + var lookups = >>[]; for (var transaction in transactions) { lookups.add(datastore.lookup(keys, transaction: transaction)); } return Future.wait(lookups).then((List> results) { // Do a conflicting commit in every transaction. 
- var commits = []; + var commits = []; for (var i = 0; i < transactions.length; i++) { var transaction = transactions[i]; commits.add(test(results[i], transaction, i)); @@ -600,7 +599,7 @@ runTests(Datastore datastore, String namespace) { }); }); group('query', () { - Future testQuery(String kind, + Future> testQuery(String kind, {List filters, List orders, bool transactional: false, @@ -721,12 +720,12 @@ runTests(Datastore datastore, String namespace) { }; var filterFunction = (Entity entity) { - var value = entity.properties[QUERY_KEY]; + var value = entity.properties[QUERY_KEY] as String; return value.compareTo(QUERY_UPPER_BOUND) == -1 && value.compareTo(QUERY_LOWER_BOUND) == 1; }; var listFilterFunction = (Entity entity) { - var values = entity.properties[TEST_LIST_PROPERTY]; + var values = entity.properties[TEST_LIST_PROPERTY] as List; return values.contains(QUERY_LIST_ENTRY); }; var indexFilterMatches = (Entity entity) { @@ -1034,7 +1033,7 @@ Future cleanupDB(Datastore db, String namespace) { return consumePages((_) => db.query(q, partition: partition)) .then((List entities) { return entities - .map((Entity e) => e.key.elements.last.id) + .map((Entity e) => e.key.elements.last.id as String) .where((String kind) => !kind.contains('__')) .toList(); }); @@ -1071,7 +1070,7 @@ Future waitUntilEntitiesGone(Datastore db, List keys, Partition p) { Future waitUntilEntitiesHelper( Datastore db, List keys, bool positive, Partition p) { - var keysByKind = {}; + var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); } diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index e0221d82..101fc740 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -23,7 +23,7 @@ buildKey(int i, {Function idFunction, String kind: TEST_KIND, Partition p}) { return new Key(path, partition: p); } -Map buildProperties(int i) { +Map buildProperties(int i) { var listValues = [ 'foo', '$TEST_LIST_VALUE$i', @@ -66,7 +66,7 @@ List buildEntityWithAllProperties(int from, int to, var us42 = const Duration(microseconds: 42); var unIndexed = new Set.from(['blobProperty']); - Map buildProperties(int i) { + Map buildProperties(int i) { return { 'nullValue': null, 'boolProperty': true, diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 0c80738c..7af6ba8c 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -50,8 +50,8 @@ import 'package:unittest/unittest.dart'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; -import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; import '../../common_e2e.dart'; +import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; @db.Kind() class Person extends db.Model { @@ -279,7 +279,7 @@ runTests(db.DatastoreDB store, String namespace) { test('parent_key', () { var root = partition.emptyKey; - var users = []; + var users = []; for (var i = 333; i <= 334; i++) { users.add(new User() ..id = i @@ -288,7 +288,7 @@ runTests(db.DatastoreDB store, String namespace) { ..name = 'user$i' ..nickname = 'nickname${i%3}'); } - var persons = []; + var persons = []; for (var i = 335; i <= 336; i++) { persons.add(new Person() ..id = i @@ -308,7 +308,7 @@ runTests(db.DatastoreDB store, String namespace) { test('auto_ids', () { var root = partition.emptyKey; - var persons = 
[]; + var persons = []; persons.add(new Person() ..id = 42 ..parentKey = root @@ -379,7 +379,7 @@ runTests(db.DatastoreDB store, String namespace) { test('query', () { var root = partition.emptyKey; - var users = []; + var users = []; for (var i = 1; i <= 10; i++) { var languages = []; if (i == 9) { @@ -397,7 +397,7 @@ runTests(db.DatastoreDB store, String namespace) { ..languages = languages); } - var expandoPersons = []; + var expandoPersons = []; for (var i = 1; i <= 3; i++) { var expandoPerson = new ExpandoPerson() ..parentKey = root @@ -413,14 +413,14 @@ runTests(db.DatastoreDB store, String namespace) { var LOWER_BOUND = 'user2'; - var usersSortedNameDescNicknameAsc = new List.from(users); + var usersSortedNameDescNicknameAsc = new List.from(users); usersSortedNameDescNicknameAsc.sort((User a, User b) { var result = b.name.compareTo(a.name); if (result == 0) return a.nickname.compareTo(b.nickname); return result; }); - var usersSortedNameDescNicknameDesc = new List.from(users); + var usersSortedNameDescNicknameDesc = new List.from(users); usersSortedNameDescNicknameDesc.sort((User a, User b) { var result = b.name.compareTo(a.name); if (result == 0) return b.nickname.compareTo(a.nickname); @@ -445,7 +445,7 @@ runTests(db.DatastoreDB store, String namespace) { .where((User u) => u.wife == root.append(User, id: 42 + 3)) .toList(); - var allInserts = []..addAll(users)..addAll(expandoPersons); + var allInserts = []..addAll(users)..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { return waitUntilEntitiesReady(store, allKeys, partition).then((_) { @@ -646,7 +646,7 @@ Future waitUntilEntitiesGone( Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, bool positive, db.Partition partition) { - var keysByKind = {}; + var keysByKind = {}; for (var key in keys) { keysByKind.putIfAbsent(key.type, () => []).add(key); } diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index fccf53f0..9a467466 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -61,8 +61,9 @@ runTests(datastore, db.DatastoreDB store) { var namespaceQuery = store.query(Namespace); return namespaceQuery .run() + .map((m) => m as Namespace) .toList() - .then((List namespaces) { + .then((namespaces) { expect(namespaces.length, greaterThanOrEqualTo(3)); expect(namespaces, contains(cond((ns) => ns.name == null))); expect( @@ -70,7 +71,7 @@ runTests(datastore, db.DatastoreDB store) { expect( namespaces, contains(cond((ns) => ns.name == 'BarNamespace'))); - var futures = []; + var futures = []; for (var namespace in namespaces) { if (!(namespace == null || namespace == 'FooNamespace' || @@ -79,7 +80,7 @@ runTests(datastore, db.DatastoreDB store) { } var partition = store.newPartition(namespace.name); var kindQuery = store.query(Kind, partition: partition); - futures.add(kindQuery.run().toList().then((List kinds) { + futures.add(kindQuery.run().toList().then((List kinds) { expect(kinds.length, greaterThanOrEqualTo(2)); if (namespace.name == null) { expect(kinds, contains(cond((k) => k.name == 'NullKind'))); diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index b9707b86..571f2568 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -9,15 +9,17 @@ import 'dart:async'; import 'package:gcloud/db.dart'; import 
'package:unittest/unittest.dart'; -// These unused imports make sure that [ModelDBImpl.fromLibrary()] will find -// all the Model/ModelDescription classes. +import 'model_dbs/duplicate_fieldname.dart' as test4; import 'model_dbs/duplicate_kind.dart' as test1; import 'model_dbs/duplicate_property.dart' as test2; import 'model_dbs/multiple_annotations.dart' as test3; -import 'model_dbs/duplicate_fieldname.dart' as test4; import 'model_dbs/no_default_constructor.dart' as test5; main() { + // These unused imports make sure that [ModelDBImpl.fromLibrary()] will find + // all the Model/ModelDescription classes. + assert([test1.A, test2.A, test3.A, test4.A, test5.A] != null); + newModelDB(Symbol symbol) => new ModelDBImpl.fromLibrary(symbol); group('model_db', () { diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 7b05e29b..205a7f34 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -75,9 +75,14 @@ main() { expect(prop.validate(null, null), isTrue); expect(prop.validate(null, [1, 2]), isTrue); expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, []).bytes, equals([])); - expect(prop.encodeValue(null, [1, 2]).bytes, equals([1, 2])); - expect(prop.encodeValue(null, new Uint8List.fromList([1, 2])).bytes, + expect((prop.encodeValue(null, []) as datastore.BlobValue).bytes, + equals([])); + expect((prop.encodeValue(null, [1, 2]) as datastore.BlobValue).bytes, + equals([1, 2])); + expect( + (prop.encodeValue(null, new Uint8List.fromList([1, 2])) + as datastore.BlobValue) + .bytes, equals([1, 2])); expect(prop.decodePrimitiveValue(null, null), equals(null)); expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([])), @@ -185,7 +190,7 @@ class Custom { class CustomProperty extends StringProperty { const CustomProperty( - {String propertyName: null, bool required: false, bool indexed: true}); + {String propertyName, bool required: false, bool indexed: true}); bool validate(ModelDB db, Object value) { if (required && value == null) return false; @@ -197,7 +202,7 @@ class CustomProperty extends StringProperty { return new Custom()..customValue = value; } - Object encodeValue(ModelDB db, Object value) { + Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; return (value as Custom).customValue; } @@ -209,7 +214,7 @@ class KeyMock implements Key { KeyMock(this._datastoreKey); Object id = 1; - Type type = null; + Type type; Key get parent => this; bool get isEmpty => false; Partition get partition => null; diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 75bde888..dbb858c7 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -4,18 +4,17 @@ library gcloud.test.db_all_test; -import 'dart:io'; import 'dart:async'; +import 'dart:io'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import 'package:unittest/unittest.dart'; +import 'common_e2e.dart'; +import 'datastore/e2e/datastore_test_impl.dart' as datastore_test; import 'db/e2e/db_test_impl.dart' as db_test; import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; -import 'datastore/e2e/datastore_test_impl.dart' as datastore_test; - -import 'common_e2e.dart'; main() { var scopes = datastore_impl.DatastoreImpl.SCOPES; diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart 
b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index c422c952..d5b4408c 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -52,7 +52,7 @@ runTests(PubSub pubsub, String project, String prefix) { test('create-lookup-delete', () async { var topicName = generateTopicName(); var subscriptionName = generateSubscriptionName(); - var topic = await pubsub.createTopic(topicName); + await pubsub.createTopic(topicName); var subscription = await pubsub.createSubscription(subscriptionName, topicName); expect(subscription.name, subscriptionName); @@ -70,7 +70,7 @@ runTests(PubSub pubsub, String project, String prefix) { test('create-list-delete', () async { const int count = 5; var topicName = generateTopicName(); - var topic = await pubsub.createTopic(topicName); + await pubsub.createTopic(topicName); var subscriptionPrefix = generateSubscriptionName(); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 8a0bc14f..61d0c8c9 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -5,7 +5,6 @@ import 'dart:async'; import 'dart:convert'; -import 'package:http/http.dart' as http; import 'package:unittest/unittest.dart'; import 'package:gcloud/pubsub.dart'; @@ -18,7 +17,7 @@ import '../common_e2e.dart'; const String HOSTNAME = 'pubsub.googleapis.com'; const String ROOT_PATH = '/v1beta2/'; -http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => new MockClient(HOSTNAME, ROOT_PATH); main() { group('api', () { @@ -882,7 +881,7 @@ main() { expect(request.messages.length, 1); expect(request.messages[0].data, messageBase64); expect(request.messages[0].attributes, isNull); - return mock.respond(new pubsub.PublishResponse()..messageIds = [0]); + return mock.respond(new pubsub.PublishResponse()..messageIds = ['0']); })); return topic.publishString(message).then(expectAsync((result) { @@ -917,7 +916,7 @@ main() { expect(request.messages[0].attributes, isNotNull); expect(request.messages[0].attributes.length, attributes.length); expect(request.messages[0].attributes, attributes); - return mock.respond(new pubsub.PublishResponse()..messageIds = [0]); + return mock.respond(new pubsub.PublishResponse()..messageIds = ['0']); })); return topic diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 88e52c35..df296613 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -23,8 +23,8 @@ bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; const int MB = 1024 * 1024; const int maxNormalUpload = 1 * MB; const int minResumableUpload = maxNormalUpload + 1; -var bytesResumableUpload = - new List.generate(minResumableUpload, (e) => e & 255); +final bytesResumableUpload = + new List.generate(minResumableUpload, (e) => e & 255); runTests(Storage storage, Bucket testBucket) { group('bucket', () { diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index c3142af2..014cdbbe 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -75,7 +75,7 @@ main() { return mock.respond(new storage.Bucket()..name = bucketName); }, count: predefined.length)); - var futures = []; + var futures = []; for (int i = 0; i < predefined.length; i++) { futures.add( api.createBucket(bucketName, predefinedAcl: predefined[i][0])); @@ -130,7 +130,7 @@ main() { 
return mock.respond(new storage.Bucket()..name = bucketName); }, count: acls.length)); - var futures = []; + var futures = []; for (int i = 0; i < acls.length; i++) { futures.add(api.createBucket(bucketName, acl: acls[i])); } @@ -195,7 +195,7 @@ main() { return mock.respond(new storage.Bucket()..name = bucketName); }, count: predefined.length * acls.length)); - var futures = []; + var futures = []; for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { futures.add(api.createBucket(bucketName, @@ -436,8 +436,6 @@ main() { .then(expectAsync((_) => upload(addToSink, false))); } - ; - test('write-short-1', () { withMockClient((mock, api) { runTest(mock, api, [bytesNormalUpload], bytesNormalUpload.length); @@ -648,7 +646,7 @@ main() { }, count: metadata.length)); var bucket = api.bucket(bucketName); - var futures = []; + var futures = []; for (int i = 0; i < metadata.length; i++) { futures .add(bucket.writeBytes(objectName, bytes, metadata: metadata[i])); @@ -714,7 +712,7 @@ main() { }, count: metadata.length * 2)); var bucket = api.bucket(bucketName); - var futures = []; + var futures = []; for (int i = 0; i < metadata.length; i++) { futures.add(bucket.writeBytes(objectName, bytesResumableUpload, metadata: metadata[i])); @@ -757,7 +755,7 @@ main() { }, count: predefined.length)); var bucket = api.bucket(bucketName); - var futures = []; + var futures = []; for (int i = 0; i < predefined.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, predefinedAcl: predefined[i][0])); @@ -820,7 +818,7 @@ main() { }, count: acls.length)); var bucket = api.bucket(bucketName); - var futures = []; + var futures = []; for (int i = 0; i < acls.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, acl: acls[i])); } @@ -894,7 +892,7 @@ main() { }, count: predefined.length * acls.length)); var bucket = api.bucket(bucketName); - var futures = []; + var futures = []; for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { futures.add(bucket.writeBytes(objectName, bytes, From 89cb93ae46a2235da594a4d326c856d6d89c30e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Este-Gracias?= Date: Thu, 15 Mar 2018 19:20:50 +0100 Subject: [PATCH 106/239] Align dependencies (dart-lang/gcloud#51) * Align dependencies * Fix storage tests * Fix pub/sub tests * Transform expect into assert * Update documentation about indexes for datastore * Fix dartanalyzer warnings * Remove .idea in gitignore --- pkgs/gcloud/.gitignore | 2 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 8 +- pkgs/gcloud/lib/src/storage_impl.dart | 2 +- pkgs/gcloud/pubspec.yaml | 6 +- pkgs/gcloud/test/common.dart | 2 +- pkgs/gcloud/test/common_e2e.dart | 28 +-- .../datastore/e2e/datastore_test_impl.dart | 40 +++-- .../gcloud/test/datastore/error_matchers.dart | 11 +- pkgs/gcloud/test/db/db_test.dart | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 48 +++--- .../test/db/e2e/metamodel_test_impl.dart | 8 +- pkgs/gcloud/test/db/model_db_test.dart | 2 +- pkgs/gcloud/test/db/properties_test.dart | 2 +- pkgs/gcloud/test/db_all_e2e_test.dart | 55 ++++-- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 156 +++++++++-------- pkgs/gcloud/test/pubsub/pubsub_test.dart | 160 +++++++++--------- pkgs/gcloud/test/service_scope_test.dart | 50 +++--- pkgs/gcloud/test/storage/e2e_test.dart | 112 ++++++------ pkgs/gcloud/test/storage/storage_test.dart | 144 ++++++++-------- 19 files changed, 444 insertions(+), 394 deletions(-) diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore index 
2f08921c..794cf6c1 100644 --- a/pkgs/gcloud/.gitignore +++ b/pkgs/gcloud/.gitignore @@ -1,4 +1,4 @@ pubspec.lock packages .pub -.packages +.packages \ No newline at end of file diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 9db0f73e..156f6992 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -418,10 +418,12 @@ class _TopicPageImpl implements Page { final List items; _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) - : items = new List(response.topics.length), + : items = new List(response.topics != null ? response.topics.length : 0), _nextPageToken = response.nextPageToken { - for (int i = 0; i < response.topics.length; i++) { - items[i] = new _TopicImpl(_api, response.topics[i]); + if (response.topics != null) { + for (int i = 0; i < response.topics.length; i++) { + items[i] = new _TopicImpl(_api, response.topics[i]); + } } } diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 0acffd48..395c84b5 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -497,7 +497,7 @@ class _MediaUploadStreamSink implements StreamSink> { final _controller = new StreamController>(sync: true); StreamSubscription _subscription; StreamController _resumableController; - final _doneCompleter = new Completer<_ObjectInfoImpl>(); + final _doneCompleter = new Completer(); static const int _STATE_LENGTH_KNOWN = 0; static const int _STATE_PROBING_LENGTH = 1; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 10e54658..cff67b5c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -6,14 +6,14 @@ homepage: https://github.com/dart-lang/gcloud environment: sdk: '>=1.13.0 <2.0.0' dependencies: - googleapis: '>=0.2.0 <0.45.0' - googleapis_beta: '>=0.10.0 <0.40.0' + googleapis: '>=0.50.0 <0.51.0' + googleapis_beta: '>=0.45.0 <0.46.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' http_parser: '>=2.0.0 <4.0.0' mime: '>=0.9.0+3 <0.10.0' - unittest: '>=0.11.0 <0.12.0' + test: '>=0.12.0 <0.13.0' transformers: - $dart2js: $include: [] diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 4cf3bbd8..d6003913 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -9,7 +9,7 @@ import 'package:http/http.dart' as http; import 'package:http/testing.dart' as http_testing; import 'package:http_parser/http_parser.dart' as http_parser; import 'package:mime/mime.dart' as mime; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 0899bc23..612f4f1c 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -7,7 +7,6 @@ library gcloud.test.common_e2e; import 'dart:async'; import 'dart:io'; -import 'package:unittest/unittest.dart'; import 'package:googleapis_auth/auth_io.dart' as auth; import 'package:http/http.dart' as http; @@ -78,7 +77,7 @@ Future withAuthClient(List scopes, AuthCallback callback, if (!onBot() && (project == null || serviceKeyLocation == null)) { throw new StateError( - 'Envoronment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' + 'Environment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' 'required when not running on the package bot'); } @@ -92,30 +91,7 @@ Future withAuthClient(List 
scopes, AuthCallback callback, .clientViaServiceAccount(creds, scopes) .then((http.Client client) { if (trace) client = new TraceClient(client); - return callback(project, client).whenComplete(() => client.close()); + return callback(project, client); }); }); } - -Future runE2EUnittest(Function callback) { - var config = new E2EConfiguration(); - - unittestConfiguration = config; - callback(); - - return config.done; -} - -class E2EConfiguration extends SimpleConfiguration { - final Completer _completer = new Completer(); - - Future get done => _completer.future; - - onDone(success) { - new Future.sync(() { - super.onDone(success); - }) - .then((_) => _completer.complete(_)) - .catchError((error, stack) => _completer.completeError(error, stack)); - } -} diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 7185c1d4..5606ea6f 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -22,16 +22,17 @@ library datastore_test; /// - name: listproperty /// - name: test_property /// direction: desc -/// $ gcloud preview datastore create-indexes . -/// 02:19 PM Host: appengine.google.com -/// 02:19 PM Uploading index definitions. +/// $ gcloud datastore create-indexes index.yaml +/// +/// Now, wait for indexing done import 'dart:async'; import 'package:gcloud/datastore.dart'; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import 'package:gcloud/common.dart'; -import 'package:unittest/unittest.dart'; +import 'package:http/http.dart'; +import 'package:test/test.dart'; import '../../common_e2e.dart'; import '../error_matchers.dart'; @@ -47,7 +48,7 @@ Future> consumePages(FirstPageProvider provider) { return new StreamFromPages(provider).stream.toList(); } -runTests(Datastore datastore, String namespace) { +void runTests(Datastore datastore, String namespace) { Partition partition = new Partition(namespace); Future withTransaction(Function f, {bool xg: false}) { @@ -254,10 +255,10 @@ runTests(Datastore datastore, String namespace) { transactional: true, xg: true); }); - test('negative_insert_20000_entities', () { + test('negative_insert_20000_entities', () async { // Maybe it should not be a [DataStoreError] here? // FIXME/TODO: This was adapted - expect(datastore.commit(inserts: named20000), throws); + expect(datastore.commit(inserts: named20000), throwsA(isSocketException)); }); // TODO: test invalid inserts (like entities without key, ...) 
@@ -598,6 +599,7 @@ runTests(Datastore datastore, String namespace) { throwsA(isTransactionAbortedError)); }); }); + group('query', () { Future> testQuery(String kind, {List filters, @@ -737,7 +739,7 @@ runTests(Datastore datastore, String namespace) { var sortedAndFiltered = sorted.where(filterFunction).toList(); var sortedAndListFiltered = sorted.where(listFilterFunction).toList(); var indexedEntity = sorted.where(indexFilterMatches).toList(); - expect(indexedEntity.length, equals(1)); + assert(indexedEntity.length == 1); var filters = [ new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), @@ -1019,7 +1021,7 @@ runTests(Datastore datastore, String namespace) { return datastore.commit(deletes: [subSubKey, subSubKey2]); } ]; - return Future.forEach(futures, (f) => f()).then(expectAsync((_) {})); + return Future.forEach(futures, (f) => f()).then(expectAsync1((_) {})); }); }); }); @@ -1098,13 +1100,21 @@ Future waitUntilEntitiesHelper( }); } -main() { +Future main() async { + Datastore datastore; + BaseClient client; + var scopes = datastore_impl.DatastoreImpl.SCOPES; + await withAuthClient(scopes, (String project, httpClient) { + datastore = new datastore_impl.DatastoreImpl(httpClient, project); + client = httpClient; + return cleanupDB(datastore, null); + }); - withAuthClient(scopes, (String project, httpClient) { - var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); - return cleanupDB(datastore, null).then((_) { - return runE2EUnittest(() => runTests(datastore, null)); - }); + tearDownAll(() async { + await cleanupDB(datastore, null); + client.close(); }); + + runTests(datastore, null); } diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 7509ed73..be1496f0 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -4,7 +4,9 @@ library error_matchers; -import 'package:unittest/unittest.dart'; +import 'dart:io'; + +import 'package:test/test.dart'; import 'package:gcloud/datastore.dart'; class _ApplicationError extends TypeMatcher { @@ -37,6 +39,11 @@ class _IntMatcher extends TypeMatcher { bool matches(item, Map matchState) => item is int; } +class _SocketException extends TypeMatcher { + const _SocketException() : super("SocketException"); + bool matches(item, Map matchState) => item is SocketException; +} + const isApplicationError = const _ApplicationError(); const isDataStoreError = const _DataStoreError(); @@ -45,3 +52,5 @@ const isNeedIndexError = const _NeedIndexError(); const isTimeoutError = const _TimeoutError(); const isInt = const _IntMatcher(); + +const isSocketException = const _SocketException(); \ No newline at end of file diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index 587cf480..c8346d7b 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -5,7 +5,7 @@ library gcloud.db_test; import 'package:gcloud/db.dart'; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; @Kind() class Foobar extends Model {} diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 7af6ba8c..f31cde8b 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -40,15 +40,16 @@ library db_test; /// - name: name /// direction: asc /// -/// $ gcloud preview datastore create-indexes . 
-/// 02:19 PM Host: appengine.google.com -/// 02:19 PM Uploading index definitions. +/// $ gcloud datastore create-indexes index.yaml +/// +/// Now, wait for indexing done import 'dart:async'; -import 'package:unittest/unittest.dart'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; +import 'package:http/http.dart'; +import 'package:test/test.dart'; import '../../common_e2e.dart'; import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; @@ -137,7 +138,7 @@ class ExpandoPerson extends db.ExpandoModel { Future sleep(Duration duration) => new Future.delayed(duration); -runTests(db.DatastoreDB store, String namespace) { +void runTests(db.DatastoreDB store, String namespace) { var partition = store.newPartition(namespace); void compareModels(List expectedModels, List models, @@ -180,11 +181,13 @@ runTests(db.DatastoreDB store, String namespace) { }); }); } else { - return store.commit(inserts: objects).then(expectAsync((_) { - return store.lookup(keys).then(expectAsync((List models) { + return store.commit(inserts: objects).then(expectAsync1((_) { + return store.lookup(keys).then(expectAsync1((List models) { compareModels(objects, models); - return store.commit(deletes: keys).then(expectAsync((_) { - return store.lookup(keys).then(expectAsync((List models) { + return store.commit(deletes: keys).then(expectAsync1((_) { + return store + .lookup(keys) + .then(expectAsync1((List models) { for (var i = 0; i < models.length; i++) { expect(models[i], isNull); } @@ -330,7 +333,7 @@ runTests(db.DatastoreDB store, String namespace) { ..parentKey = root ..age = 83 ..name = 'user83'); - return store.commit(inserts: persons).then(expectAsync((_) { + return store.commit(inserts: persons).then(expectAsync1((_) { // At this point, autoIds are allocated and are reflected in the // models (as well as parentKey if it was empty). @@ -360,13 +363,13 @@ runTests(db.DatastoreDB store, String namespace) { // because an id doesn't need to be globally unique, only under // entities with the same parent. - return store.lookup(keys).then(expectAsync((List models) { + return store.lookup(keys).then(expectAsync1((List models) { // Since the id/parentKey fields are set after commit and a lookup // returns new model instances, we can do full model comparison // here. 
compareModels(persons, models); - return store.commit(deletes: keys).then(expectAsync((_) { - return store.lookup(keys).then(expectAsync((List models) { + return store.commit(deletes: keys).then(expectAsync1((_) { + return store.lookup(keys).then(expectAsync1((List models) { for (var i = 0; i < models.length; i++) { expect(models[i], isNull); } @@ -677,14 +680,21 @@ Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, }); } -main() { - var scopes = datastore_impl.DatastoreImpl.SCOPES; +Future main() async { + db.DatastoreDB store; + BaseClient client; - withAuthClient(scopes, (String project, httpClient) { - var datastore = new datastore_impl.DatastoreImpl(httpClient, 's~$project'); + var scopes = datastore_impl.DatastoreImpl.SCOPES; + await withAuthClient(scopes, (String project, httpClient) { + var datastore = new datastore_impl.DatastoreImpl(httpClient, project); return datastore_test.cleanupDB(datastore, null).then((_) { - return runE2EUnittest( - () => runTests(new db.DatastoreDB(datastore), null)); + store = new db.DatastoreDB(datastore); }); }); + + tearDownAll(() { + client?.close(); + }); + + runTests(store, null); } diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 9a467466..f8169d7e 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -6,7 +6,7 @@ library metamodel_test; import 'dart:async'; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; import 'package:gcloud/datastore.dart'; import 'package:gcloud/datastore.dart' show Key, Partition; @@ -45,7 +45,7 @@ Future sleep(Duration duration) { return completer.future; } -runTests(datastore, db.DatastoreDB store) { +void runTests(datastore, db.DatastoreDB store) { // Shorten this name, so we don't have to break lines at 80 chars. 
final cond = predicate; @@ -74,8 +74,8 @@ runTests(datastore, db.DatastoreDB store) { var futures = []; for (var namespace in namespaces) { if (!(namespace == null || - namespace == 'FooNamespace' || - namespace == 'BarNamespace')) { + namespace.name == 'FooNamespace' || + namespace.name == 'BarNamespace')) { continue; } var partition = store.newPartition(namespace.name); diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 571f2568..22b1bbbf 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -7,7 +7,7 @@ library gcloud.db_impl_test; import 'dart:async'; import 'package:gcloud/db.dart'; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; import 'model_dbs/duplicate_fieldname.dart' as test4; import 'model_dbs/duplicate_kind.dart' as test1; diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 205a7f34..30ae3d0d 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -8,7 +8,7 @@ import 'dart:typed_data'; import 'package:gcloud/db.dart'; import 'package:gcloud/datastore.dart' as datastore; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; main() { group('properties', () { diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index dbb858c7..db4efd35 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -9,39 +9,58 @@ import 'dart:io'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; -import 'package:unittest/unittest.dart'; +import 'package:http/http.dart'; +import 'package:test/test.dart'; import 'common_e2e.dart'; import 'datastore/e2e/datastore_test_impl.dart' as datastore_test; import 'db/e2e/db_test_impl.dart' as db_test; import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; -main() { +Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; - var now = new DateTime.now().millisecondsSinceEpoch; String namespace = '${Platform.operatingSystem}${now}'; - withAuthClient(scopes, (String project, httpClient) { - var datastore = new datastore_impl.DatastoreImpl(httpClient, project); - var datastoreDB = new db.DatastoreDB(datastore); + datastore_impl.DatastoreImpl datastore; + db.DatastoreDB datastoreDB; + Client client; + + await withAuthClient(scopes, (String project, httpClient) { + datastore = new datastore_impl.DatastoreImpl(httpClient, project); + datastoreDB = new db.DatastoreDB(datastore); + client = httpClient; + }); + + tearDownAll(() async { + client.close(); + }); + + group('datastore_test', () { + tearDown(() async { + await datastore_test.cleanupDB(datastore, namespace); + }); - return runE2EUnittest(() { - datastore_test.runTests(datastore, namespace); + datastore_test.runTests(datastore, namespace); + }); - test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); - }); + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); + }); - db_test.runTests(datastoreDB, namespace); + group('datastore_test', () { + db_test.runTests(datastoreDB, namespace); + }); - test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); - }); + test('sleep-between-test-suites', () { + expect(new Future.delayed(const Duration(seconds: 10)), completes); 
+ }); - db_metamodel_test.runTests(datastore, datastoreDB); - }).whenComplete(() { - return datastore_test.cleanupDB(datastore, namespace); + group('datastore_test', () { + tearDown(() async { + await datastore_test.cleanupDB(datastore, namespace); }); + + db_metamodel_test.runTests(datastore, datastoreDB); }); } diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index d5b4408c..187e9f1b 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -3,11 +3,93 @@ // BSD-style license that can be found in the LICENSE file. import 'package:gcloud/pubsub.dart'; -import 'package:unittest/unittest.dart'; +import 'package:http/http.dart'; +import 'package:test/test.dart'; import '../common_e2e.dart'; -runTests(PubSub pubsub, String project, String prefix) { +void main() { + PubSub pubsub; + String project; + String prefix; + BaseClient client; + + setUpAll(() { + // Generate a unique prefix for all names generated by the tests. + var id = new DateTime.now().millisecondsSinceEpoch; + prefix = 'dart-e2e-test-$id'; + + return withAuthClient(PubSub.SCOPES, (String _project, httpClient) async { + // Share the same pubsub connection for all tests. + pubsub = new PubSub(httpClient, _project); + project = _project; + client = httpClient; + }); + }); + + tearDownAll(() async { + bool leftovers = false; + bool cleanupErrors = false; + + print('checking for leftover subscriptions'); + try { + // Try to delete any leftover subscriptions from the tests. + var subscriptions = await pubsub.listSubscriptions().toList(); + for (var subscription in subscriptions) { + if (subscription.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover subscription ' + '${subscription.name}'); + leftovers = true; + await pubsub.deleteSubscription(subscription.name); + } catch (e, s) { + print('Error during test cleanup of subscription ' + '${subscription.name} ($e)'); + print(s); + cleanupErrors = true; + } + } + } + } catch (e, s) { + print('Error checking for leftover subscriptions ($e)'); + print(s); + cleanupErrors = true; + } + + // Try to delete any leftover topics from the tests. + print('checking for leftover topics'); + try { + var topics = await pubsub.listTopics().toList(); + for (var topic in topics) { + if (topic.name.startsWith(prefix)) { + try { + print('WARNING: Removing leftover topic ${topic.name}'); + leftovers = true; + await pubsub.deleteTopic(topic.name); + } catch (e, s) { + print('Error during test cleanup of topic ${topic.name} ($e)'); + print(s); + cleanupErrors = true; + } + } + } + } catch (e, s) { + print('Error checking for leftover topics ($e)'); + print(s); + cleanupErrors = true; + } + + if (leftovers) { + throw 'Test terminated with leftover topics and/or subscriptions'; + } + + if (cleanupErrors) { + throw 'Test encountered errors while checking for leftovers'; + } + + client.close(); + }); + String generateTopicName() { var id = new DateTime.now().millisecondsSinceEpoch; return '$prefix-topic-$id'; @@ -107,73 +189,3 @@ runTests(PubSub pubsub, String project, String prefix) { }); }); } - -main() { - // Generate a unique prefix for all names generated by the tests. - var id = new DateTime.now().millisecondsSinceEpoch; - var prefix = 'dart-e2e-test-$id'; - - withAuthClient(PubSub.SCOPES, (String project, httpClient) async { - // Share the same pubsub connection for all tests. 
- bool leftovers = false; - bool cleanupErrors = false; - var pubsub = new PubSub(httpClient, project); - try { - await runE2EUnittest(() { - runTests(pubsub, project, prefix); - }); - } finally { - print('checking for leftover subscriptions'); - try { - // Try to delete any leftover subscriptions from the tests. - var subscriptions = await pubsub.listSubscriptions().toList(); - for (var subscription in subscriptions) { - if (subscription.name.startsWith(prefix)) { - try { - print('WARNING: Removing leftover subscription ' - '${subscription.name}'); - leftovers = true; - await pubsub.deleteSubscription(subscription.name); - } catch (e) { - print('Error during test cleanup of subscription ' - '${subscription.name} ($e)'); - cleanupErrors = true; - } - } - } - } catch (e) { - print('Error checking for leftover subscriptions ($e)'); - cleanupErrors = true; - } - - // Try to delete any leftover topics from the tests. - print('checking for leftover topics'); - try { - var topics = await pubsub.listTopics().toList(); - for (var topic in topics) { - if (topic.name.startsWith(prefix)) { - try { - print('WARNING: Removing leftover topic ${topic.name}'); - leftovers = true; - await pubsub.deleteTopic(topic.name); - } catch (e) { - print('Error during test cleanup of topic ${topic.name} ($e)'); - cleanupErrors = true; - } - } - } - } catch (e) { - print('Error checking for leftover topics ($e)'); - cleanupErrors = true; - } - } - - if (leftovers) { - throw 'Test terminated with leftover topics and/or subscriptions'; - } - - if (cleanupErrors) { - throw 'Test encountered errors while checking for leftovers'; - } - }); -} diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 61d0c8c9..86889c6e 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -5,7 +5,7 @@ import 'dart:async'; import 'dart:convert'; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; import 'package:gcloud/pubsub.dart'; @@ -48,7 +48,7 @@ main() { mock.register( 'PUT', 'projects/$PROJECT/topics/test-topic', - expectAsync((request) { + expectAsync1((request) { var requestTopic = new pubsub.Topic.fromJson(JSON.decode(request.body)); expect(requestTopic.name, absoluteName); @@ -56,11 +56,11 @@ main() { }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.createTopic(name).then(expectAsync((topic) { + return api.createTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.project, PROJECT); expect(topic.absoluteName, absoluteName); - return api.createTopic(absoluteName).then(expectAsync((topic) { + return api.createTopic(absoluteName).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.absoluteName, absoluteName); })); @@ -83,15 +83,15 @@ main() { mock.register( 'DELETE', 'projects/test-project/topics/test-topic', - expectAsync((request) { + expectAsync1((request) { expect(request.body.length, 0); return mock.respondEmpty(); }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.deleteTopic(name).then(expectAsync((result) { + return api.deleteTopic(name).then(expectAsync1((result) { expect(result, isNull); - return api.deleteTopic(absoluteName).then(expectAsync((topic) { + return api.deleteTopic(absoluteName).then(expectAsync1((topic) { expect(result, isNull); })); })); @@ -113,17 +113,17 @@ main() { mock.register( 'GET', 'projects/test-project/topics/test-topic', - expectAsync((request) { + expectAsync1((request) { expect(request.body.length, 0); return 
mock.respond(new pubsub.Topic()..name = absoluteName); }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { + return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.project, PROJECT); expect(topic.absoluteName, absoluteName); - return api.lookupTopic(absoluteName).then(expectAsync((topic) { + return api.lookupTopic(absoluteName).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.absoluteName, absoluteName); })); @@ -165,7 +165,7 @@ main() { mock.register( 'GET', 'projects/$PROJECT/topics', - expectAsync((request) { + expectAsync1((request) { pageCount++; expect(request.url.queryParameters['pageSize'], '$pageSize'); expect(request.body.length, 0); @@ -193,7 +193,7 @@ main() { var api = new PubSub(mock, PROJECT); return api .listTopics() - .listen(expectAsync((_) => null, count: count)) + .listen(expectAsync1((_) => null, count: count)) .asFuture(); } @@ -215,8 +215,8 @@ main() { registerQueryMock(mock, 70, 50); var api = new PubSub(mock, PROJECT); - api.listTopics().listen(expectAsync(((_) => null), count: 70), - onDone: expectAsync(() => null)) + api.listTopics().listen(expectAsync1(((_) => null), count: 70), + onDone: expectAsync0(() => null)) ..pause() ..resume() ..pause() @@ -231,7 +231,7 @@ main() { var count = 0; var subscription; subscription = api.listTopics().listen( - expectAsync(((_) { + expectAsync1(((_) { subscription ..pause() ..resume() @@ -243,7 +243,7 @@ main() { } return null; }), count: 70), - onDone: expectAsync(() => null)) + onDone: expectAsync0(() => null)) ..pause(); scheduleMicrotask(() => subscription.resume()); }); @@ -265,7 +265,7 @@ main() { var api = new PubSub(mock, PROJECT); var subscription; subscription = api.listTopics().listen( - expectAsync((_) => subscription.cancel()), + expectAsync1((_) => subscription.cancel()), onDone: () => throw 'Unexpected'); }); @@ -274,15 +274,15 @@ main() { // Test error on first GET request. 
var mock = mockClient(); mock.register('GET', 'projects/$PROJECT/topics', - expectAsync((request) { + expectAsync1((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); var subscription; subscription = api.listTopics().listen((_) => throw 'Unexpected', - onDone: expectAsync(() => null), + onDone: expectAsync0(() => null), onError: - expectAsync((e) => e is pubsub.DetailedApiRequestError)); + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); @@ -304,7 +304,7 @@ main() { int count = 0; var subscription; subscription = api.listTopics().listen( - expectAsync(((_) { + expectAsync1(((_) { count++; if (count == 50) { if (withPause) { @@ -313,15 +313,15 @@ main() { } mock.clear(); mock.register('GET', 'projects/$PROJECT/topics', - expectAsync((request) { + expectAsync1((request) { return mock.respondError(500); })); } return null; }), count: 50), - onDone: expectAsync(() => null), + onDone: expectAsync0(() => null), onError: - expectAsync((e) => e is pubsub.DetailedApiRequestError)); + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); } runTest(false); @@ -335,14 +335,14 @@ main() { registerQueryMock(mock, 0, 50); var api = new PubSub(mock, PROJECT); - return api.pageTopics().then(expectAsync((page) { + return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); mock.clear(); registerQueryMock(mock, 0, 20); - return api.pageTopics(pageSize: 20).then(expectAsync((page) { + return api.pageTopics(pageSize: 20).then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -355,14 +355,14 @@ main() { registerQueryMock(mock, 10, 50); var api = new PubSub(mock, PROJECT); - return api.pageTopics().then(expectAsync((page) { + return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); mock.clear(); registerQueryMock(mock, 20, 20); - return api.pageTopics(pageSize: 20).then(expectAsync((page) { + return api.pageTopics(pageSize: 20).then(expectAsync1((page) { expect(page.items.length, 20); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -384,7 +384,7 @@ main() { expect(page.isLast, pageCount == totalPages); expect(page.items.length, page.isLast ? 
n - (totalPages - 1) * pageSize : pageSize); - page.next().then(expectAsync((page) { + page.next().then(expectAsync1((page) { if (page != null) { handlePage(page); } else { @@ -395,7 +395,7 @@ main() { } var api = new PubSub(mock, PROJECT); - api.pageTopics(pageSize: pageSize).then(expectAsync(handlePage)); + api.pageTopics(pageSize: pageSize).then(expectAsync1(handlePage)); return completer.future; } @@ -426,7 +426,7 @@ main() { mock.register( 'PUT', 'projects/$PROJECT/subscriptions', - expectAsync((request) { + expectAsync1((request) { var requestSubscription = new pubsub.Subscription.fromJson(JSON.decode(request.body)); expect(requestSubscription.name, absoluteName); @@ -437,12 +437,12 @@ main() { var api = new PubSub(mock, PROJECT); return api .createSubscription(name, topicName) - .then(expectAsync((subscription) { + .then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); return api .createSubscription(absoluteName, absoluteTopicName) - .then(expectAsync((subscription) { + .then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.project, PROJECT); expect(subscription.absoluteName, absoluteName); @@ -468,15 +468,17 @@ main() { mock.register( 'DELETE', 'projects/$PROJECT/subscriptions', - expectAsync((request) { + expectAsync1((request) { expect(request.body.length, 0); return mock.respondEmpty(); }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.deleteSubscription(name).then(expectAsync((result) { + return api.deleteSubscription(name).then(expectAsync1((result) { expect(result, isNull); - return api.deleteSubscription(absoluteName).then(expectAsync((topic) { + return api + .deleteSubscription(absoluteName) + .then(expectAsync1((topic) { expect(result, isNull); })); })); @@ -498,19 +500,19 @@ main() { mock.register( 'GET', new RegExp('projects/$PROJECT/subscriptions'), - expectAsync((request) { + expectAsync1((request) { expect(request.body.length, 0); return mock .respond(new pubsub.Subscription()..name = absoluteName); }, count: 2)); var api = new PubSub(mock, PROJECT); - return api.lookupSubscription(name).then(expectAsync((subscription) { + return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); return api .lookupSubscription(absoluteName) - .then(expectAsync((subscription) { + .then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.project, PROJECT); expect(subscription.absoluteName, absoluteName); @@ -554,7 +556,7 @@ main() { mock.register( 'GET', 'projects/$PROJECT/subscriptions', - expectAsync((request) { + expectAsync1((request) { pageCount++; expect(request.url.queryParameters['pageSize'], '$pageSize'); expect(request.body.length, 0); @@ -583,7 +585,7 @@ main() { var api = new PubSub(mock, PROJECT); return api .listSubscriptions(topic) - .listen(expectAsync((_) => null, count: count)) + .listen(expectAsync1((_) => null, count: count)) .asFuture(); } @@ -616,8 +618,8 @@ main() { var api = new PubSub(mock, PROJECT); api.listSubscriptions().listen( - expectAsync(((_) => null), count: 70), - onDone: expectAsync(() => null)) + expectAsync1(((_) => null), count: 70), + onDone: expectAsync0(() => null)) ..pause() ..resume() ..pause() @@ -632,7 +634,7 @@ main() { var count = 0; var subscription; subscription = api.listSubscriptions().listen( - expectAsync(((_) { + expectAsync1(((_) { subscription ..pause() ..resume() @@ -644,7 +646,7 @@ main() { } 
return null; }), count: 70), - onDone: expectAsync(() => null)) + onDone: expectAsync0(() => null)) ..pause(); scheduleMicrotask(() => subscription.resume()); }); @@ -666,7 +668,7 @@ main() { var api = new PubSub(mock, PROJECT); var subscription; subscription = api.listSubscriptions().listen( - expectAsync((_) => subscription.cancel()), + expectAsync1((_) => subscription.cancel()), onDone: () => throw 'Unexpected'); }); @@ -675,16 +677,16 @@ main() { // Test error on first GET request. var mock = mockClient(); mock.register('GET', 'projects/$PROJECT/subscriptions', - expectAsync((request) { + expectAsync1((request) { return mock.respondError(500); })); var api = new PubSub(mock, PROJECT); var subscription; subscription = api.listSubscriptions().listen( (_) => throw 'Unexpected', - onDone: expectAsync(() => null), + onDone: expectAsync0(() => null), onError: - expectAsync((e) => e is pubsub.DetailedApiRequestError)); + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); @@ -706,7 +708,7 @@ main() { int count = 0; var subscription; subscription = api.listSubscriptions().listen( - expectAsync(((_) { + expectAsync1(((_) { count++; if (count == 50) { if (withPause) { @@ -715,15 +717,15 @@ main() { } mock.clear(); mock.register('GET', 'projects/$PROJECT/subscriptions', - expectAsync((request) { + expectAsync1((request) { return mock.respondError(500); })); } return null; }), count: 50), - onDone: expectAsync(() => null), + onDone: expectAsync0(() => null), onError: - expectAsync((e) => e is pubsub.DetailedApiRequestError)); + expectAsync1((e) => e is pubsub.DetailedApiRequestError)); } runTest(false); @@ -737,7 +739,9 @@ main() { registerQueryMock(mock, 0, 50, topic: topic); var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions(topic: topic).then(expectAsync((page) { + return api + .pageSubscriptions(topic: topic) + .then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -746,7 +750,7 @@ main() { registerQueryMock(mock, 0, 20, topic: topic); return api .pageSubscriptions(topic: topic, pageSize: 20) - .then(expectAsync((page) { + .then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -764,7 +768,9 @@ main() { registerQueryMock(mock, 10, 50, topic: topic); var api = new PubSub(mock, PROJECT); - return api.pageSubscriptions(topic: topic).then(expectAsync((page) { + return api + .pageSubscriptions(topic: topic) + .then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -773,7 +779,7 @@ main() { registerQueryMock(mock, 20, 20, topic: topic); return api .pageSubscriptions(topic: topic, pageSize: 20) - .then(expectAsync((page) { + .then(expectAsync1((page) { expect(page.items.length, 20); expect(page.isLast, isTrue); expect(page.next(), completion(isNull)); @@ -853,7 +859,7 @@ main() { var attributes = {'a': '1', 'b': 'text'}; registerLookup(mock) { - mock.register('GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); @@ -863,7 +869,7 @@ main() { mock.register( 'POST', 'projects/test-project/topics/test-topic:publish', - expectAsync((request) { + expectAsync1((request) { var publishRequest = new 
pubsub.PublishRequest.fromJson(JSON.decode(request.body)); return fn(publishRequest); @@ -875,7 +881,7 @@ main() { registerLookup(mock); var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { + return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { expect(request.messages.length, 1); @@ -884,17 +890,17 @@ main() { return mock.respond(new pubsub.PublishResponse()..messageIds = ['0']); })); - return topic.publishString(message).then(expectAsync((result) { + return topic.publishString(message).then(expectAsync1((result) { expect(result, isNull); - return topic.publishBytes(messageBytes).then(expectAsync((result) { + return topic.publishBytes(messageBytes).then(expectAsync1((result) { expect(result, isNull); return topic .publish(new Message.withString(message)) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); return topic .publish(new Message.withBytes(messageBytes)) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); })); })); @@ -908,7 +914,7 @@ main() { registerLookup(mock); var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { + return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { expect(request.messages.length, 1); @@ -921,21 +927,21 @@ main() { return topic .publishString(message, attributes: attributes) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); return topic .publishBytes(messageBytes, attributes: attributes) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); return topic .publish( new Message.withString(message, attributes: attributes)) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); return topic .publish(new Message.withBytes(messageBytes, attributes: attributes)) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); })); })); @@ -946,22 +952,22 @@ main() { test('delete', () { var mock = mockClient(); - mock.register('GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); var api = new PubSub(mock, PROJECT); - return api.lookupTopic(name).then(expectAsync((topic) { + return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.absoluteName, absoluteName); - mock.register('DELETE', absoluteName, expectAsync((request) { + mock.register('DELETE', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respondEmpty(); })); - return topic.delete().then(expectAsync((result) { + return topic.delete().then(expectAsync1((result) { expect(result, isNull); })); })); @@ -974,22 +980,22 @@ main() { test('delete', () { var mock = mockClient(); - mock.register('GET', absoluteName, expectAsync((request) { + mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); var api = new PubSub(mock, PROJECT); - return api.lookupSubscription(name).then(expectAsync((subscription) { + return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); - mock.register('DELETE', absoluteName, expectAsync((request) 
{ + mock.register('DELETE', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respondEmpty(); })); - return subscription.delete().then(expectAsync((result) { + return subscription.delete().then(expectAsync1((result) { expect(result, isNull); })); })); diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 753677d3..6b540492 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -7,7 +7,7 @@ library gcloud.test.service_scope_test; import 'dart:async'; import 'package:gcloud/service_scope.dart' as ss; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; main() { test('no-service-scope', () { @@ -16,14 +16,14 @@ main() { expect(() => ss.lookup(1), throwsA(isStateError)); var c = new Completer.sync(); - ss.fork(expectAsync(() { + ss.fork(expectAsync0(() { c.complete(); return new Future.value(); })); // Assert that after fork()ing we still don't have a service scope outside // of the zone created by the fork()ing. - c.future.then(expectAsync((_) { + c.future.then(expectAsync1((_) { expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); expect(() => ss.lookup(1), throwsA(isStateError)); @@ -31,7 +31,7 @@ main() { }); test('non-existent-key', () { - return ss.fork(expectAsync(() { + return ss.fork(expectAsync0(() { expect(ss.lookup(1), isNull); return new Future.value(); })); @@ -39,20 +39,20 @@ main() { test('fork-callback-returns-non-future', () { // The closure passed to fork() must return a future. - expect(() => ss.fork(expectAsync(() => null)), throwsA(isArgumentError)); + expect(() => ss.fork(expectAsync0(() => null)), throwsA(isArgumentError)); }); test('error-on-double-insert', () { // Ensure that inserting twice with the same key results in an error. - return ss.fork(expectAsync(() => new Future.sync(() { + return ss.fork(expectAsync0(() => new Future.sync(() { ss.register(1, 'firstValue'); expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError)); }))); }); test('only-cleanup', () { - return ss.fork(expectAsync(() => new Future.sync(() { - ss.registerScopeExitCallback(expectAsync(() {})); + return ss.fork(expectAsync0(() => new Future.sync(() { + ss.registerScopeExitCallback(expectAsync0(() {})); }))); }); @@ -60,7 +60,7 @@ main() { // Ensure cleanup functions are called in the reverse order of inserting // their entries. int insertions = 0; - return ss.fork(expectAsync(() => new Future.value(() { + return ss.fork(expectAsync0(() => new Future.value(() { int NUM = 10; for (int i = 0; i < NUM; i++) { @@ -68,7 +68,7 @@ main() { insertions++; ss.register(key, 'value$i'); - ss.registerScopeExitCallback(expectAsync(() { + ss.registerScopeExitCallback(expectAsync0(() { expect(insertions, equals(i + 1)); insertions--; })); @@ -86,21 +86,21 @@ main() { test('onion-cleanup', () { // Ensures that a cleanup method can look up things registered before it. 
- return ss.fork(expectAsync(() { - ss.registerScopeExitCallback(expectAsync(() { + return ss.fork(expectAsync0(() { + ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), isNull); expect(ss.lookup(2), isNull); })); ss.register(1, 'value1'); - ss.registerScopeExitCallback(expectAsync(() { + ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), equals('value1')); expect(ss.lookup(2), isNull); })); - ss.register(2, 'value2', onScopeExit: expectAsync(() { + ss.register(2, 'value2', onScopeExit: expectAsync0(() { expect(ss.lookup(1), equals('value1')); expect(ss.lookup(2), isNull); })); - ss.registerScopeExitCallback(expectAsync(() { + ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), 'value1'); expect(ss.lookup(2), 'value2'); })); @@ -126,7 +126,7 @@ main() { }); } })) - .catchError(expectAsync((e, _) { + .catchError(expectAsync2((e, _) { for (int i = 0; i < 10; i++) { expect('$e'.contains('xx${i}yy'), equals(i.isEven)); } @@ -136,13 +136,13 @@ main() { test('service-scope-destroyed-after-callback-completes', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. - return ss.fork(expectAsync(() => new Future.sync(() { + return ss.fork(expectAsync0(() => new Future.sync(() { var key = 1; ss.register(key, 'firstValue'); ss.registerScopeExitCallback(Zone.current.bindCallback(() { // Spawn an async task which will be run after the cleanups to ensure // the service scope got destroyed. - Timer.run(expectAsync(() { + Timer.run(expectAsync0(() { expect(() => ss.lookup(key), throwsA(isStateError)); expect(() => ss.register(2, 'value'), throwsA(isStateError)); expect(() => ss.registerScopeExitCallback(() {}), @@ -156,12 +156,12 @@ main() { test('override-parent-value', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. - return ss.fork(expectAsync(() => new Future.sync(() { + return ss.fork(expectAsync0(() => new Future.sync(() { var key = 1; ss.register(key, 'firstValue'); expect(ss.lookup(key), equals('firstValue')); - return ss.fork(expectAsync(() => new Future.sync(() { + return ss.fork(expectAsync0(() => new Future.sync(() { ss.register(key, 'secondValue'); expect(ss.lookup(key), equals('secondValue')); }))); @@ -171,10 +171,10 @@ main() { test('fork-onError-handler', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. 
- ss.fork(expectAsync(() { + ss.fork(expectAsync0(() { Timer.run(() => throw new StateError('foobar')); return new Future.value(); - }), onError: expectAsync((error, _) { + }), onError: expectAsync2((error, _) { expect(error, isStateError); })); }); @@ -188,19 +188,19 @@ main() { var subKey1 = 3; var subKey2 = 4; - return ss.fork(expectAsync(() { + return ss.fork(expectAsync0(() { int cleanupFork1 = 0; int cleanupFork2 = 0; ss.register(rootKey, 'root'); - ss.registerScopeExitCallback(expectAsync(() { + ss.registerScopeExitCallback(expectAsync0(() { expect(cleanupFork1, equals(2)); expect(cleanupFork2, equals(2)); })); expect(ss.lookup(rootKey), equals('root')); Future spawnChild(ownSubKey, otherSubKey, int i, cleanup) { - return ss.fork(expectAsync(() => new Future.sync(() { + return ss.fork(expectAsync0(() => new Future.sync(() { ss.register(subKey, 'fork$i'); ss.registerScopeExitCallback(cleanup); ss.register(ownSubKey, 'sub$i'); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index df296613..2453befc 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -8,7 +8,7 @@ import 'dart:async'; import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:gcloud/storage.dart'; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; import '../common_e2e.dart'; @@ -26,18 +26,44 @@ const int minResumableUpload = maxNormalUpload + 1; final bytesResumableUpload = new List.generate(minResumableUpload, (e) => e & 255); -runTests(Storage storage, Bucket testBucket) { +void main() { + Storage storage; + String testBucketName; + Bucket testBucket; + + setUpAll(() { + return withAuthClient(Storage.SCOPES, (String project, httpClient) { + testBucketName = generateBucketName(); + + // Share the same storage connection for all tests. + storage = new Storage(httpClient, project); + + // Create a shared bucket for all object tests. + return storage.createBucket(testBucketName).then((_) { + testBucket = storage.bucket(testBucketName); + }); + }); + }); + + tearDownAll(() { + // Deleting a bucket relies on eventually consistent behaviour, hence + // the delay in attempt to prevent test flakiness. 
+ return new Future.delayed(STORAGE_LIST_DELAY, () { + return storage.deleteBucket(testBucketName); + }); + }); + group('bucket', () { test('create-info-delete', () { var bucketName = generateBucketName(); - return storage.createBucket(bucketName).then(expectAsync((result) { + return storage.createBucket(bucketName).then(expectAsync1((result) { expect(result, isNull); - return storage.bucketInfo(bucketName).then(expectAsync((info) { + return storage.bucketInfo(bucketName).then(expectAsync1((info) { expect(info.bucketName, bucketName); expect(info.etag, isNotNull); expect(info.created is DateTime, isTrue); expect(info.id, isNotNull); - return storage.deleteBucket(bucketName).then(expectAsync((result) { + return storage.deleteBucket(bucketName).then(expectAsync1((result) { expect(result, isNull); })); })); @@ -49,13 +75,13 @@ runTests(Storage storage, Bucket testBucket) { var bucketName = generateBucketName(); return storage .createBucket(bucketName, predefinedAcl: predefinedAcl) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNull); - return storage.bucketInfo(bucketName).then(expectAsync((info) { + return storage.bucketInfo(bucketName).then(expectAsync1((info) { var acl = info.acl; expect(info.bucketName, bucketName); expect(acl.entries.length, expectedLength); - return storage.deleteBucket(bucketName).then(expectAsync((result) { + return storage.deleteBucket(bucketName).then(expectAsync1((result) { expect(result, isNull); })); })); @@ -63,23 +89,23 @@ runTests(Storage storage, Bucket testBucket) { } return Future.forEach([ - // TODO: Figure out why some returned ACLs are empty. - () => test(PredefinedAcl.authenticatedRead, 0), - // [test, [PredefinedAcl.private, 0]], // TODO: Cannot delete. + // See documentation: https://cloud.google.com/storage/docs/access-control/lists + () => test(PredefinedAcl.authenticatedRead, 2), + () => test(PredefinedAcl.private, 1), () => test(PredefinedAcl.projectPrivate, 3), - () => test(PredefinedAcl.publicRead, 0), - () => test(PredefinedAcl.publicReadWrite, 0) - ], (f) => f().then(expectAsync((_) {}))); + () => test(PredefinedAcl.publicRead, 2), + () => test(PredefinedAcl.publicReadWrite, 2), + ], (f) => f().then(expectAsync1((_) {}))); }); test('create-error', () { - storage.createBucket('goog-reserved').catchError(expectAsync((e) { + storage.createBucket('goog-reserved').catchError(expectAsync1((e) { expect(e, isNotNull); }), test: testDetailedApiError); }); }); - solo_group('object', () { + group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. 
Future withTestBucket(function) { @@ -91,13 +117,13 @@ runTests(Storage storage, Bucket testBucket) { test('create-read-delete', () { Future test(name, bytes) { return withTestBucket((Bucket bucket) { - return bucket.writeBytes('test', bytes).then(expectAsync((info) { + return bucket.writeBytes('test', bytes).then(expectAsync1((info) { expect(info, isNotNull); return bucket .read('test') - .fold([], (p, e) => p..addAll(e)).then(expectAsync((result) { + .fold([], (p, e) => p..addAll(e)).then(expectAsync1((result) { expect(result, bytes); - return bucket.delete('test').then(expectAsync((result) { + return bucket.delete('test').then(expectAsync1((result) { expect(result, isNull); })); })); @@ -108,7 +134,7 @@ runTests(Storage storage, Bucket testBucket) { return Future.forEach([ () => test('test-1', [1, 2, 3]), () => test('test-2', bytesResumableUpload) - ], (f) => f().then(expectAsync((_) {}))); + ], (f) => f().then(expectAsync1((_) {}))); }); test('create-with-predefined-acl-delete', () { @@ -116,14 +142,14 @@ runTests(Storage storage, Bucket testBucket) { Future test(objectName, predefinedAcl, expectedLength) { return bucket .writeBytes(objectName, [1, 2, 3], predefinedAcl: predefinedAcl) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { + return bucket.info(objectName).then(expectAsync1((info) { var acl = info.metadata.acl; expect(info.name, objectName); expect(info.etag, isNotNull); expect(acl.entries.length, expectedLength); - return bucket.delete(objectName).then(expectAsync((result) { + return bucket.delete(objectName).then(expectAsync1((result) { expect(result, isNull); })); })); @@ -137,7 +163,7 @@ runTests(Storage storage, Bucket testBucket) { () => test('test-4', PredefinedAcl.publicRead, 2), () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2), () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) - ], (f) => f().then(expectAsync((_) {}))); + ], (f) => f().then(expectAsync1((_) {}))); }); }); @@ -146,14 +172,14 @@ runTests(Storage storage, Bucket testBucket) { Future test(objectName, acl, expectedLength) { return bucket .writeBytes(objectName, [1, 2, 3], acl: acl) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { + return bucket.info(objectName).then(expectAsync1((info) { var acl = info.metadata.acl; expect(info.name, objectName); expect(info.etag, isNotNull); expect(acl.entries.length, expectedLength); - return bucket.delete(objectName).then(expectAsync((result) { + return bucket.delete(objectName).then(expectAsync1((result) { expect(result, isNull); })); })); @@ -190,7 +216,7 @@ runTests(Storage storage, Bucket testBucket) { () => test('test-2', acl2, acl2.entries.length + 1), () => test('test-3', acl3, acl3.entries.length + 1), () => test('test-4', acl4, acl4.entries.length + 1) - ], (f) => f().then(expectAsync((_) {}))); + ], (f) => f().then(expectAsync1((_) {}))); }); }); @@ -199,9 +225,9 @@ runTests(Storage storage, Bucket testBucket) { Future test(objectName, metadata, bytes) { return bucket .writeBytes(objectName, bytes, metadata: metadata) - .then(expectAsync((result) { + .then(expectAsync1((result) { expect(result, isNotNull); - return bucket.info(objectName).then(expectAsync((info) { + return bucket.info(objectName).then(expectAsync1((info) { expect(info.name, objectName); expect(info.length, bytes.length); expect(info.updated is DateTime, isTrue); @@ -217,7 +243,7 
@@ runTests(Storage storage, Bucket testBucket) { expect(info.metadata.contentEncoding, metadata.contentEncoding); expect(info.metadata.contentLanguage, metadata.contentLanguage); expect(info.metadata.custom, metadata.custom); - return bucket.delete(objectName).then(expectAsync((result) { + return bucket.delete(objectName).then(expectAsync1((result) { expect(result, isNull); })); })); @@ -238,29 +264,7 @@ runTests(Storage storage, Bucket testBucket) { () => test('test-2', metadata2, [65, 66, 67]), () => test('test-3', metadata1, bytesResumableUpload), () => test('test-4', metadata2, bytesResumableUpload) - ], (f) => f().then(expectAsync((_) {}))); - }); - }); - }); -} - -main() { - withAuthClient(Storage.SCOPES, (String project, httpClient) { - var testBucket = generateBucketName(); - - // Share the same storage connection for all tests. - var storage = new Storage(httpClient, project); - - // Create a shared bucket for all object tests. - return storage.createBucket(testBucket).then((_) { - return runE2EUnittest(() { - runTests(storage, storage.bucket(testBucket)); - }).whenComplete(() { - // Deleting a bucket relies on eventually consistent behaviour, hence - // the delay in attempt to prevent test flakiness. - return new Future.delayed(STORAGE_LIST_DELAY, () { - return storage.deleteBucket(testBucket); - }); + ], (f) => f().then(expectAsync1((_) {}))); }); }); }); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 014cdbbe..1a410d8d 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -8,7 +8,7 @@ import 'dart:async'; import 'dart:convert'; import 'package:http/http.dart' as http; -import 'package:unittest/unittest.dart'; +import 'package:test/test.dart'; import 'package:gcloud/storage.dart'; @@ -39,7 +39,7 @@ main() { test('create', () { withMockClient((mock, api) { - mock.register('POST', 'b', expectAsync((request) { + mock.register('POST', 'b', expectAsync1((request) { var requestBucket = new storage.Bucket.fromJson(JSON.decode(request.body)); expect(requestBucket.name, bucketName); @@ -65,7 +65,7 @@ main() { mock.register( 'POST', 'b', - expectAsync((request) { + expectAsync1((request) { var requestBucket = new storage.Bucket.fromJson(JSON.decode(request.body)); expect(requestBucket.name, bucketName); @@ -109,7 +109,7 @@ main() { mock.register( 'POST', 'b', - expectAsync((request) { + expectAsync1((request) { var requestBucket = new storage.Bucket.fromJson(JSON.decode(request.body)); expect(requestBucket.name, bucketName); @@ -171,7 +171,7 @@ main() { mock.register( 'POST', 'b', - expectAsync((request) { + expectAsync1((request) { var requestBucket = new storage.Bucket.fromJson(JSON.decode(request.body)); int predefinedIndex = count ~/ acls.length; @@ -209,7 +209,7 @@ main() { test('delete', () { withMockClient((mock, api) { mock.register('DELETE', new RegExp(r'b/[a-z/-]*$'), - expectAsync((request) { + expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); return mock.respond(new storage.Bucket()..name = bucketName); @@ -226,7 +226,7 @@ main() { mock.register( 'GET', new RegExp(r'b/[a-z/-]*$'), - expectAsync((request) { + expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); if (exists) { @@ -236,7 +236,7 @@ main() { } }, count: 2)); - return api.bucketExists(bucketName).then(expectAsync((result) { + return 
api.bucketExists(bucketName).then(expectAsync1((result) { expect(result, isTrue); exists = false; expect(api.bucketExists(bucketName), completion(isFalse)); @@ -246,7 +246,8 @@ main() { test('stat', () { withMockClient((mock, api) { - mock.register('GET', new RegExp(r'b/[a-z/-]*$'), expectAsync((request) { + mock.register('GET', new RegExp(r'b/[a-z/-]*$'), + expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); return mock.respond(new storage.Bucket() @@ -254,7 +255,7 @@ main() { ..timeCreated = new DateTime(2014)); })); - return api.bucketInfo(bucketName).then(expectAsync((result) { + return api.bucketInfo(bucketName).then(expectAsync1((result) { expect(result.bucketName, bucketName); expect(result.created, new DateTime(2014)); })); @@ -264,13 +265,13 @@ main() { group('list', () { test('empty', () { withMockClient((mock, api) { - mock.register('GET', 'b', expectAsync((request) { + mock.register('GET', 'b', expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new storage.Buckets()); })); api.listBucketNames().listen((_) => throw 'Unexpected', - onDone: expectAsync(() => null)); + onDone: expectAsync0(() => null)); }); }); @@ -295,7 +296,7 @@ main() { withMockClient((mock, api) { mock.register( 'POST', 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', - expectAsync((request) { + expectAsync1((request) { return mock.respond(new storage.Object()..name = 'destObject'); })); expect( @@ -336,10 +337,10 @@ main() { expectNormalUpload(mock, data, objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); - mock.registerUpload('POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { return mock .processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + .then(expectAsync1((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); @@ -355,7 +356,7 @@ main() { expect(bytes.length, bytesResumableUpload.length); int count = 0; mock.registerResumableUpload('POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { var requestObject = new storage.Object.fromJson(JSON.decode(request.body)); expect(requestObject.name, objectName); @@ -364,7 +365,7 @@ main() { mock.registerResumableUpload( 'PUT', 'b/$PROJECT/o', - expectAsync((request) { + expectAsync1((request) { count++; if (count == 1) { expect(request.bodyBytes.length, MB); @@ -381,31 +382,31 @@ main() { } Future pipeToSink(sink, List> data) { - sink.done.then(expectAsync(checkResult)); + sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); return new Stream.fromIterable(data) .pipe(sink) - .then(expectAsync(checkResult)) + .then(expectAsync1(checkResult)) .catchError((e) => throw 'Unexpected $e'); } Future addStreamToSink(sink, List> data) { - sink.done.then(expectAsync(checkResult)); + sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); return sink .addStream(new Stream.fromIterable(data)) .then((_) => sink.close()) - .then(expectAsync(checkResult)) + .then(expectAsync1(checkResult)) .catchError((e) => throw 'Unexpected $e'); } Future addToSink(sink, List> data) { - sink.done.then(expectAsync(checkResult)); + sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); data.forEach((bytes) => sink.add(bytes)); return sink .close() - .then(expectAsync(checkResult)) + 
.then(expectAsync1(checkResult)) .catchError((e) => throw 'Unexpected $e'); } @@ -429,11 +430,11 @@ main() { } return upload(pipeToSink, true) - .then(expectAsync((_) => upload(pipeToSink, false))) - .then(expectAsync((_) => upload(addStreamToSink, true))) - .then(expectAsync((_) => upload(addStreamToSink, false))) - .then(expectAsync((_) => upload(addToSink, true))) - .then(expectAsync((_) => upload(addToSink, false))); + .then(expectAsync1((_) => upload(pipeToSink, false))) + .then(expectAsync1((_) => upload(addStreamToSink, true))) + .then(expectAsync1((_) => upload(addStreamToSink, false))) + .then(expectAsync1((_) => upload(addToSink, true))) + .then(expectAsync1((_) => upload(addToSink, false))); } test('write-short-1', () { @@ -459,28 +460,29 @@ main() { withMockClient((mock, api) { Future test(length) { mock.clear(); - mock.registerUpload('POST', 'b/$bucketName/o', expectAsync((request) { + mock.registerUpload('POST', 'b/$bucketName/o', + expectAsync1((request) { return mock.respondError(500); })); var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync(expectNotNull), + expectAsync1(expectNotNull), test: testDetailedApiError); - sink.done.catchError(expectAsync(expectNotNull), + sink.done.catchError(expectAsync1(expectNotNull), test: testDetailedApiError); return new Stream.fromIterable([bytesNormalUpload]) .pipe(sink) .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), + .catchError(expectAsync1(expectNotNull), test: testDetailedApiError); } test(null) // Unknown length. - .then(expectAsync((_) => test(1))) - .then(expectAsync((_) => test(10))) - .then(expectAsync((_) => test(maxNormalUpload))); + .then(expectAsync1((_) => test(1))) + .then(expectAsync1((_) => test(10))) + .then(expectAsync1((_) => test(maxNormalUpload))); }); }); @@ -490,30 +492,30 @@ main() { Future test(length) { mock.clear(); mock.registerResumableUpload('POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock.respondInitiateResumableUpload(PROJECT); })); mock.registerResumableUpload( 'PUT', 'b/$PROJECT/o', - expectAsync((request) { + expectAsync1((request) { return mock.respondError(502); }, count: 3)); // Default 3 retries in googleapis library. var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync(expectNotNull), + expectAsync1(expectNotNull), test: testDetailedApiError); return new Stream.fromIterable([bytesResumableUpload]) .pipe(sink) .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), + .catchError(expectAsync1(expectNotNull), test: testDetailedApiError); } test(null) // Unknown length. - .then(expectAsync((_) => test(minResumableUpload))); + .then(expectAsync1((_) => test(minResumableUpload))); }); }); @@ -522,28 +524,28 @@ main() { Future test(data, length) { mock.clear(); mock.registerResumableUpload('POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock.respondInitiateResumableUpload(PROJECT); })); mock.registerResumableUpload('PUT', 'b/$PROJECT/o', - expectAsync((request) { + expectAsync1((request) { return mock.respondContinueResumableUpload(); })); // Default 3 retries in googleapis library. 
var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync(expectNotNull), + expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); return new Stream.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), + .catchError(expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); } test([bytesResumableUpload], bytesResumableUpload.length + 1) - .then(expectAsync((_) => test([ + .then(expectAsync1((_) => test([ bytesResumableUpload, [1, 2] ], bytesResumableUpload.length + 1))); @@ -556,7 +558,7 @@ main() { var sink = bucket.write(bucketName); sink.done .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), test: testArgumentError); + .catchError(expectAsync1(expectNotNull), test: testArgumentError); var stream = new Stream.fromIterable([ [1, 2, 3] ]); @@ -564,7 +566,7 @@ main() { sink.addError(new ArgumentError()); sink .close() - .catchError(expectAsync(expectNotNull), test: testArgumentError); + .catchError(expectAsync1(expectNotNull), test: testArgumentError); }); }); }); @@ -572,14 +574,14 @@ main() { test('write-long-add-error', () { withMockClient((mock, api) { mock.registerResumableUpload('POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock.respondInitiateResumableUpload(PROJECT); })); // The resumable upload will buffer until either close or a full chunk, // so when we add an error the last byte is never sent. Therefore this // PUT is only called once. mock.registerResumableUpload('PUT', 'b/$PROJECT/o', - expectAsync((request) { + expectAsync1((request) { expect(request.bodyBytes.length, 1024 * 1024); return mock.respondContinueResumableUpload(); })); @@ -588,13 +590,13 @@ main() { var sink = bucket.write(bucketName); sink.done .then((_) => throw 'Unexpected') - .catchError(expectAsync(expectNotNull), test: testArgumentError); + .catchError(expectAsync1(expectNotNull), test: testArgumentError); var stream = new Stream.fromIterable([bytesResumableUpload]); sink.addStream(stream).then((_) { sink.addError(new ArgumentError()); sink .close() - .catchError(expectAsync(expectNotNull), test: testArgumentError); + .catchError(expectAsync1(expectNotNull), test: testArgumentError); }); }); }); @@ -622,10 +624,10 @@ main() { mock.registerUpload( 'POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock .processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + .then(expectAsync1((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); ObjectMetadata m = metadata[count]; @@ -678,7 +680,7 @@ main() { mock.registerResumableUpload( 'POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { var object = new storage.Object.fromJson(JSON.decode(request.body)); ObjectMetadata m = metadata[countInitial]; @@ -694,7 +696,7 @@ main() { mock.registerResumableUpload( 'PUT', 'b/$PROJECT/o', - expectAsync((request) { + expectAsync1((request) { ObjectMetadata m = metadata[countData % metadata.length]; var contentType = m.contentType != null ? 
m.contentType @@ -738,10 +740,10 @@ main() { mock.registerUpload( 'POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock .processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + .then(expectAsync1((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); @@ -790,10 +792,10 @@ main() { mock.registerUpload( 'POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock .processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + .then(expectAsync1((mediaUpload) { var object = new storage.Object.fromJson(JSON.decode(mediaUpload.json)); expect(object.name, objectName); @@ -861,10 +863,10 @@ main() { mock.registerUpload( 'POST', 'b/$bucketName/o', - expectAsync((request) { + expectAsync1((request) { return mock .processNormalMediaUpload(request) - .then(expectAsync((mediaUpload) { + .then(expectAsync1((mediaUpload) { int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; var object = @@ -907,7 +909,7 @@ main() { test('success', () async { await withMockClientAsync((MockClient mock, Storage api) async { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync(mock.respondBytes)); + expectAsync1(mock.respondBytes)); var bucket = api.bucket(bucketName); var data = []; @@ -960,7 +962,7 @@ main() { test('with length', () async { await withMockClientAsync((MockClient mock, Storage api) async { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync(mock.respondBytes)); + expectAsync1(mock.respondBytes)); var bucket = api.bucket(bucketName); var data = []; @@ -973,7 +975,7 @@ main() { test('with offset and length', () async { await withMockClientAsync((MockClient mock, Storage api) async { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync(mock.respondBytes)); + expectAsync1(mock.respondBytes)); var bucket = api.bucket(bucketName); var data = []; @@ -988,7 +990,7 @@ main() { test('file does not exist', () async { await withMockClientAsync((MockClient mock, Storage api) async { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync((request) { + expectAsync1((request) { expect(request.url.queryParameters['alt'], 'media'); return mock.respondError(404); })); @@ -1008,7 +1010,7 @@ main() { test('stat', () { withMockClient((mock, api) { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync((request) { + expectAsync1((request) { expect(request.url.queryParameters['alt'], 'json'); return mock.respond(new storage.Object() ..name = objectName @@ -1018,7 +1020,7 @@ main() { var api = new Storage(mock, PROJECT); var bucket = api.bucket(bucketName); - bucket.info(objectName).then(expectAsync((stat) { + bucket.info(objectName).then(expectAsync1((stat) { expect(stat.name, objectName); expect(stat.updated, new DateTime(2014)); expect(stat.metadata.contentType, 'mime/type'); @@ -1029,7 +1031,7 @@ main() { test('stat-acl', () { withMockClient((mock, api) { mock.register('GET', 'b/$bucketName/o/$objectName', - expectAsync((request) { + expectAsync1((request) { expect(request.url.queryParameters['alt'], 'json'); var acl1 = new storage.ObjectAccessControl(); acl1.entity = 'user-1234567890'; @@ -1047,7 +1049,7 @@ main() { var api = new Storage(mock, PROJECT); var bucket = api.bucket(bucketName); - bucket.info(objectName).then(expectAsync((ObjectInfo info) { + bucket.info(objectName).then(expectAsync1((ObjectInfo info) { expect(info.name, objectName); 
expect(info.metadata.acl.entries.length, 3); expect(info.metadata.acl.entries[0] is AclEntry, isTrue); @@ -1063,14 +1065,14 @@ main() { group('list', () { test('empty', () { withMockClient((mock, api) { - mock.register('GET', 'b/$bucketName/o', expectAsync((request) { + mock.register('GET', 'b/$bucketName/o', expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new storage.Objects()); })); var bucket = api.bucket(bucketName); bucket.list().listen((_) => throw 'Unexpected', - onDone: expectAsync(() => null)); + onDone: expectAsync0(() => null)); }); }); From 795cfbeff94b391fab304d98e9d0eb2cb9c4d5c0 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 16 Mar 2018 08:43:01 -0700 Subject: [PATCH 107/239] Delete PATENTS Approved by Google OSS --- pkgs/gcloud/PATENTS | 23 ----------------------- 1 file changed, 23 deletions(-) delete mode 100644 pkgs/gcloud/PATENTS diff --git a/pkgs/gcloud/PATENTS b/pkgs/gcloud/PATENTS deleted file mode 100644 index 69541968..00000000 --- a/pkgs/gcloud/PATENTS +++ /dev/null @@ -1,23 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Dart Project. - -Google hereby grants to you a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this -section) patent license to make, have made, use, offer to sell, sell, -import, transfer, and otherwise run, modify and propagate the contents -of this implementation of Dart, where such license applies only to -those patent claims, both currently owned by Google and acquired in -the future, licensable by Google that are necessarily infringed by -this implementation of Dart. This grant does not include claims that -would be infringed only as a consequence of further modification of -this implementation. If you or your agent or exclusive licensee -institute or order or agree to the institution of patent litigation -against any entity (including a cross-claim or counterclaim in a -lawsuit) alleging that this implementation of Dart or any code -incorporated within this implementation of Dart constitutes direct or -contributory patent infringement, or inducement of patent -infringement, then any patent rights granted to you under this License -for this implementation of Dart shall terminate as of the date such -litigation is filed. From 318f89b42d5a55d28f6cdf5eff8bce43c09725f0 Mon Sep 17 00:00:00 2001 From: Jakob Andersen Date: Fri, 18 May 2018 11:05:38 +0200 Subject: [PATCH 108/239] Fix Dart 2 runtime issues. (dart-lang/gcloud#53) Upgrade to Dart 2.0.0-dev.54.0 to fix issues with mirrors in Dart 2 mode. Added lots of types. 
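Most of the changes follow a single pattern: generics that previously relied on Dart 1's implicit dynamic (bare collection literals, StreamController and Future types, and similar) now spell out their type arguments. A minimal sketch of that pattern, assuming nothing about the package itself — the names below are invented purely for illustration:

    import 'dart:async';

    void main() {
      // Dart 1 style: a raw generic falls back to dynamic, which Dart 2's
      // sound type system can reject wherever a specific type is expected.
      var names = []; // inferred as List<dynamic>
      names.add('value');

      // Dart 2 style: explicit type arguments keep the static type in
      // agreement with how the values are actually used.
      var typedNames = <String>[];
      typedNames.add('value');

      var controller = new StreamController<String>();
      controller.stream.listen(print); // prints: value
      controller.add(typedNames.first);
      controller.close();
    }

Roughly that substitution, applied case by case, is what the changes below do for the library and test sources, alongside renaming the `datastore` import prefix to `ds`.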
--- pkgs/gcloud/.gitignore | 3 +- pkgs/gcloud/CHANGELOG.md | 4 ++ pkgs/gcloud/lib/common.dart | 4 +- pkgs/gcloud/lib/datastore.dart | 2 +- pkgs/gcloud/lib/db.dart | 2 +- pkgs/gcloud/lib/service_scope.dart | 4 +- pkgs/gcloud/lib/src/datastore_impl.dart | 13 ++--- pkgs/gcloud/lib/src/db/annotations.dart | 11 ++-- pkgs/gcloud/lib/src/db/db.dart | 54 +++++++++---------- pkgs/gcloud/lib/src/db/model_db.dart | 16 +++--- pkgs/gcloud/lib/src/db/model_db_impl.dart | 38 ++++++------- pkgs/gcloud/lib/src/db/models.dart | 1 - pkgs/gcloud/lib/src/pubsub_impl.dart | 18 +++---- pkgs/gcloud/lib/src/storage_impl.dart | 19 +++---- pkgs/gcloud/pubspec.yaml | 8 +-- pkgs/gcloud/test/common.dart | 22 ++++---- pkgs/gcloud/test/common_e2e.dart | 2 +- .../datastore/e2e/datastore_test_impl.dart | 15 +++--- pkgs/gcloud/test/datastore/e2e/utils.dart | 6 +-- .../gcloud/test/datastore/error_matchers.dart | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 22 ++++---- pkgs/gcloud/test/db/properties_test.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 21 ++++---- pkgs/gcloud/test/storage/storage_test.dart | 43 ++++++++------- 24 files changed, 174 insertions(+), 158 deletions(-) diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore index 794cf6c1..7903d444 100644 --- a/pkgs/gcloud/.gitignore +++ b/pkgs/gcloud/.gitignore @@ -1,4 +1,5 @@ +.dart_tool/ pubspec.lock packages .pub -.packages \ No newline at end of file +.packages diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index f18707b9..9a6645b8 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.5.0 + +* Fixes to support Dart 2. + ## 0.4.0+1 * Made a number of strong-mode improvements. diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index d8135000..c5212778 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -36,10 +36,10 @@ class StreamFromPages { bool _paused = false; bool _cancelled = false; Page _currentPage; - StreamController _controller; + StreamController _controller; StreamFromPages(this._firstPageProvider) { - _controller = new StreamController( + _controller = new StreamController( sync: true, onListen: _onListen, onPause: _onPause, diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index cbdbd2dd..9dd85b12 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -141,7 +141,7 @@ class Key { factory Key.fromParent(String kind, int id, {Key parent}) { var partition; - var elements = []; + var elements = []; if (parent != null) { partition = parent.partition; elements.addAll(parent.elements); diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index b020d3f7..3322afd5 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -15,7 +15,7 @@ import 'dart:core' as core; import 'dart:mirrors' as mirrors; import 'common.dart' show StreamFromPages; -import 'datastore.dart' as datastore; +import 'datastore.dart' as ds; import 'service_scope.dart' as ss; part 'src/db/annotations.dart'; diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 754944f5..d9e5d089 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -122,12 +122,12 @@ void register(Object key, Object value, {onScopeExit()}) { /// /// The registered on-scope-exit functions are executed in reverse registration /// order. 
-Object registerScopeExitCallback(onScopeExitCallback()) { +void registerScopeExitCallback(onScopeExitCallback()) { var serviceScope = _serviceScope; if (serviceScope == null) { throw new StateError('Not running inside a service scope zone.'); } - return serviceScope.registerOnScopeExitCallback(onScopeExitCallback); + serviceScope.registerOnScopeExitCallback(onScopeExitCallback); } /// Look up an item by it's key in the currently active service scope. diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index b5844e4a..789a8865 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -162,8 +162,8 @@ class DatastoreImpl implements datastore.Datastore { } static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { - var unindexedProperties = new Set(); - var properties = {}; + var unindexedProperties = new Set(); + var properties = {}; if (entity.properties != null) { entity.properties.forEach((String name, api.Value value) { @@ -267,7 +267,7 @@ class DatastoreImpl implements datastore.Datastore { return orders.map(_convertDatastore2ApiOrder).toList(); } - static Future _handleError(error, stack) { + static Future _handleError(error, stack) { if (error is api.DetailedApiRequestError) { if (error.status == 400) { return new Future.error( @@ -317,7 +317,7 @@ class DatastoreImpl implements datastore.Datastore { request.mode = 'NON_TRANSACTIONAL'; } - var mutations = request.mutations = []; + var mutations = request.mutations = []; if (inserts != null) { for (int i = 0; i < inserts.length; i++) { mutations.add(new api.Mutation() @@ -349,7 +349,8 @@ class DatastoreImpl implements datastore.Datastore { keys = mutationResults .skip(autoIdStartIndex) .take(autoIdInserts.length) - .map((api.MutationResult r) => _convertApi2DatastoreKey(r.key)) + .map( + (api.MutationResult r) => _convertApi2DatastoreKey(r.key)) .toList(); } return new datastore.CommitResult(keys); @@ -495,7 +496,7 @@ class QueryPageImpl implements Page { request.query.limit = batchLimit; return api.projects.runQuery(request, project).then((response) { - var returnedEntities = const []; + var returnedEntities = const []; var batch = response.batch; if (batch.entityResults != null) { diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 92459d35..88a511e3 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -179,7 +179,7 @@ class ModelKeyProperty extends PrimitiveProperty { Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; - return db.fromDatastoreKey(value as datastore.Key); + return db.fromDatastoreKey(value as ds.Key); } } @@ -201,13 +201,13 @@ class BlobProperty extends PrimitiveProperty { Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; - return new datastore.BlobValue(value); + return new ds.BlobValue(value); } Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; - return (value as datastore.BlobValue).bytes; + return (value as ds.BlobValue).bytes; } } @@ -298,4 +298,9 @@ class StringListProperty extends ListProperty { const StringListProperty({String propertyName, bool indexed: true}) : super(const StringProperty(), propertyName: propertyName, indexed: indexed); + + @override + Object decodePrimitiveValue(ModelDB db, Object value) { + return (super.decodePrimitiveValue(db, value) as core.List).cast(); + } 
} diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index aa738253..d463101e 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -25,7 +25,7 @@ class Transaction { static const int _TRANSACTION_COMMITTED = 2; final DatastoreDB db; - final datastore.Transaction _datastoreTransaction; + final ds.Transaction _datastoreTransaction; final List _inserts = []; final List _deletes = []; @@ -108,30 +108,30 @@ class Transaction { } class Query { - final _relationMapping = const { - '<': datastore.FilterRelation.LessThan, - '<=': datastore.FilterRelation.LessThanOrEqual, - '>': datastore.FilterRelation.GreatherThan, - '>=': datastore.FilterRelation.GreatherThanOrEqual, - '=': datastore.FilterRelation.Equal, + final _relationMapping = const { + '<': ds.FilterRelation.LessThan, + '<=': ds.FilterRelation.LessThanOrEqual, + '>': ds.FilterRelation.GreatherThan, + '>=': ds.FilterRelation.GreatherThanOrEqual, + '=': ds.FilterRelation.Equal, }; final DatastoreDB _db; - final datastore.Transaction _transaction; + final ds.Transaction _transaction; final String _kind; final Partition _partition; final Key _ancestorKey; - final List _filters = []; - final List _orders = []; + final List _filters = []; + final List _orders = []; int _offset; int _limit; Query(DatastoreDB dbImpl, Type kind, {Partition partition, Key ancestorKey, - datastore.Transaction datastoreTransaction}) + ds.Transaction datastoreTransaction}) : _db = dbImpl, _kind = dbImpl.modelDB.kindName(kind), _partition = partition, @@ -165,11 +165,11 @@ class Query { // This is for backwards compatibility: We allow [datastore.Key]s for now. // TODO: We should remove the condition in a major version update of // `package:gcloud`. - if (comparisonObject is! datastore.Key) { + if (comparisonObject is! ds.Key) { comparisonObject = _db.modelDB .toDatastoreValue(_kind, name, comparisonObject, forComparison: true); } - _filters.add(new datastore.Filter( + _filters.add(new ds.Filter( _relationMapping[comparison], propertyName, comparisonObject)); } @@ -182,11 +182,11 @@ class Query { void order(String orderString) { // TODO: validate [orderString] (e.g. 
is name valid) if (orderString.startsWith('-')) { - _orders.add(new datastore.Order(datastore.OrderDirection.Decending, + _orders.add(new ds.Order(ds.OrderDirection.Decending, _convertToDatastoreName(orderString.substring(1)))); } else { - _orders.add(new datastore.Order(datastore.OrderDirection.Ascending, - _convertToDatastoreName(orderString))); + _orders.add(new ds.Order( + ds.OrderDirection.Ascending, _convertToDatastoreName(orderString))); } } @@ -220,7 +220,7 @@ class Query { if (_ancestorKey != null) { ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); } - var query = new datastore.Query( + var query = new ds.Query( ancestorKey: ancestorKey, kind: _kind, filters: _filters, @@ -230,10 +230,10 @@ class Query { var partition; if (_partition != null) { - partition = new datastore.Partition(_partition.namespace); + partition = new ds.Partition(_partition.namespace); } - return new StreamFromPages((int pageSize) { + return new StreamFromPages((int pageSize) { return _db.datastore .query(query, transaction: _transaction, partition: partition); }).stream.map(_db.modelDB.fromDatastoreEntity); @@ -254,7 +254,7 @@ class Query { } class DatastoreDB { - final datastore.Datastore datastore; + final ds.Datastore datastore; final ModelDB _modelDB; Partition _defaultPartition; @@ -356,13 +356,13 @@ class DatastoreDB { Future _commitHelper(DatastoreDB db, {List inserts, List deletes, - datastore.Transaction datastoreTransaction}) { + ds.Transaction datastoreTransaction}) { var entityInserts, entityAutoIdInserts, entityDeletes; var autoIdModelInserts; if (inserts != null) { - entityInserts = []; - entityAutoIdInserts = []; - autoIdModelInserts = []; + entityInserts = []; + entityAutoIdInserts = []; + autoIdModelInserts = []; for (var model in inserts) { // If parent was not explicitly set, we assume this model will map to @@ -388,7 +388,7 @@ Future _commitHelper(DatastoreDB db, autoIdInserts: entityAutoIdInserts, deletes: entityDeletes, transaction: datastoreTransaction) - .then((datastore.CommitResult result) { + .then((ds.CommitResult result) { if (entityAutoIdInserts != null && entityAutoIdInserts.length > 0) { for (var i = 0; i < result.autoIdInsertKeys.length; i++) { var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]); @@ -400,11 +400,11 @@ Future _commitHelper(DatastoreDB db, } Future> _lookupHelper(DatastoreDB db, List keys, - {datastore.Transaction datastoreTransaction}) { + {ds.Transaction datastoreTransaction}) { var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList(); return db.datastore .lookup(entityKeys, transaction: datastoreTransaction) - .then((List entities) { + .then((List entities) { return entities.map(db.modelDB.fromDatastoreEntity).toList(); }); } diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 63fc3e71..8feb11e7 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -11,24 +11,24 @@ part of gcloud.db; */ abstract class ModelDB { /** - * Converts a [datastore.Key] to a [Key]. + * Converts a [ds.Key] to a [Key]. */ - Key fromDatastoreKey(datastore.Key datastoreKey); + Key fromDatastoreKey(ds.Key datastoreKey); /** - * Converts a [Key] to a [datastore.Key]. + * Converts a [Key] to a [ds.Key]. */ - datastore.Key toDatastoreKey(Key dbKey); + ds.Key toDatastoreKey(Key dbKey); /** - * Converts a [Model] instance to a [datastore.Entity]. + * Converts a [Model] instance to a [ds.Entity]. 
*/ - datastore.Entity toDatastoreEntity(Model model); + ds.Entity toDatastoreEntity(Model model); /** - * Converts a [datastore.Entity] to a [Model] instance. + * Converts a [ds.Entity] to a [Model] instance. */ - Model fromDatastoreEntity(datastore.Entity entity); + Model fromDatastoreEntity(ds.Entity entity); /** * Returns the kind name for instances of [type]. diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 124b3d69..d1a016d3 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -56,8 +56,8 @@ class ModelDBImpl implements ModelDB { _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); } - /// Converts a [datastore.Key] to a [Key]. - Key fromDatastoreKey(datastore.Key datastoreKey) { + /// Converts a [ds.Key] to a [Key]. + Key fromDatastoreKey(ds.Key datastoreKey) { var namespace = new Partition(datastoreKey.partition.namespace); Key key = namespace.emptyKey; for (var element in datastoreKey.elements) { @@ -73,9 +73,9 @@ class ModelDBImpl implements ModelDB { return key; } - /// Converts a [Key] to a [datastore.Key]. - datastore.Key toDatastoreKey(Key dbKey) { - List elements = []; + /// Converts a [Key] to a [ds.Key]. + ds.Key toDatastoreKey(Key dbKey) { + List elements = []; var currentKey = dbKey; while (!currentKey.isEmpty) { var id = currentKey.id; @@ -94,16 +94,16 @@ class ModelDBImpl implements ModelDB { 'id was of type ${id.runtimeType}'); } - elements.add(new datastore.KeyElement(kind, id)); + elements.add(new ds.KeyElement(kind, id)); currentKey = currentKey.parent; } Partition partition = currentKey._parent; - return new datastore.Key(elements.reversed.toList(), - partition: new datastore.Partition(partition.namespace)); + return new ds.Key(elements.reversed.toList(), + partition: new ds.Partition(partition.namespace)); } - /// Converts a [Model] instance to a [datastore.Entity]. - datastore.Entity toDatastoreEntity(Model model) { + /// Converts a [Model] instance to a [ds.Entity]. + ds.Entity toDatastoreEntity(Model model) { try { var modelDescription = _modelDescriptionForType(model.runtimeType); return modelDescription.encodeModel(this, model); @@ -112,8 +112,8 @@ class ModelDBImpl implements ModelDB { } } - /// Converts a [datastore.Entity] to a [Model] instance. - Model fromDatastoreEntity(datastore.Entity entity) { + /// Converts a [ds.Entity] to a [Model] instance. + Model fromDatastoreEntity(ds.Entity entity) { if (entity == null) return null; Key key = fromDatastoreKey(entity.key); @@ -383,17 +383,17 @@ class _ModelDescription { String kindName(ModelDBImpl db) => kind; - datastore.Entity encodeModel(ModelDBImpl db, T model) { + ds.Entity encodeModel(ModelDBImpl db, T model) { var key = db.toDatastoreKey(model.key); - var properties = {}; + var properties = {}; var mirror = mirrors.reflect(model); db._propertiesForModel(this).forEach((String fieldName, Property prop) { _encodeProperty(db, model, mirror, properties, fieldName, prop); }); - return new datastore.Entity(key, properties, + return new ds.Entity(key, properties, unIndexedProperties: _unIndexedProperties); } @@ -412,7 +412,7 @@ class _ModelDescription { properties[propertyName] = prop.encodeValue(db, value); } - Model decodeEntity(ModelDBImpl db, Key key, datastore.Entity entity) { + Model decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { if (entity == null) return null; // NOTE: this assumes a default constructor for the model classes! 
@@ -429,7 +429,7 @@ class _ModelDescription { return mirror.reflectee; } - _decodeProperty(ModelDBImpl db, datastore.Entity entity, + _decodeProperty(ModelDBImpl db, ds.Entity entity, mirrors.InstanceMirror mirror, String fieldName, Property prop) { String propertyName = fieldNameToPropertyName(fieldName); @@ -491,7 +491,7 @@ class _ExpandoModelDescription extends _ModelDescription { usedNames = new Set()..addAll(realFieldNames)..addAll(realPropertyNames); } - datastore.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { + ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { var entity = super.encodeModel(db, model); var properties = entity.properties; model.additionalProperties.forEach((String key, Object value) { @@ -503,7 +503,7 @@ class _ExpandoModelDescription extends _ModelDescription { return entity; } - Model decodeEntity(ModelDBImpl db, Key key, datastore.Entity entity) { + Model decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { if (entity == null) return null; ExpandoModel model = super.decodeEntity(db, key, entity); diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 84546b93..b4672aa8 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -119,7 +119,6 @@ abstract class Model { * set arbitrary fields on these models. The expanded values must be values * accepted by the [RawDatastore] implementation. */ -@proxy abstract class ExpandoModel extends Model { final Map additionalProperties = {}; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 156f6992..1a34d39b 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -221,10 +221,10 @@ class _MessageImpl implements Message { : _stringMessage = null; List get asBytes => - _bytesMessage != null ? _bytesMessage : UTF8.encode(_stringMessage); + _bytesMessage != null ? _bytesMessage : utf8.encode(_stringMessage); String get asString => - _stringMessage != null ? _stringMessage : UTF8.decode(_bytesMessage); + _stringMessage != null ? _stringMessage : utf8.decode(_bytesMessage); } /// Message received using [Subscription.pull]. @@ -246,7 +246,7 @@ class _PullMessage implements Message { } String get asString { - if (_string == null) _string = UTF8.decode(_message.dataAsBytes); + if (_string == null) _string = utf8.decode(_message.dataAsBytes); return _string; } @@ -265,9 +265,9 @@ class _PushMessage implements Message { _PushMessage(this._base64Message, this.attributes); - List get asBytes => BASE64.decode(_base64Message); + List get asBytes => base64.decode(_base64Message); - String get asString => UTF8.decode(asBytes); + String get asString => utf8.decode(asBytes); } /// Pull event received from Pub/Sub pull delivery. 
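These Pub/Sub message helpers are being moved off the removed SCREAMING_CAPS codecs (`UTF8`, `BASE64`, `JSON`) onto the lowercase Dart 2 `dart:convert` members. A minimal, self-contained sketch of the same round-trip the accessors above rely on; the payload and field names are illustrative only:

```dart
import 'dart:convert';

void main() {
  // A text payload as it would be passed to publishString().
  const text = 'Hello, world!';

  // Dart 2 spelling: utf8/base64/json instead of UTF8/BASE64/JSON.
  final List<int> bytes = utf8.encode(text);    // what asBytes returns
  final String wireData = base64.encode(bytes); // push-style `data` field

  // Decoding mirrors _PushMessage.asBytes / asString.
  final roundTripped = utf8.decode(base64.decode(wireData));
  assert(roundTripped == text);

  // jsonDecode replaces JSON.decode for request/response bodies.
  final body = jsonDecode('{"message": {"data": "$wireData"}}') as Map;
  print((body['message'] as Map)['data'] == wireData); // true
}
```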
@@ -309,14 +309,14 @@ class _PushEventImpl implements PushEvent { _PushEventImpl(this._message, this._subscriptionName); factory _PushEventImpl.fromJson(String json) { - Map body = JSON.decode(json); + Map body = jsonDecode(json); String data = body['message']['data']; - Map labels = new HashMap(); + Map labels = new HashMap(); body['message']['labels'].forEach((label) { var key = label['key']; var value = label['strValue']; if (value == null) value = label['numValue']; - labels[key] = value; + labels[key] = value.toString(); }); String subscription = body['subscription']; // TODO(#1): Remove this when the push event subscription name is prefixed @@ -353,7 +353,7 @@ class _TopicImpl implements Topic { Future delete() => _api._deleteTopic(_topic.name); Future publishString(String message, {Map attributes}) { - return _api._publish(_topic.name, UTF8.encode(message), attributes); + return _api._publish(_topic.name, utf8.encode(message), attributes); } Future publishBytes(List message, {Map attributes}) { diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 395c84b5..90a17fe3 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -372,10 +372,10 @@ class _ObjectInfoImpl implements ObjectInfo { String get etag => _object.etag; - List get md5Hash => BASE64.decode(_object.md5Hash); + List get md5Hash => base64.decode(_object.md5Hash); int get crc32CChecksum { - var list = BASE64.decode(_object.crc32c); + var list = base64.decode(_object.crc32c); return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; } @@ -452,7 +452,7 @@ class _ObjectMetadata implements ObjectMetadata { Map get custom { if (_object.metadata == null) return null; if (_cachedCustom == null) { - _cachedCustom = new UnmodifiableMapView(_object.metadata); + _cachedCustom = new UnmodifiableMapView(_object.metadata); } return _cachedCustom; } @@ -497,7 +497,7 @@ class _MediaUploadStreamSink implements StreamSink> { final _controller = new StreamController>(sync: true); StreamSubscription _subscription; StreamController _resumableController; - final _doneCompleter = new Completer(); + final _doneCompleter = new Completer(); static const int _STATE_LENGTH_KNOWN = 0; static const int _STATE_PROBING_LENGTH = 1; @@ -537,7 +537,7 @@ class _MediaUploadStreamSink implements StreamSink> { return _controller.addStream(stream); } - Future close() { + Future close() { _controller.close(); return _doneCompleter.future; } @@ -552,7 +552,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_bufferLength > _maxNormalUploadLength) { // Start resumable upload. // TODO: Avoid using another stream-controller. - _resumableController = new StreamController(sync: true); + _resumableController = new StreamController>(sync: true); buffer.forEach(_resumableController.add); _startResumableUpload(_resumableController.stream, _length); _state = _STATE_DECIDED_RESUMABLE; @@ -567,7 +567,8 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _STATE_PROBING_LENGTH) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. 
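The media-upload sink above buffers incoming chunks until it can decide between a single-request upload and a resumable one. The following is a hedged usage sketch of how a caller typically drives that sink through `Bucket.write`/`Bucket.writeBytes`; the bucket instance, object names and content are made up, and passing `length` up front is what lets the sink skip the length-probing state:

```dart
import 'dart:async';
import 'dart:convert';

import 'package:gcloud/storage.dart';

Future uploadExamples(Bucket bucket) async {
  // Known length: the sink can start a normal (single-request) upload
  // right away instead of buffering data to probe the size.
  final data = utf8.encode('hello, storage');
  final sink = bucket.write('demo/greeting.txt',
      length: data.length, contentType: 'text/plain');
  sink.add(data);
  await sink.close();

  // Convenience wrapper for in-memory bytes; it drives the same sink
  // internally with the length already known.
  await bucket.writeBytes('demo/raw.bin', [1, 2, 3, 4]);
}
```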
- _startNormalUpload(new Stream.fromIterable(buffer), _bufferLength); + _startNormalUpload( + new Stream>.fromIterable(buffer), _bufferLength); } else { _resumableController.close(); } @@ -591,7 +592,7 @@ class _MediaUploadStreamSink implements StreamSink> { _doneCompleter.completeError(e, s); } - void _startNormalUpload(Stream stream, int length) { + void _startNormalUpload(Stream> stream, int length) { var contentType = _object.contentType != null ? _object.contentType : 'application/octet-stream'; @@ -607,7 +608,7 @@ class _MediaUploadStreamSink implements StreamSink> { }, onError: _completeError); } - void _startResumableUpload(Stream stream, int length) { + void _startResumableUpload(Stream> stream, int length) { var contentType = _object.contentType != null ? _object.contentType : 'application/octet-stream'; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index cff67b5c..9f4432bc 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,13 +1,13 @@ name: gcloud -version: 0.4.0+1 +version: 0.5.0 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: - sdk: '>=1.13.0 <2.0.0' + sdk: '>=2.0.0-dev.54.0 <2.0.0' dependencies: - googleapis: '>=0.50.0 <0.51.0' - googleapis_beta: '>=0.45.0 <0.46.0' + googleapis: '>=0.50.2 <1.0.0' + googleapis_beta: '>=0.45.2 <1.0.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index d6003913..68de98fb 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -86,7 +86,7 @@ class MockClient extends http.BaseClient { Future respond(response) { return new Future.value(new http.Response( - JSON.encode(response.toJson()), 200, + jsonEncode(response.toJson()), 200, headers: RESPONSE_HEADERS)); } @@ -96,7 +96,7 @@ class MockClient extends http.BaseClient { } Future respondInitiateResumableUpload(project) { - Map headers = new Map.from(RESPONSE_HEADERS); + Map headers = new Map.from(RESPONSE_HEADERS); headers['location'] = 'https://www.googleapis.com/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' @@ -113,7 +113,7 @@ class MockClient extends http.BaseClient { expect(request.url.queryParameters['alt'], 'media'); var myBytes = bytes; - var headers = new Map.from(RESPONSE_HEADERS); + var headers = new Map.from(RESPONSE_HEADERS); var range = request.headers['range']; if (range != null) { @@ -134,12 +134,12 @@ class MockClient extends http.BaseClient { var error = { 'error': {'code': statusCode, 'message': 'error'} }; - return new Future.value(new http.Response(JSON.encode(error), statusCode, + return new Future.value(new http.Response(jsonEncode(error), statusCode, headers: RESPONSE_HEADERS)); } - Future processNormalMediaUpload(http.Request request) { - var completer = new Completer(); + Future processNormalMediaUpload(http.Request request) { + var completer = new Completer(); var contentType = new http_parser.MediaType.parse(request.headers['content-type']); @@ -160,15 +160,15 @@ class MockClient extends http.BaseClient { // First part in the object JSON. expect(contentType, 'application/json; charset=utf-8'); mimeMultipart - .transform(UTF8.decoder) + .transform(utf8.decoder) .fold('', (p, e) => '$p$e') .then((j) => json = j); } else if (partCount == 2) { // Second part is the base64 encoded bytes. 
mimeMultipart - .transform(ASCII.decoder) + .transform(ascii.decoder) .fold('', (p, e) => '$p$e') - .then(BASE64.decode) + .then(base64.decode) .then((bytes) { completer.complete(new NormalMediaUpload(json, bytes, contentType)); }); @@ -200,14 +200,14 @@ class TraceClient extends http.BaseClient { print(request); return request.finalize().toBytes().then((body) { print('--- START REQUEST ---'); - print(UTF8.decode(body)); + print(utf8.decode(body)); print('--- END REQUEST ---'); var r = new RequestImpl(request.method, request.url, body); r.headers.addAll(request.headers); return client.send(r).then((http.StreamedResponse rr) { return rr.stream.toBytes().then((body) { print('--- START RESPONSE ---'); - print(UTF8.decode(body)); + print(utf8.decode(body)); print('--- END RESPONSE ---'); return new http.StreamedResponse( new http.ByteStream.fromBytes(body), rr.statusCode, diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 612f4f1c..d0ba0f8f 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -51,7 +51,7 @@ Future serviceKeyJson(String serviceKeyLocation) { if (!serviceKeyLocation.startsWith('gs://')) { return new File(serviceKeyLocation).readAsString(); } - var future; + Future future; if (onBot()) { future = Process.run( 'python', ['third_party/gsutil/gsutil', 'cat', serviceKeyLocation], diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 5606ea6f..1936d38a 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -45,13 +45,13 @@ Future sleep(Duration duration) { } Future> consumePages(FirstPageProvider provider) { - return new StreamFromPages(provider).stream.toList(); + return new StreamFromPages(provider).stream.toList(); } void runTests(Datastore datastore, String namespace) { Partition partition = new Partition(namespace); - Future withTransaction(Function f, {bool xg: false}) { + Future withTransaction(Function f, {bool xg: false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } @@ -258,7 +258,8 @@ void runTests(Datastore datastore, String namespace) { test('negative_insert_20000_entities', () async { // Maybe it should not be a [DataStoreError] here? // FIXME/TODO: This was adapted - expect(datastore.commit(inserts: named20000), throwsA(isSocketException)); + expect( + datastore.commit(inserts: named20000), throwsA(isSocketException)); }); // TODO: test invalid inserts (like entities without key, ...) 
@@ -542,7 +543,7 @@ void runTests(Datastore datastore, String namespace) { var changedEntities = new List(entities.length); for (int i = 0; i < entities.length; i++) { var entity = entities[i]; - var newProperties = new Map.from(entity.properties); + var newProperties = new Map.from(entity.properties); for (var prop in newProperties.keys) { newProperties[prop] = "${newProperties[prop]}conflict$value"; } @@ -762,7 +763,7 @@ void runTests(Datastore datastore, String namespace) { var orders = [new Order(OrderDirection.Decending, QUERY_KEY)]; test('query', () { - return insert(stringNamedEntities, []).then((keys) { + return insert(stringNamedEntities, []).then((keys) { return waitUntilEntitiesReady(datastore, stringNamedKeys, partition) .then((_) { var tests = [ @@ -1072,9 +1073,9 @@ Future waitUntilEntitiesGone(Datastore db, List keys, Partition p) { Future waitUntilEntitiesHelper( Datastore db, List keys, bool positive, Partition p) { - var keysByKind = {}; + var keysByKind = >{}; for (var key in keys) { - keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); + keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); } Future waitForKeys(String kind, List keys) { diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 101fc740..bc7d4239 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -40,7 +40,7 @@ Map buildProperties(int i) { List buildKeys(int from, int to, {Function idFunction, String kind: TEST_KIND, Partition partition}) { - var keys = []; + var keys = []; for (var i = from; i < to; i++) { keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); } @@ -49,7 +49,7 @@ List buildKeys(int from, int to, List buildEntities(int from, int to, {Function idFunction, String kind: TEST_KIND, Partition partition}) { - var entities = []; + var entities = []; var unIndexedProperties = new Set(); for (var i = from; i < to; i++) { var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition); @@ -87,7 +87,7 @@ List buildEntityWithAllProperties(int from, int to, }; } - var entities = []; + var entities = []; for (var i = from; i < to; i++) { var key = buildKey(i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index be1496f0..9d0f7b1d 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -53,4 +53,4 @@ const isTimeoutError = const _TimeoutError(); const isInt = const _IntMatcher(); -const isSocketException = const _SocketException(); \ No newline at end of file +const isSocketException = const _SocketException(); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index f31cde8b..4ec961cf 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -215,7 +215,7 @@ void runTests(db.DatastoreDB store, String namespace) { group('insert_lookup_delete', () { test('persons', () { var root = partition.emptyKey; - var persons = []; + var persons = []; for (var i = 1; i <= 10; i++) { persons.add(new Person() ..id = i @@ -228,7 +228,7 @@ void runTests(db.DatastoreDB store, String namespace) { }); test('users', () { var root = partition.emptyKey; - var users = []; + var users = []; for (var i = 1; i <= 10; i++) { users.add(new User() ..id = i @@ -241,9 +241,9 @@ void runTests(db.DatastoreDB 
store, String namespace) { }); test('expando_insert', () { var root = partition.emptyKey; - var expandoPersons = []; + var expandoPersons = []; for (var i = 1; i <= 10; i++) { - var expandoPerson = new ExpandoPerson() + dynamic expandoPerson = new ExpandoPerson() ..parentKey = root ..id = i ..name = 'user$i'; @@ -257,7 +257,7 @@ void runTests(db.DatastoreDB store, String namespace) { }); test('transactional_insert', () { var root = partition.emptyKey; - var models = []; + var models = []; models.add(new Person() ..id = 1 @@ -270,7 +270,7 @@ void runTests(db.DatastoreDB store, String namespace) { ..age = 2 ..name = 'user2' ..nickname = 'nickname2'); - var expandoPerson = new ExpandoPerson() + dynamic expandoPerson = new ExpandoPerson() ..parentKey = root ..id = 3 ..name = 'user1'; @@ -384,7 +384,7 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var users = []; for (var i = 1; i <= 10; i++) { - var languages = []; + var languages = []; if (i == 9) { languages = ['foo']; } else if (i == 10) { @@ -402,7 +402,7 @@ void runTests(db.DatastoreDB store, String namespace) { var expandoPersons = []; for (var i = 1; i <= 3; i++) { - var expandoPerson = new ExpandoPerson() + dynamic expandoPerson = new ExpandoPerson() ..parentKey = root ..id = i ..name = 'user$i' @@ -572,7 +572,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter on expanded String property () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('foo =', expandoPersons.last.foo) + ..filter('foo =', (expandoPersons.last as dynamic).foo) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); @@ -580,7 +580,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter on expanded int property () async { var query = store.query(ExpandoPerson, partition: partition) - ..filter('bar =', expandoPersons.last.bar) + ..filter('bar =', (expandoPersons.last as dynamic).bar) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); compareModels([expandoPersons.last], models); @@ -651,7 +651,7 @@ Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, bool positive, db.Partition partition) { var keysByKind = {}; for (var key in keys) { - keysByKind.putIfAbsent(key.type, () => []).add(key); + keysByKind.putIfAbsent(key.type, () => []).add(key); } Future waitForKeys(Type kind, List keys) { diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 30ae3d0d..345db687 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -75,7 +75,7 @@ main() { expect(prop.validate(null, null), isTrue); expect(prop.validate(null, [1, 2]), isTrue); expect(prop.encodeValue(null, null), equals(null)); - expect((prop.encodeValue(null, []) as datastore.BlobValue).bytes, + expect((prop.encodeValue(null, []) as datastore.BlobValue).bytes, equals([])); expect((prop.encodeValue(null, [1, 2]) as datastore.BlobValue).bytes, equals([1, 2])); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 86889c6e..01bf96f9 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -50,7 +50,7 @@ main() { 'projects/$PROJECT/topics/test-topic', expectAsync1((request) { var requestTopic = - new pubsub.Topic.fromJson(JSON.decode(request.body)); + new pubsub.Topic.fromJson(jsonDecode(request.body)); 
expect(requestTopic.name, absoluteName); return mock.respond(new pubsub.Topic()..name = absoluteName); }, count: 2)); @@ -152,7 +152,7 @@ main() { // Mock that expect/generates [n] topics in pages of page size // [pageSize]. - registerQueryMock(mock, n, pageSize, [totalCalls]) { + registerQueryMock(MockClient mock, n, pageSize, [totalCalls]) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; @@ -428,7 +428,7 @@ main() { 'projects/$PROJECT/subscriptions', expectAsync1((request) { var requestSubscription = - new pubsub.Subscription.fromJson(JSON.decode(request.body)); + new pubsub.Subscription.fromJson(jsonDecode(request.body)); expect(requestSubscription.name, absoluteName); return mock .respond(new pubsub.Subscription()..name = absoluteName); @@ -543,7 +543,8 @@ main() { // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. - registerQueryMock(mock, n, pageSize, {String topic, int totalCalls}) { + registerQueryMock(MockClient mock, n, pageSize, + {String topic, int totalCalls}) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; @@ -854,24 +855,24 @@ main() { var name = 'test-topic'; var absoluteName = 'projects/$PROJECT/topics/test-topic'; var message = 'Hello, world!'; - var messageBytes = UTF8.encode(message); - var messageBase64 = BASE64.encode(messageBytes); + var messageBytes = utf8.encode(message); + var messageBase64 = base64.encode(messageBytes); var attributes = {'a': '1', 'b': 'text'}; - registerLookup(mock) { + registerLookup(MockClient mock) { mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respond(new pubsub.Topic()..name = absoluteName); })); } - registerPublish(mock, count, fn) { + registerPublish(MockClient mock, count, fn) { mock.register( 'POST', 'projects/test-project/topics/test-topic:publish', expectAsync1((request) { var publishRequest = - new pubsub.PublishRequest.fromJson(JSON.decode(request.body)); + new pubsub.PublishRequest.fromJson(jsonDecode(request.body)); return fn(publishRequest); }, count: count)); } @@ -1053,7 +1054,7 @@ main() { '''; var event = new PushEvent.fromJson(requestBody); expect(event.message.asString, "Hello, world 30 of 50!"); - expect(event.message.attributes['messageNo'], 30); + expect(event.message.attributes['messageNo'], '30'); expect(event.message.attributes['test'], 'hello'); expect(event.subscriptionName, absoluteSubscriptionName); }); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 1a410d8d..e53e2531 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -39,9 +39,9 @@ main() { test('create', () { withMockClient((mock, api) { - mock.register('POST', 'b', expectAsync1((request) { + mock.register('POST', 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body)); expect(requestBucket.name, bucketName); return mock.respond(new storage.Bucket()..name = bucketName); })); @@ -65,9 +65,9 @@ main() { mock.register( 'POST', 'b', - expectAsync1((request) { + expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body)); expect(requestBucket.name, bucketName); 
expect(requestBucket.acl, isNull); expect(request.url.queryParameters['predefinedAcl'], @@ -109,9 +109,9 @@ main() { mock.register( 'POST', 'b', - expectAsync1((request) { + expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body)); expect(requestBucket.name, bucketName); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(requestBucket.acl, isNotNull); @@ -171,9 +171,9 @@ main() { mock.register( 'POST', 'b', - expectAsync1((request) { + expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(JSON.decode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body)); int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; expect(requestBucket.name, bucketName); @@ -333,16 +333,19 @@ main() { bool testArgumentError(e) => e is ArgumentError; bool testDetailedApiError(e) => e is storage.DetailedApiRequestError; - Function expectNotNull(status) => (o) => expect(o, isNotNull); + final expectNotNull = (o) async { + expect(o, isNotNull); + return null; + }; - expectNormalUpload(mock, data, objectName) { + expectNormalUpload(MockClient mock, data, objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { var object = - new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + new storage.Object.fromJson(jsonDecode(mediaUpload.json)); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); @@ -351,14 +354,14 @@ main() { })); } - expectResumableUpload(mock, data, objectName) { + expectResumableUpload(MockClient mock, data, objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); expect(bytes.length, bytesResumableUpload.length); int count = 0; mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { var requestObject = - new storage.Object.fromJson(JSON.decode(request.body)); + new storage.Object.fromJson(jsonDecode(request.body)); expect(requestObject.name, objectName); return mock.respondInitiateResumableUpload(PROJECT); })); @@ -457,7 +460,7 @@ main() { }); test('write-short-error', () { - withMockClient((mock, api) { + withMockClient((MockClient mock, api) { Future test(length) { mock.clear(); mock.registerUpload('POST', 'b/$bucketName/o', @@ -537,7 +540,7 @@ main() { sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); - return new Stream.fromIterable(data) + return new Stream>.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), @@ -629,7 +632,7 @@ main() { .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { var object = - new storage.Object.fromJson(JSON.decode(mediaUpload.json)); + new storage.Object.fromJson(jsonDecode(mediaUpload.json)); ObjectMetadata m = metadata[count]; expect(object.name, objectName); expect(mediaUpload.bytes, bytes); @@ -682,7 +685,7 @@ main() { 'b/$bucketName/o', expectAsync1((request) { var object = - new storage.Object.fromJson(JSON.decode(request.body)); + new storage.Object.fromJson(jsonDecode(request.body)); ObjectMetadata m = metadata[countInitial]; expect(object.name, objectName); expect(object.cacheControl, m.cacheControl); @@ -745,7 +748,7 @@ 
main() {
         .processNormalMediaUpload(request)
         .then(expectAsync1((mediaUpload) {
       var object =
-          new storage.Object.fromJson(JSON.decode(mediaUpload.json));
+          new storage.Object.fromJson(jsonDecode(mediaUpload.json));
       expect(object.name, objectName);
       expect(mediaUpload.bytes, bytes);
       expect(mediaUpload.contentType, 'application/octet-stream');
@@ -797,7 +800,7 @@ main() {
         .processNormalMediaUpload(request)
         .then(expectAsync1((mediaUpload) {
       var object =
-          new storage.Object.fromJson(JSON.decode(mediaUpload.json));
+          new storage.Object.fromJson(jsonDecode(mediaUpload.json));
       expect(object.name, objectName);
       expect(mediaUpload.bytes, bytes);
       expect(mediaUpload.contentType, 'application/octet-stream');
@@ -870,7 +873,7 @@ main() {
       int predefinedIndex = count ~/ acls.length;
       int aclIndex = count % acls.length;
       var object =
-          new storage.Object.fromJson(JSON.decode(mediaUpload.json));
+          new storage.Object.fromJson(jsonDecode(mediaUpload.json));
       expect(object.name, objectName);
       expect(mediaUpload.bytes, bytes);
       expect(mediaUpload.contentType, 'application/octet-stream');

From d4e783a0bc797f533e8a308f870fb8173d3d8445 Mon Sep 17 00:00:00 2001
From: Matias Meno
Date: Wed, 1 Aug 2018 14:07:29 +0200
Subject: [PATCH 109/239] Add generics support for queries and lookups
 (dart-lang/gcloud#55)

---
 pkgs/gcloud/.gitignore                        |  1 +
 pkgs/gcloud/CHANGELOG.md                      |  8 +++
 pkgs/gcloud/README.md                         |  9 ++-
 pkgs/gcloud/analysis_options.yaml             |  3 +-
 pkgs/gcloud/lib/common.dart                   |  6 +-
 pkgs/gcloud/lib/datastore.dart                |  4 +-
 pkgs/gcloud/lib/db.dart                       |  2 +-
 pkgs/gcloud/lib/db/metamodel.dart             |  4 +-
 pkgs/gcloud/lib/http.dart                     |  7 +-
 pkgs/gcloud/lib/pubsub.dart                   |  2 +-
 pkgs/gcloud/lib/service_scope.dart            | 16 +++--
 pkgs/gcloud/lib/src/datastore_impl.dart       | 24 +++----
 pkgs/gcloud/lib/src/db/annotations.dart       |  4 +-
 pkgs/gcloud/lib/src/db/db.dart                | 39 +++++------
 pkgs/gcloud/lib/src/db/model_db.dart          |  2 +-
 pkgs/gcloud/lib/src/db/model_db_impl.dart     | 26 ++++----
 pkgs/gcloud/lib/src/db/models.dart            |  4 +-
 pkgs/gcloud/lib/src/pubsub_impl.dart          | 12 ++--
 pkgs/gcloud/lib/src/storage_impl.dart         | 20 +++---
 pkgs/gcloud/lib/storage.dart                  |  3 +-
 pkgs/gcloud/test/common.dart                  | 13 ++--
 pkgs/gcloud/test/common_e2e.dart              |  2 +-
 .../datastore/e2e/datastore_test_impl.dart    | 29 ++++----
 pkgs/gcloud/test/datastore/e2e/utils.dart     |  3 +-
 .../gcloud/test/datastore/error_matchers.dart | 49 ++------------
 pkgs/gcloud/test/db/e2e/db_test_impl.dart     | 52 +++++++--------
 .../test/db/e2e/metamodel_test_impl.dart      | 10 +--
 pkgs/gcloud/test/db/properties_test.dart      |  4 +-
 pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart  |  2 +-
 pkgs/gcloud/test/pubsub/pubsub_test.dart      | 35 +++++-----
 pkgs/gcloud/test/service_scope_test.dart      | 11 +++-
 pkgs/gcloud/test/storage/e2e_test.dart        | 14 ++--
 pkgs/gcloud/test/storage/storage_test.dart    | 66 ++++++++++---------
 33 files changed, 246 insertions(+), 240 deletions(-)

diff --git a/pkgs/gcloud/.gitignore b/pkgs/gcloud/.gitignore
index 7903d444..96ce5399 100644
--- a/pkgs/gcloud/.gitignore
+++ b/pkgs/gcloud/.gitignore
@@ -3,3 +3,4 @@ pubspec.lock
 packages
 .pub
 .packages
+.idea
\ No newline at end of file
diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md
index 9a6645b8..5a518994 100644
--- a/pkgs/gcloud/CHANGELOG.md
+++ b/pkgs/gcloud/CHANGELOG.md
@@ -1,3 +1,11 @@
+## 0.6.0
+
+* **BREAKING CHANGE:** Add generics support. Instead of writing
+  `db.query(Person).run()` and getting back a generic `Stream<Model>`, you now
+  write `db.query<Person>().run()` and get `Stream<Person>`.
+  The same goes for `.lookup([key])`, which can now be written as
+  `.lookup<Person>([key])` and will return a `List<Person>`.
+
 ## 0.5.0

 * Fixes to support Dart 2.
diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md
index f74b3901..abe92200 100644
--- a/pkgs/gcloud/README.md
+++ b/pkgs/gcloud/README.md
@@ -140,7 +140,14 @@ The function `query` is used to build a `Query` object which can be run to
 perform the query.

 ```dart
-var persons = (await db.query(Person).run()).toList();
+var persons = (await db.query<Person>().run()).toList();
+```
+
+To fetch one or multiple existing entities, use `lookup`.
+
+```dart
+var person = (await db.lookup<Person>([key])).single;
+var people = await db.lookup<Person>([key1, key2]);
 ```

 NOTE: This package include a lower level API provided through the class
diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml
index aeabf856..022bb830 100644
--- a/pkgs/gcloud/analysis_options.yaml
+++ b/pkgs/gcloud/analysis_options.yaml
@@ -1,5 +1,6 @@
 analyzer:
-  strong-mode: true
+  strong-mode:
+    implicit-casts: false
 linter:
   rules:
     - avoid_empty_else
diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart
index c5212778..81248910 100644
--- a/pkgs/gcloud/lib/common.dart
+++ b/pkgs/gcloud/lib/common.dart
@@ -31,11 +31,11 @@ typedef Future<Page<T>> FirstPageProvider<T>(int pageSize);
 /// Helper class to turn a series of pages into a stream.
 class StreamFromPages<T> {
   static const int _PAGE_SIZE = 50;
-  final FirstPageProvider _firstPageProvider;
+  final FirstPageProvider<T> _firstPageProvider;
   bool _pendingRequest = false;
   bool _paused = false;
   bool _cancelled = false;
-  Page _currentPage;
+  Page<T> _currentPage;
   StreamController<T> _controller;

   StreamFromPages(this._firstPageProvider) {
@@ -49,7 +49,7 @@ class StreamFromPages<T> {

   Stream<T> get stream => _controller.stream;

-  void _handleError(e, s) {
+  void _handleError(e, StackTrace s) {
     _controller.addError(e, s);
     _controller.close();
   }
diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart
index 9dd85b12..f643b16b 100644
--- a/pkgs/gcloud/lib/datastore.dart
+++ b/pkgs/gcloud/lib/datastore.dart
@@ -23,7 +23,7 @@ const Symbol _datastoreKey = #gcloud.datastore;
 ///
 /// Accessing this getter outside of a service scope will result in an error.
 /// See the `package:gcloud/service_scope.dart` library for more information.
-Datastore get datastoreService => ss.lookup(_datastoreKey);
+Datastore get datastoreService => ss.lookup(_datastoreKey) as Datastore;

 /// Registers the [Datastore] object within the current service scope.
 ///
@@ -140,7 +140,7 @@ class Key {
       : this.partition = (partition == null) ? Partition.DEFAULT : partition;

   factory Key.fromParent(String kind, int id, {Key parent}) {
-    var partition;
+    Partition partition;
     var elements = [];
     if (parent != null) {
       partition = parent.partition;
diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart
index 3322afd5..9fba24d2 100644
--- a/pkgs/gcloud/lib/db.dart
+++ b/pkgs/gcloud/lib/db.dart
@@ -33,7 +33,7 @@ const Symbol _dbKey = #gcloud.db;
 ///
 /// Accessing this getter outside of a service scope will result in an error.
 /// See the `package:gcloud/service_scope.dart` library for more information.
-DatastoreDB get dbService => ss.lookup(_dbKey);
+DatastoreDB get dbService => ss.lookup(_dbKey) as DatastoreDB;

 /// Registers the [DatastoreDB] object within the current service scope.
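To make the breaking change described in the CHANGELOG and README above concrete, here is a hedged end-to-end sketch of the new generic API. The `Person` model and its fields are invented for illustration; only `@Kind`, the property annotations, `db.query<T>()`, `db.lookup<T>()`, `db.emptyKey` and `Key.append` come from the package.

```dart
import 'dart:async';

import 'package:gcloud/db.dart';

@Kind()
class Person extends Model {
  @StringProperty()
  String name;

  @IntProperty()
  int age;
}

Future example(DatastoreDB db) async {
  // Typed query: run() now yields Stream<Person> instead of Stream<Model>.
  final adults = await (db.query<Person>()
        ..filter('age >=', 18)
        ..order('-age'))
      .run()
      .toList();

  // Typed lookup: returns List<Person> (with null entries for missing keys).
  final key = db.emptyKey.append(Person, id: 42);
  final Person person = (await db.lookup<Person>([key])).single;

  print('${adults.length} adults; looked up: ${person?.name}');
}
```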
/// diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index 81ff5b76..906ddae0 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -13,11 +13,11 @@ class Namespace extends db.ExpandoModel { String get name { // The default namespace will be reported with id 1. if (id == Namespace.EmptyNamespaceId) return null; - return id; + return id as String; } } @db.Kind(name: '__kind__') class Kind extends db.Model { - String get name => id; + String get name => id as String; } diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index 0127ab15..35b9479d 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -20,7 +20,8 @@ const Symbol _authenticatedClientKey = #gcloud.http; /// /// Accessing this getter outside of a service scope will result in an error. /// See the `package:gcloud/service_scope.dart` library for more information. -http.Client get authClientService => ss.lookup(_authenticatedClientKey); +http.Client get authClientService => + ss.lookup(_authenticatedClientKey) as http.Client; /// Registers the [http.Client] object within the current service scope. /// @@ -33,6 +34,8 @@ http.Client get authClientService => ss.lookup(_authenticatedClientKey); void registerAuthClientService(http.Client client, {bool close: true}) { ss.register(_authenticatedClientKey, client); if (close) { - ss.registerScopeExitCallback(() => client.close()); + ss.registerScopeExitCallback(() { + client.close(); + }); } } diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index e12119ec..3659adb1 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -27,7 +27,7 @@ const Symbol _pubsubKey = #gcloud.pubsub; /// /// Accessing this getter outside of a service scope will result in an error. /// See the `package:gcloud/service_scope.dart` library for more information. -PubSub get pubsubService => ss.lookup(_pubsubKey); +PubSub get pubsubService => ss.lookup(_pubsubKey) as PubSub; /// Registers the [pubsub] object within the current service scope. /// diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index d9e5d089..eef569be 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -86,7 +86,8 @@ const Symbol _ServiceScopeKey = #gcloud.service_scope; final _ServiceScope _emptyServiceScope = new _ServiceScope(); /// Returns the current [_ServiceScope] object. -_ServiceScope get _serviceScope => Zone.current[_ServiceScopeKey]; +_ServiceScope get _serviceScope => + Zone.current[_ServiceScopeKey] as _ServiceScope; /// Start a new zone with a new service scope and run [func] inside it. /// @@ -110,7 +111,7 @@ Future fork(Future func(), {Function onError}) { /// /// The registered on-scope-exit functions are executed in reverse registration /// order. -void register(Object key, Object value, {onScopeExit()}) { +void register(Object key, Object value, {ScopeExitCallback onScopeExit}) { var serviceScope = _serviceScope; if (serviceScope == null) { throw new StateError('Not running inside a service scope zone.'); @@ -122,7 +123,7 @@ void register(Object key, Object value, {onScopeExit()}) { /// /// The registered on-scope-exit functions are executed in reverse registration /// order. 
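The service-scope changes above tighten the exit-hook type to the new `ScopeExitCallback` typedef (a function returning a `Future`). A small hedged sketch of how `fork`, `register`, `registerScopeExitCallback` and `lookup` fit together; the `#myservice.cache` key and the cache object are illustrative only:

```dart
import 'dart:async';

import 'package:gcloud/service_scope.dart' as ss;

const Symbol _cacheKey = #myservice.cache;

Map<String, String> get cacheService =>
    ss.lookup(_cacheKey) as Map<String, String>;

Future main() {
  // fork() runs the callback in a zone with a fresh service scope; everything
  // registered inside it is torn down in reverse order when it completes.
  return ss.fork(() async {
    final cache = <String, String>{};
    ss.register(_cacheKey, cache, onScopeExit: () async {
      cache.clear(); // runs when the scope exits
    });

    // A bare exit hook matching the ScopeExitCallback typedef.
    ss.registerScopeExitCallback(() async {
      print('scope is shutting down');
    });

    print(cacheService['greeting'] ?? 'nothing cached yet');
  });
}
```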
-void registerScopeExitCallback(onScopeExitCallback()) { +void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) { var serviceScope = _serviceScope; if (serviceScope == null) { throw new StateError('Not running inside a service scope zone.'); @@ -167,7 +168,8 @@ class _ServiceScope { /// Inserts a new item to the service scope using [serviceScopeKey]. /// /// Optionally calls a [onScopeExit] function once this service scope ends. - void register(Object serviceScopeKey, Object value, {onScopeExit()}) { + void register(Object serviceScopeKey, Object value, + {ScopeExitCallback onScopeExit}) { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); @@ -187,7 +189,7 @@ class _ServiceScope { /// Inserts a new on-scope-exit function to be called once this service scope /// ends. - void registerOnScopeExitCallback(onScopeExitCallback()) { + void registerOnScopeExitCallback(ScopeExitCallback onScopeExitCallback) { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); @@ -274,10 +276,12 @@ class _ServiceScope { } } +typedef Future ScopeExitCallback(); + class _RegisteredEntry { final Object key; final Object value; - final Function scopeExitCallback; + final ScopeExitCallback scopeExitCallback; _RegisteredEntry(this.key, this.value, this.scopeExitCallback); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 789a8865..6beeacee 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -40,12 +40,13 @@ class DatastoreImpl implements datastore.Datastore { ..namespaceId = key.partition.namespace; apiKey.path = key.elements.map((datastore.KeyElement element) { - var part = new api.PathElement(); + final part = new api.PathElement(); part.kind = element.kind; - if (element.id is int) { - part.id = '${element.id}'; - } else if (element.id is String) { - part.name = element.id; + final id = element.id; + if (id is int) { + part.id = '$id'; + } else if (id is String) { + part.name = id; } else if (enforceId) { throw new datastore.ApplicationError( 'Error while encoding entity key: Using `null` as the id is not ' @@ -69,7 +70,7 @@ class DatastoreImpl implements datastore.Datastore { } }).toList(); - var partition; + datastore.Partition partition; if (key.partitionId != null) { partition = new datastore.Partition(key.partitionId.namespaceId); // TODO: assert projectId. 
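The key conversion above is where an integer id becomes the JSON API path element's `id` (sent as a string) and a `String` id becomes its `name`. A tiny illustrative sketch, with made-up kinds and ids:

```dart
import 'package:gcloud/datastore.dart' as datastore;

void main() {
  // Integer ids travel as PathElement.id (stringified by the code above),
  // string ids travel as PathElement.name; a null id is only accepted for
  // inserts that should receive an auto-allocated id.
  final byId = new datastore.Key([new datastore.KeyElement('Person', 42)]);
  final byName = new datastore.Key([new datastore.KeyElement('Person', 'ada')]);

  print(byId.elements.single.id);   // 42   -> {"kind": "Person", "id": "42"}
  print(byName.elements.single.id); // ada  -> {"kind": "Person", "name": "ada"}
}
```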
@@ -267,7 +268,7 @@ class DatastoreImpl implements datastore.Datastore { return orders.map(_convertDatastore2ApiOrder).toList(); } - static Future _handleError(error, stack) { + static Future _handleError(error, StackTrace stack) { if (error is api.DetailedApiRequestError) { if (error.status == 400) { return new Future.error( @@ -340,7 +341,7 @@ class DatastoreImpl implements datastore.Datastore { } } return _api.projects.commit(request, _project).then((result) { - var keys; + List keys; if (autoIdInserts != null && autoIdInserts.length > 0) { List mutationResults = result.mutationResults; assert(autoIdStartIndex != -1); @@ -454,8 +455,7 @@ class DatastoreImpl implements datastore.Datastore { ..namespaceId = partition.namespace; } - return QueryPageImpl - .runQuery(_api, _project, request, query.limit) + return QueryPageImpl.runQuery(_api, _project, request, query.limit) .catchError(_handleError); } @@ -604,8 +604,8 @@ class QueryPageImpl implements Page { }); } - return QueryPageImpl - .runQuery(_api, _project, _nextRequest, _remainingNumberOfEntities) + return QueryPageImpl.runQuery( + _api, _project, _nextRequest, _remainingNumberOfEntities) .catchError(DatastoreImpl._handleError); } } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 88a511e3..b5206be3 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -174,7 +174,7 @@ class ModelKeyProperty extends PrimitiveProperty { Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; - return db.toDatastoreKey(value); + return db.toDatastoreKey(value as Key); } Object decodePrimitiveValue(ModelDB db, Object value) { @@ -201,7 +201,7 @@ class BlobProperty extends PrimitiveProperty { Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { if (value == null) return null; - return new ds.BlobValue(value); + return new ds.BlobValue(value as List); } Object decodePrimitiveValue(ModelDB db, Object value) { diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index d463101e..e466c4a1 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -37,8 +37,9 @@ class Transaction { /** * Looks up [keys] within this transaction. */ - Future> lookup(List keys) { - return _lookupHelper(db, keys, datastoreTransaction: _datastoreTransaction); + Future> lookup(List keys) { + return _lookupHelper(db, keys, + datastoreTransaction: _datastoreTransaction); } /** @@ -60,7 +61,7 @@ class Transaction { * Note that [ancestorKey] is required, since a transaction is not allowed to * touch/look at an arbitrary number of rows. */ - Query query(Type kind, Key ancestorKey, {Partition partition}) { + Query query(Key ancestorKey, {Partition partition}) { // TODO(#25): The `partition` element is redundant and should be removed. 
if (partition == null) { partition = ancestorKey.partition; @@ -70,7 +71,7 @@ class Transaction { 'as the partition where the query executes in.'); } _checkSealed(); - return new Query(db, kind, + return new Query(db, partition: partition, ancestorKey: ancestorKey, datastoreTransaction: _datastoreTransaction); @@ -107,7 +108,7 @@ class Transaction { } } -class Query { +class Query { final _relationMapping = const { '<': ds.FilterRelation.LessThan, '<=': ds.FilterRelation.LessThanOrEqual, @@ -128,12 +129,12 @@ class Query { int _offset; int _limit; - Query(DatastoreDB dbImpl, Type kind, + Query(DatastoreDB dbImpl, {Partition partition, Key ancestorKey, ds.Transaction datastoreTransaction}) : _db = dbImpl, - _kind = dbImpl.modelDB.kindName(kind), + _kind = dbImpl.modelDB.kindName(T), _partition = partition, _ancestorKey = ancestorKey, _transaction = datastoreTransaction; @@ -215,8 +216,8 @@ class Query { * return the newest updates performed on the datastore since updates * will be reflected in the indices in an eventual consistent way. */ - Stream run() { - var ancestorKey; + Stream run() { + ds.Key ancestorKey; if (_ancestorKey != null) { ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); } @@ -228,7 +229,7 @@ class Query { offset: _offset, limit: _limit); - var partition; + ds.Partition partition; if (_partition != null) { partition = new ds.Partition(_partition.namespace); } @@ -236,7 +237,7 @@ class Query { return new StreamFromPages((int pageSize) { return _db.datastore .query(query, transaction: _transaction, partition: partition); - }).stream.map(_db.modelDB.fromDatastoreEntity); + }).stream.map(_db.modelDB.fromDatastoreEntity); } // TODO: @@ -308,7 +309,7 @@ class DatastoreDB { /** * Build a query for [kind] models. */ - Query query(Type kind, {Partition partition, Key ancestorKey}) { + Query query({Partition partition, Key ancestorKey}) { // TODO(#26): There is only one case where `partition` is not redundant // Namely if `ancestorKey == null` and `partition != null`. We could // say we get rid of `partition` and enforce `ancestorKey` to @@ -324,8 +325,7 @@ class DatastoreDB { 'Ancestor queries must have the same partition in the ancestor key ' 'as the partition where the query executes in.'); } - return new Query(this, kind, - partition: partition, ancestorKey: ancestorKey); + return new Query(this, partition: partition, ancestorKey: ancestorKey); } /** @@ -334,8 +334,8 @@ class DatastoreDB { * For transactions, please use [beginTransaction] and call the [lookup] * method on it's returned [Transaction] object. 
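`Transaction.lookup` gets the same generic treatment as the plain `db.lookup`. A hedged read-modify-write sketch; it assumes the package's `withTransaction` and `queueMutations` helpers behave as their doc comments describe and reuses the hypothetical `Person` model from the earlier sketch:

```dart
import 'dart:async';

import 'package:gcloud/db.dart';

Future renamePerson(DatastoreDB db, Key personKey) {
  return db.withTransaction((Transaction tx) async {
    // Typed lookup inside the transaction: a List<Person>.
    final person = (await tx.lookup<Person>([personKey])).single;
    if (person == null) {
      await tx.rollback();
      return;
    }

    person.name = '${person.name} (renamed)';

    // Mutations are queued on the transaction and sent with commit().
    tx.queueMutations(inserts: [person]);
    await tx.commit();
  });
}
```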
*/ - Future> lookup(List keys) { - return _lookupHelper(this, keys); + Future> lookup(List keys) { + return _lookupHelper(this, keys); } /** @@ -357,7 +357,8 @@ Future _commitHelper(DatastoreDB db, {List inserts, List deletes, ds.Transaction datastoreTransaction}) { - var entityInserts, entityAutoIdInserts, entityDeletes; + List entityInserts, entityAutoIdInserts; + List entityDeletes; var autoIdModelInserts; if (inserts != null) { entityInserts = []; @@ -399,12 +400,12 @@ Future _commitHelper(DatastoreDB db, }); } -Future> _lookupHelper(DatastoreDB db, List keys, +Future> _lookupHelper(DatastoreDB db, List keys, {ds.Transaction datastoreTransaction}) { var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList(); return db.datastore .lookup(entityKeys, transaction: datastoreTransaction) .then((List entities) { - return entities.map(db.modelDB.fromDatastoreEntity).toList(); + return entities.map(db.modelDB.fromDatastoreEntity).toList(); }); } diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 8feb11e7..07cd077a 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -28,7 +28,7 @@ abstract class ModelDB { /** * Converts a [ds.Entity] to a [Model] instance. */ - Model fromDatastoreEntity(ds.Entity entity); + T fromDatastoreEntity(ds.Entity entity); /** * Returns the kind name for instances of [type]. diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index d1a016d3..090d77de 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -113,7 +113,7 @@ class ModelDBImpl implements ModelDB { } /// Converts a [ds.Entity] to a [Model] instance. - Model fromDatastoreEntity(ds.Entity entity) { + T fromDatastoreEntity(ds.Entity entity) { if (entity == null) return null; Key key = fromDatastoreKey(entity.key); @@ -125,7 +125,7 @@ class ModelDBImpl implements ModelDB { } try { - return modelDescription.decodeEntity(this, key, entity); + return modelDescription.decodeEntity(this, key, entity); } catch (error, stack) { throw new StateError('Error while decoding entity ($error, $stack).'); } @@ -184,7 +184,8 @@ class ModelDBImpl implements ModelDB { void _initialize(Iterable libraries) { libraries.forEach((mirrors.LibraryMirror lm) { lm.declarations.values - .where((d) => d is mirrors.ClassMirror && d.hasReflectedType) + .whereType() + .where((d) => d.hasReflectedType) .forEach((declaration) { _tryLoadNewModelClass(declaration); }); @@ -217,7 +218,7 @@ class ModelDBImpl implements ModelDB { 'Cannot have more than one ModelMetadata() annotation ' 'on a Model class'); } - kindAnnotation = instance.reflectee; + kindAnnotation = instance.reflectee as Kind; } } @@ -242,7 +243,7 @@ class ModelDBImpl implements ModelDB { mirrors.ClassMirror modelClass, String name, bool useIntegerId) { assert(!_modelDesc2Type.containsKey(modelClass.reflectedType)); - var modelDesc; + _ModelDescription modelDesc; if (_isExpandoClass(modelClass)) { modelDesc = new _ExpandoModelDescription(name, useIntegerId); } else { @@ -289,9 +290,9 @@ class ModelDBImpl implements ModelDB { if (memberMap.containsKey(fieldSymbol) && memberMap[fieldSymbol].isGetter && decl.metadata != null) { - var propertyAnnotations = decl.metadata + final propertyAnnotations = decl.metadata .map((mirrors.InstanceMirror mirror) => mirror.reflectee) - .where((Object property) => property is Property) + .whereType() .toList(); if (propertyAnnotations.length > 1) { @@ -305,7 +306,7 @@ 
class ModelDBImpl implements ModelDB { var fieldName = mirrors.MirrorSystem.getName(fieldSymbol); // Determine the name to use for the property in datastore. - var propertyName = (property as Property).propertyName; + var propertyName = property.propertyName; if (propertyName == null) propertyName = fieldName; if (properties.containsKey(fieldName)) { @@ -412,7 +413,7 @@ class _ModelDescription { properties[propertyName] = prop.encodeValue(db, value); } - Model decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { + H decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { if (entity == null) return null; // NOTE: this assumes a default constructor for the model classes! @@ -426,7 +427,7 @@ class _ModelDescription { db._propertiesForModel(this).forEach((String fieldName, Property prop) { _decodeProperty(db, entity, mirror, fieldName, prop); }); - return mirror.reflectee; + return mirror.reflectee as H; } _decodeProperty(ModelDBImpl db, ds.Entity entity, @@ -503,7 +504,7 @@ class _ExpandoModelDescription extends _ModelDescription { return entity; } - Model decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { + T decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { if (entity == null) return null; ExpandoModel model = super.decodeEntity(db, key, entity); @@ -513,7 +514,8 @@ class _ExpandoModelDescription extends _ModelDescription { model.additionalProperties[key] = value; } }); - return model; + // TODO: check if there is a more elegant solution than this + return model as T; } String fieldNameToPropertyName(String fieldName) { diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index b4672aa8..b0278ef1 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -37,7 +37,7 @@ class Key { */ Key get parent { if (_parent is Key) { - return _parent; + return _parent as Key; } return null; } @@ -50,7 +50,7 @@ class Key { while (obj is! Partition) { obj = (obj as Key)._parent; } - return obj; + return obj as Partition; } Key append(Type modelType, {Object id}) { diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 1a34d39b..d89ba7c8 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -19,7 +19,7 @@ class _PubSubImpl implements PubSub { return name.startsWith('projects/') ? name : '${_topicPrefix}$name'; } - String _fullSubscriptionName(name) { + String _fullSubscriptionName(String name) { return name.startsWith('projects/') ? name : '${_subscriptionPrefix}$name'; } @@ -107,7 +107,7 @@ class _PubSubImpl implements PubSub { .then((_) => null); } - void _checkTopicName(name) { + void _checkTopicName(String name) { if (name.startsWith('projects/') && !name.contains('/topics/')) { throw new ArgumentError( "Illegal topic name. Absolute topic names must have the form " @@ -119,7 +119,7 @@ class _PubSubImpl implements PubSub { } } - void _checkSubscriptionName(name) { + void _checkSubscriptionName(String name) { if (name.startsWith('projects/') && !name.contains('/subscriptions/')) { throw new ArgumentError( "Illegal subscription name. 
Absolute subscription names must have " @@ -150,7 +150,7 @@ class _PubSubImpl implements PubSub { } Stream listTopics() { - Future> firstPage(pageSize) { + Future> firstPage(int pageSize) { return _listTopics(pageSize, null) .then((response) => new _TopicPageImpl(this, pageSize, response)); } @@ -185,7 +185,7 @@ class _PubSubImpl implements PubSub { } Stream listSubscriptions([String query]) { - Future> firstPage(pageSize) { + Future> firstPage(int pageSize) { return _listSubscriptions(query, pageSize, null).then((response) => new _SubscriptionPageImpl(this, query, pageSize, response)); } @@ -313,7 +313,7 @@ class _PushEventImpl implements PushEvent { String data = body['message']['data']; Map labels = new HashMap(); body['message']['labels'].forEach((label) { - var key = label['key']; + String key = label['key']; var value = label['strValue']; if (value == null) value = label['numValue']; labels[key] = value.toString(); diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 90a17fe3..a190d044 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -80,7 +80,7 @@ class _StorageImpl implements Storage { } Stream listBucketNames() { - Future<_BucketPageImpl> firstPage(pageSize) { + Future<_BucketPageImpl> firstPage(int pageSize) { return _listBuckets(pageSize, null) .then((response) => new _BucketPageImpl(this, pageSize, response)); } @@ -161,7 +161,7 @@ class _BucketImpl implements Bucket { object = objectMetadata._object; // If no predefined ACL is passed use the default (if any). - var predefinedName; + String predefinedName; if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) { var predefined = predefinedAcl != null ? predefinedAcl : _defaultPredefinedObjectAcl; @@ -186,7 +186,7 @@ class _BucketImpl implements Bucket { Acl acl, PredefinedAcl predefinedAcl, String contentType}) { - var sink = write(objectName, + _MediaUploadStreamSink sink = write(objectName, length: bytes.length, metadata: metadata, acl: acl, @@ -220,7 +220,7 @@ class _BucketImpl implements Bucket { options = new storage_api.PartialDownloadOptions(range); } - var media = await _api.objects + commons.Media media = await _api.objects .get(bucketName, objectName, downloadOptions: options); yield* media.stream; @@ -229,7 +229,7 @@ class _BucketImpl implements Bucket { Future info(String objectName) { return _api.objects .get(bucketName, objectName, projection: 'full') - .then((object) => new _ObjectInfoImpl(object)); + .then((object) => new _ObjectInfoImpl(object as storage_api.Object)); } Future delete(String objectName) { @@ -237,7 +237,7 @@ class _BucketImpl implements Bucket { } Stream list({String prefix}) { - Future<_ObjectPageImpl> firstPage(pageSize) { + Future<_ObjectPageImpl> firstPage(int pageSize) { return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) .then((response) => new _ObjectPageImpl(this, prefix, pageSize, response)); @@ -402,7 +402,7 @@ class _ObjectMetadata implements ObjectMetadata { final storage_api.Object _object; Acl _cachedAcl; ObjectGeneration _cachedGeneration; - Map _cachedCustom; + Map _cachedCustom; _ObjectMetadata( {Acl acl, @@ -496,7 +496,7 @@ class _MediaUploadStreamSink implements StreamSink> { final List> buffer = new List>(); final _controller = new StreamController>(sync: true); StreamSubscription _subscription; - StreamController _resumableController; + StreamController> _resumableController; final _doneCompleter = new Completer(); static const int _STATE_LENGTH_KNOWN = 
0; @@ -574,7 +574,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - _onError(e, s) { + _onError(e, StackTrace s) { // If still deciding on the strategy complete with error. Otherwise // forward the error for default processing. if (_state == _STATE_PROBING_LENGTH) { @@ -584,7 +584,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - _completeError(e, s) { + _completeError(e, StackTrace s) { if (_state != _STATE_LENGTH_KNOWN) { // Always cancel subscription on error. _subscription.cancel(); diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 90b96808..d3edf2f3 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -55,6 +55,7 @@ import 'dart:convert'; import 'package:http/http.dart' as http; import 'package:googleapis/storage/v1.dart' as storage_api; +import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; import 'common.dart'; import 'service_scope.dart' as ss; @@ -72,7 +73,7 @@ const Symbol _storageKey = #gcloud.storage; /// /// Accessing this getter outside of a service scope will result in an error. /// See the `package:gcloud/service_scope.dart` library for more information. -Storage get storageService => ss.lookup(_storageKey); +Storage get storageService => ss.lookup(_storageKey) as Storage; /// Registers the [storage] object within the current service scope. /// diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 68de98fb..0c516add 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -24,7 +24,7 @@ class MockClient extends http.BaseClient { final String rootPath; final Uri rootUri; - Map> mocks = {}; + Map> mocks = {}; http_testing.MockClient client; MockClient(String hostname, String rootPath) @@ -67,8 +67,9 @@ class MockClient extends http.BaseClient { throw 'No mock handler for method ${request.method} found. 
' 'Request URL was: ${request.url}'; } - var mockHandler; - mocks[request.method].forEach((pattern, handler) { + http_testing.MockClientHandler mockHandler; + mocks[request.method] + .forEach((pattern, http_testing.MockClientHandler handler) { if (pattern.matchAsPrefix(path) != null) { mockHandler = handler; } @@ -96,7 +97,7 @@ class MockClient extends http.BaseClient { } Future respondInitiateResumableUpload(project) { - Map headers = new Map.from(RESPONSE_HEADERS); + final headers = new Map.from(RESPONSE_HEADERS); headers['location'] = 'https://www.googleapis.com/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' @@ -130,7 +131,7 @@ class MockClient extends http.BaseClient { return new http.Response.bytes(myBytes, 200, headers: headers); } - Future respondError(statusCode) { + Future respondError(int statusCode) { var error = { 'error': {'code': statusCode, 'message': 'error'} }; @@ -147,7 +148,7 @@ class MockClient extends http.BaseClient { var boundary = contentType.parameters['boundary']; var partCount = 0; - var json; + String json; new Stream.fromIterable([ request.bodyBytes, [13, 10] diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index d0ba0f8f..d3909935 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -64,7 +64,7 @@ Future serviceKeyJson(String serviceKeyLocation) { if (result.exitCode != 0) { throw new Exception('Failed to run gsutil, ${result.stderr}'); } - return result.stdout; + return result.stdout.toString(); }); } diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 1936d38a..deabb916 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -44,14 +44,14 @@ Future sleep(Duration duration) { return completer.future; } -Future> consumePages(FirstPageProvider provider) { +Future> consumePages(FirstPageProvider provider) { return new StreamFromPages(provider).stream.toList(); } void runTests(Datastore datastore, String namespace) { Partition partition = new Partition(namespace); - Future withTransaction(Function f, {bool xg: false}) { + Future withTransaction(FutureOr f(Transaction t), {bool xg: false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } @@ -379,7 +379,8 @@ void runTests(Datastore datastore, String namespace) { return insert([], unnamedEntities5).then((keys) { keys.forEach((key) => expect(isValidKey(key), isTrue)); return testLookup(keys, unnamedEntities5, - transactional: true, xg: true).then((_) { + transactional: true, xg: true) + .then((_) { return delete(keys); }); }); @@ -564,8 +565,7 @@ void runTests(Datastore datastore, String namespace) { for (var i = 0; i < NUM_TRANSACTIONS; i++) { transactions.add(datastore.beginTransaction(crossEntityGroup: xg)); } - return Future - .wait(transactions) + return Future.wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction var lookups = >>[]; @@ -643,12 +643,13 @@ void runTests(Datastore datastore, String namespace) { int offset, int limit}) { return testQuery(kind, - filters: filters, - orders: orders, - transactional: transactional, - xg: xg, - offset: offset, - limit: limit).then((List entities) { + filters: filters, + orders: orders, + transactional: transactional, + xg: xg, + offset: offset, + limit: limit) + .then((List entities) { 
expect(entities.length, equals(expectedEntities.length)); if (correctOrder) { @@ -719,7 +720,7 @@ void runTests(Datastore datastore, String namespace) { // Reverse the order return -1 * (a.properties[QUERY_KEY] as String) - .compareTo(b.properties[QUERY_KEY]); + .compareTo(b.properties[QUERY_KEY].toString()); }; var filterFunction = (Entity entity) { @@ -1103,10 +1104,10 @@ Future waitUntilEntitiesHelper( Future main() async { Datastore datastore; - BaseClient client; + Client client; var scopes = datastore_impl.DatastoreImpl.SCOPES; - await withAuthClient(scopes, (String project, httpClient) { + await withAuthClient(scopes, (String project, Client httpClient) { datastore = new datastore_impl.DatastoreImpl(httpClient, project); client = httpClient; return cleanupDB(datastore, null); diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index bc7d4239..05664388 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -18,7 +18,8 @@ const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; final TEST_BLOB_INDEXED_VALUE = new BlobValue([0xaa, 0xaa, 0xff, 0xff]); -buildKey(int i, {Function idFunction, String kind: TEST_KIND, Partition p}) { +Key buildKey(int i, + {Function idFunction, String kind: TEST_KIND, Partition p}) { var path = [new KeyElement(kind, idFunction == null ? null : idFunction(i))]; return new Key(path, partition: p); } diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 9d0f7b1d..9442e71f 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -9,48 +9,13 @@ import 'dart:io'; import 'package:test/test.dart'; import 'package:gcloud/datastore.dart'; -class _ApplicationError extends TypeMatcher { - const _ApplicationError() : super("ApplicationError"); - bool matches(item, Map matchState) => item is ApplicationError; -} +const isApplicationError = const TypeMatcher(); -class _DataStoreError extends TypeMatcher { - const _DataStoreError() : super("DataStoreError"); - bool matches(item, Map matchState) => item is DatastoreError; -} +const isDataStoreError = const TypeMatcher(); +const isTransactionAbortedError = const TypeMatcher(); +const isNeedIndexError = const TypeMatcher(); +const isTimeoutError = const TypeMatcher(); -class _TransactionAbortedError extends TypeMatcher { - const _TransactionAbortedError() : super("TransactionAbortedError"); - bool matches(item, Map matchState) => item is TransactionAbortedError; -} +const isInt = const TypeMatcher(); -class _NeedIndexError extends TypeMatcher { - const _NeedIndexError() : super("NeedIndexError"); - bool matches(item, Map matchState) => item is NeedIndexError; -} - -class _TimeoutError extends TypeMatcher { - const _TimeoutError() : super("TimeoutError"); - bool matches(item, Map matchState) => item is TimeoutError; -} - -class _IntMatcher extends TypeMatcher { - const _IntMatcher() : super("IntMatcher"); - bool matches(item, Map matchState) => item is int; -} - -class _SocketException extends TypeMatcher { - const _SocketException() : super("SocketException"); - bool matches(item, Map matchState) => item is SocketException; -} - -const isApplicationError = const _ApplicationError(); - -const isDataStoreError = const _DataStoreError(); -const isTransactionAbortedError = const _TransactionAbortedError(); -const isNeedIndexError = const _NeedIndexError(); -const 
isTimeoutError = const _TimeoutError(); - -const isInt = const _IntMatcher(); - -const isSocketException = const _SocketException(); +const isSocketException = const TypeMatcher(); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 4ec961cf..5d4c4092 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -67,7 +67,7 @@ class Person extends db.Model { operator ==(Object other) => sameAs(other); - sameAs(Object other) { + bool sameAs(Object other) { return other is Person && id == other.id && parentKey == other.parentKey && @@ -235,7 +235,7 @@ void runTests(db.DatastoreDB store, String namespace) { ..parentKey = root ..age = 42 + i ..name = 'user$i' - ..nickname = 'nickname${i%3}'); + ..nickname = 'nickname${i % 3}'); } return testInsertLookupDelete(users); }); @@ -251,7 +251,7 @@ void runTests(db.DatastoreDB store, String namespace) { expandoPerson.bar = i; expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); expect(expandoPerson.additionalProperties['bar'], equals(i)); - expandoPersons.add(expandoPerson); + expandoPersons.add(expandoPerson as ExpandoPerson); } return testInsertLookupDelete(expandoPersons); }); @@ -289,7 +289,7 @@ void runTests(db.DatastoreDB store, String namespace) { ..parentKey = root ..age = 42 + i ..name = 'user$i' - ..nickname = 'nickname${i%3}'); + ..nickname = 'nickname${i % 3}'); } var persons = []; for (var i = 335; i <= 336; i++) { @@ -396,7 +396,7 @@ void runTests(db.DatastoreDB store, String namespace) { ..wife = root.append(User, id: 42 + i) ..age = 42 + i ..name = 'user$i' - ..nickname = 'nickname${i%3}' + ..nickname = 'nickname${i % 3}' ..languages = languages); } @@ -411,7 +411,7 @@ void runTests(db.DatastoreDB store, String namespace) { expandoPerson.bar = i; expect(expandoPerson.additionalProperties['foo'], equals('foo$i')); expect(expandoPerson.additionalProperties['bar'], equals(i)); - expandoPersons.add(expandoPerson); + expandoPersons.add(expandoPerson as ExpandoPerson); } var LOWER_BOUND = 'user2'; @@ -457,7 +457,7 @@ void runTests(db.DatastoreDB store, String namespace) { // objects. 
() { return store - .query(Person, partition: partition) + .query(partition: partition) .run() .toList() .then((List models) { @@ -468,7 +468,7 @@ void runTests(db.DatastoreDB store, String namespace) { // All users query () { return store - .query(User, partition: partition) + .query(partition: partition) .run() .toList() .then((List models) { @@ -478,7 +478,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Sorted query () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..order('-name') ..order('nickname'); var models = await runQueryWithExponentialBackoff( @@ -486,7 +486,7 @@ void runTests(db.DatastoreDB store, String namespace) { compareModels(usersSortedNameDescNicknameAsc, models); }, () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..order('-name') ..order('-nickname') ..run(); @@ -497,7 +497,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Sorted query with filter () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('nickname'); @@ -506,7 +506,7 @@ void runTests(db.DatastoreDB store, String namespace) { compareModels(usersSortedAndFilteredNameDescNicknameAsc, models); }, () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..filter('name >=', LOWER_BOUND) ..order('-name') ..order('-nickname') @@ -518,7 +518,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Filter lists () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..filter('languages =', 'foo') ..order('name') ..run(); @@ -527,7 +527,7 @@ void runTests(db.DatastoreDB store, String namespace) { compareModels(fooUsers, models, anyOrder: true); }, () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..filter('languages =', 'bar') ..order('name') ..run(); @@ -539,7 +539,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Filter equals () async { var wifeKey = root.append(User, id: usersWithWife.first.wife.id); - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..filter('wife =', wifeKey) ..run(); var models = await runQueryWithExponentialBackoff( @@ -549,7 +549,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Simple limit/offset test. () async { - var query = store.query(User, partition: partition) + var query = store.query(partition: partition) ..order('-name') ..order('nickname') ..offset(3) @@ -563,7 +563,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter on normal property. 
() async { - var query = store.query(ExpandoPerson, partition: partition) + var query = store.query(partition: partition) ..filter('name =', expandoPersons.last.name) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); @@ -571,7 +571,7 @@ void runTests(db.DatastoreDB store, String namespace) { }, // Expando queries: Filter on expanded String property () async { - var query = store.query(ExpandoPerson, partition: partition) + var query = store.query(partition: partition) ..filter('foo =', (expandoPersons.last as dynamic).foo) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); @@ -579,7 +579,7 @@ void runTests(db.DatastoreDB store, String namespace) { }, // Expando queries: Filter on expanded int property () async { - var query = store.query(ExpandoPerson, partition: partition) + var query = store.query(partition: partition) ..filter('bar =', (expandoPersons.last as dynamic).bar) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); @@ -588,7 +588,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Expando queries: Filter normal property with different // propertyName (datastore name is 'NN'). () async { - var query = store.query(ExpandoPerson, partition: partition) + var query = store.query(partition: partition) ..filter('nickname =', expandoPersons.last.nickname) ..run(); var models = await runQueryWithExponentialBackoff(query, 1); @@ -649,14 +649,14 @@ Future waitUntilEntitiesGone( Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, bool positive, db.Partition partition) { - var keysByKind = {}; + var keysByKind = >{}; for (var key in keys) { keysByKind.putIfAbsent(key.type, () => []).add(key); } - Future waitForKeys(Type kind, List keys) { + Future waitForKeys(List keys) { return mdb - .query(kind, partition: partition) + .query(partition: partition) .run() .toList() .then((List models) { @@ -666,9 +666,9 @@ Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, if (key == model.key) found = true; } if (positive) { - if (!found) return waitForKeys(kind, keys); + if (!found) return waitForKeys(keys); } else { - if (found) return waitForKeys(kind, keys); + if (found) return waitForKeys(keys); } } return null; @@ -676,7 +676,7 @@ Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, } return Future.forEach(keysByKind.keys.toList(), (Type kind) { - return waitForKeys(kind, keysByKind[kind]); + return waitForKeys(keysByKind[kind]); }); } diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index f8169d7e..130cf2f5 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -58,12 +58,8 @@ void runTests(datastore, db.DatastoreDB store) { return datastore.commit(inserts: entities).then((_) { return sleep(const Duration(seconds: 10)).then((_) { - var namespaceQuery = store.query(Namespace); - return namespaceQuery - .run() - .map((m) => m as Namespace) - .toList() - .then((namespaces) { + var namespaceQuery = store.query(); + return namespaceQuery.run().toList().then((namespaces) { expect(namespaces.length, greaterThanOrEqualTo(3)); expect(namespaces, contains(cond((ns) => ns.name == null))); expect( @@ -79,7 +75,7 @@ void runTests(datastore, db.DatastoreDB store) { continue; } var partition = store.newPartition(namespace.name); - var kindQuery = store.query(Kind, partition: partition); + var kindQuery = store.query(partition: partition); futures.add(kindQuery.run().toList().then((List 
kinds) { expect(kinds.length, greaterThanOrEqualTo(2)); if (namespace.name == null) { diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 345db687..83944234 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -199,7 +199,7 @@ class CustomProperty extends StringProperty { Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; - return new Custom()..customValue = value; + return new Custom()..customValue = value as String; } Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { @@ -243,7 +243,7 @@ class ModelDBMock implements ModelDB { } Map propertiesForModel(modelDescription) => null; - Model fromDatastoreEntity(datastore.Entity entity) => null; + T fromDatastoreEntity(datastore.Entity entity) => null; datastore.Entity toDatastoreEntity(Model model) => null; String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 187e9f1b..e2a9d23a 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -12,7 +12,7 @@ void main() { PubSub pubsub; String project; String prefix; - BaseClient client; + Client client; setUpAll(() { // Generate a unique prefix for all names generated by the tests. diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 01bf96f9..e802de4b 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -5,6 +5,7 @@ import 'dart:async'; import 'dart:convert'; +import 'package:http/http.dart' as http; import 'package:test/test.dart'; import 'package:gcloud/pubsub.dart'; @@ -48,9 +49,9 @@ main() { mock.register( 'PUT', 'projects/$PROJECT/topics/test-topic', - expectAsync1((request) { + expectAsync1((http.Request request) { var requestTopic = - new pubsub.Topic.fromJson(jsonDecode(request.body)); + new pubsub.Topic.fromJson(jsonDecode(request.body) as Map); expect(requestTopic.name, absoluteName); return mock.respond(new pubsub.Topic()..name = absoluteName); }, count: 2)); @@ -152,8 +153,9 @@ main() { // Mock that expect/generates [n] topics in pages of page size // [pageSize]. - registerQueryMock(MockClient mock, n, pageSize, [totalCalls]) { - var totalPages = (n + pageSize - 1) ~/ pageSize; + registerQueryMock(MockClient mock, int n, int pageSize, + [int totalCalls]) { + int totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. 
if (totalPages == 0) totalPages = 1; // Can pass in total calls if this mock is overwritten before all @@ -186,7 +188,7 @@ main() { } group('list', () { - Future q(count) { + Future q(int count) { var mock = mockClient(); registerQueryMock(mock, count, 50); @@ -371,7 +373,7 @@ main() { }); test('multiple', () { - runTest(n, pageSize) { + runTest(int n, int pageSize) { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; @@ -379,7 +381,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, n, pageSize); - handlePage(page) { + handlePage(Page page) { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, @@ -427,8 +429,8 @@ main() { 'PUT', 'projects/$PROJECT/subscriptions', expectAsync1((request) { - var requestSubscription = - new pubsub.Subscription.fromJson(jsonDecode(request.body)); + var requestSubscription = new pubsub.Subscription.fromJson( + jsonDecode(request.body) as Map); expect(requestSubscription.name, absoluteName); return mock .respond(new pubsub.Subscription()..name = absoluteName); @@ -543,7 +545,7 @@ main() { // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. - registerQueryMock(MockClient mock, n, pageSize, + registerQueryMock(MockClient mock, int n, int pageSize, {String topic, int totalCalls}) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. @@ -579,7 +581,7 @@ main() { } group('list', () { - Future q(topic, count) { + Future q(String topic, int count) { var mock = mockClient(); registerQueryMock(mock, count, 50, topic: topic); @@ -793,7 +795,7 @@ main() { singleTest('topic'); }); - multipleTest(n, pageSize, topic) { + multipleTest(int n, int pageSize, String topic) { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; @@ -801,7 +803,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, n, pageSize, topic: topic); - handlingPage(page) { + handlingPage(Page page) { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, @@ -866,13 +868,14 @@ main() { })); } - registerPublish(MockClient mock, count, fn) { + registerPublish( + MockClient mock, int count, Future fn(request)) { mock.register( 'POST', 'projects/test-project/topics/test-topic:publish', expectAsync1((request) { - var publishRequest = - new pubsub.PublishRequest.fromJson(jsonDecode(request.body)); + var publishRequest = new pubsub.PublishRequest.fromJson( + jsonDecode(request.body) as Map); return fn(publishRequest); }, count: count)); } diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 6b540492..65bafb49 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -199,7 +199,8 @@ main() { })); expect(ss.lookup(rootKey), equals('root')); - Future spawnChild(ownSubKey, otherSubKey, int i, cleanup) { + Future spawnChild( + ownSubKey, otherSubKey, int i, ss.ScopeExitCallback cleanup) { return ss.fork(expectAsync0(() => new Future.sync(() { ss.register(subKey, 'fork$i'); ss.registerScopeExitCallback(cleanup); @@ -214,8 +215,12 @@ main() { } return Future.wait([ - spawnChild(subKey1, subKey2, 1, () => cleanupFork1++), - spawnChild(subKey2, subKey1, 2, () => cleanupFork2++), + spawnChild(subKey1, subKey2, 1, () { + cleanupFork1++; + }), + spawnChild(subKey2, subKey1, 2, () { + cleanupFork2++; + }), ]); })); }); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 2453befc..c08e499c 100644 --- 
a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -71,7 +71,7 @@ void main() { }); test('create-with-predefined-acl-delete', () { - Future test(predefinedAcl, expectedLength) { + Future test(PredefinedAcl predefinedAcl, expectedLength) { var bucketName = generateBucketName(); return storage .createBucket(bucketName, predefinedAcl: predefinedAcl) @@ -108,14 +108,14 @@ void main() { group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. - Future withTestBucket(function) { + Future withTestBucket(Future function(Bucket bucket)) { return function(testBucket).whenComplete(() { // TODO: Clean the bucket. }); } test('create-read-delete', () { - Future test(name, bytes) { + Future test(name, List bytes) { return withTestBucket((Bucket bucket) { return bucket.writeBytes('test', bytes).then(expectAsync1((info) { expect(info, isNotNull); @@ -139,7 +139,8 @@ void main() { test('create-with-predefined-acl-delete', () { return withTestBucket((Bucket bucket) { - Future test(objectName, predefinedAcl, expectedLength) { + Future test( + String objectName, PredefinedAcl predefinedAcl, expectedLength) { return bucket .writeBytes(objectName, [1, 2, 3], predefinedAcl: predefinedAcl) .then(expectAsync1((result) { @@ -169,7 +170,7 @@ void main() { test('create-with-acl-delete', () { return withTestBucket((Bucket bucket) { - Future test(objectName, acl, expectedLength) { + Future test(String objectName, Acl acl, expectedLength) { return bucket .writeBytes(objectName, [1, 2, 3], acl: acl) .then(expectAsync1((result) { @@ -222,7 +223,8 @@ void main() { test('create-with-metadata-delete', () { return withTestBucket((Bucket bucket) { - Future test(objectName, metadata, bytes) { + Future test( + String objectName, ObjectMetadata metadata, List bytes) { return bucket .writeBytes(objectName, bytes, metadata: metadata) .then(expectAsync1((result) { diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index e53e2531..16af7b89 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -20,7 +20,7 @@ import '../common_e2e.dart'; const String HOSTNAME = 'www.googleapis.com'; const String ROOT_PATH = '/storage/v1/'; -http.Client mockClient() => new MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => new MockClient(HOSTNAME, ROOT_PATH); withMockClient(function(MockClient client, Storage storage)) { var mock = mockClient(); @@ -41,7 +41,7 @@ main() { withMockClient((mock, api) { mock.register('POST', 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); return mock.respond(new storage.Bucket()..name = bucketName); })); @@ -67,7 +67,7 @@ main() { 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); expect(requestBucket.acl, isNull); expect(request.url.queryParameters['predefinedAcl'], @@ -77,8 +77,8 @@ main() { var futures = []; for (int i = 0; i < predefined.length; i++) { - futures.add( - api.createBucket(bucketName, predefinedAcl: predefined[i][0])); + futures.add(api.createBucket(bucketName, + predefinedAcl: predefined[i][0] as PredefinedAcl)); } return 
Future.wait(futures); }); @@ -111,7 +111,7 @@ main() { 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(requestBucket.acl, isNotNull); @@ -173,7 +173,7 @@ main() { 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body)); + new storage.Bucket.fromJson(jsonDecode(request.body) as Map); int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; expect(requestBucket.name, bucketName); @@ -199,7 +199,8 @@ main() { for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { futures.add(api.createBucket(bucketName, - predefinedAcl: predefined[i][0], acl: acls[j])); + predefinedAcl: predefined[i][0] as PredefinedAcl, + acl: acls[j])); } } return Future.wait(futures); @@ -338,14 +339,14 @@ main() { return null; }; - expectNormalUpload(MockClient mock, data, objectName) { + expectNormalUpload(MockClient mock, data, String objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json)); + new storage.Object.fromJson(jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); @@ -354,14 +355,14 @@ main() { })); } - expectResumableUpload(MockClient mock, data, objectName) { + expectResumableUpload(MockClient mock, data, String objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); expect(bytes.length, bytesResumableUpload.length); int count = 0; mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { var requestObject = - new storage.Object.fromJson(jsonDecode(request.body)); + new storage.Object.fromJson(jsonDecode(request.body) as Map); expect(requestObject.name, objectName); return mock.respondInitiateResumableUpload(PROJECT); })); @@ -384,7 +385,7 @@ main() { expect(result.name, objectName); } - Future pipeToSink(sink, List> data) { + Future pipeToSink(StreamSink> sink, List> data) { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); return new Stream.fromIterable(data) @@ -393,7 +394,7 @@ main() { .catchError((e) => throw 'Unexpected $e'); } - Future addStreamToSink(sink, List> data) { + Future addStreamToSink(StreamSink> sink, List> data) { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); return sink @@ -403,7 +404,7 @@ main() { .catchError((e) => throw 'Unexpected $e'); } - Future addToSink(sink, List> data) { + Future addToSink(StreamSink> sink, List> data) { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); data.forEach((bytes) => sink.add(bytes)); @@ -413,17 +414,19 @@ main() { .catchError((e) => throw 'Unexpected $e'); } - Future runTest(mock, api, data, length) { + Future runTest( + MockClient mock, Storage api, List> data, int length) { var bucket = api.bucket(bucketName); - Future upload(fn, sendLength) { + Future upload(Future fn(StreamSink> sink, List> data), + bool sendLength) { mock.clear(); if (length <= maxNormalUpload) { 
expectNormalUpload(mock, data, objectName); } else { expectResumableUpload(mock, data, objectName); } - var sink; + StreamSink> sink; if (sendLength) { sink = bucket.write(objectName, length: length); } else { @@ -461,7 +464,7 @@ main() { test('write-short-error', () { withMockClient((MockClient mock, api) { - Future test(length) { + Future test(int length) { mock.clear(); mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { @@ -524,7 +527,7 @@ main() { test('write-long-wrong-length', () { withMockClient((mock, api) { - Future test(data, length) { + Future test(List> data, int length) { mock.clear(); mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { @@ -631,8 +634,8 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json)); + var object = new storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); ObjectMetadata m = metadata[count]; expect(object.name, objectName); expect(mediaUpload.bytes, bytes); @@ -685,7 +688,7 @@ main() { 'b/$bucketName/o', expectAsync1((request) { var object = - new storage.Object.fromJson(jsonDecode(request.body)); + new storage.Object.fromJson(jsonDecode(request.body) as Map); ObjectMetadata m = metadata[countInitial]; expect(object.name, objectName); expect(object.cacheControl, m.cacheControl); @@ -747,8 +750,8 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json)); + var object = new storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); @@ -763,7 +766,7 @@ main() { var futures = []; for (int i = 0; i < predefined.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, - predefinedAcl: predefined[i][0])); + predefinedAcl: predefined[i][0] as PredefinedAcl)); } return Future.wait(futures); }); @@ -799,8 +802,8 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json)); + var object = new storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); @@ -872,8 +875,8 @@ main() { .then(expectAsync1((mediaUpload) { int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; - var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json)); + var object = new storage.Object.fromJson( + jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); @@ -901,7 +904,8 @@ main() { for (int i = 0; i < predefined.length; i++) { for (int j = 0; j < acls.length; j++) { futures.add(bucket.writeBytes(objectName, bytes, - acl: acls[j], predefinedAcl: predefined[i][0])); + acl: acls[j], + predefinedAcl: predefined[i][0] as PredefinedAcl)); } } return Future.wait(futures); From ec409af8675868e41d540ec1a3f15b65ad9fb12a Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 1 Aug 2018 14:08:57 +0200 Subject: [PATCH 110/239] Update pubspec.yaml to 0.6.0 (dart-lang/gcloud#56) --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml 
b/pkgs/gcloud/pubspec.yaml index 9f4432bc..2f0cd163 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.5.0 +version: 0.6.0 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From b3eba3b88a58ea15af84e3be5d036c226dde982d Mon Sep 17 00:00:00 2001 From: Martin Kustermann Date: Wed, 1 Aug 2018 14:15:26 +0200 Subject: [PATCH 111/239] Widen sdk constraint to include <3.0.0 (dart-lang/gcloud#57) --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/pubspec.yaml | 5 +++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 5a518994..8e8680df 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.6.0+1 + +* Add explicit dependency to `package:_discoveryapis_commons` +* Widen sdk constraint to <3.0.0 + ## 0.6.0 * **BREAKING CHANGE:** Add generics support. Instead of writing diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 2f0cd163..a6514dad 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,11 +1,12 @@ name: gcloud -version: 0.6.0 +version: 0.6.0+1 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud environment: - sdk: '>=2.0.0-dev.54.0 <2.0.0' + sdk: '>=2.0.0-dev.54.0 <3.0.0' dependencies: + _discoveryapis_commons: ^0.1.6+1 googleapis: '>=0.50.2 <1.0.0' googleapis_beta: '>=0.45.2 <1.0.0' http: '>=0.11.0 <0.12.0' From cf5f1e28108f6b5afb243a1c11a38eb36adb333f Mon Sep 17 00:00:00 2001 From: Janice Collins Date: Thu, 9 Aug 2018 07:54:28 -0700 Subject: [PATCH 112/239] Fix small bug where if the type is not found in the description table, we dereferenced a null (dart-lang/gcloud#59) --- pkgs/gcloud/lib/src/db/model_db_impl.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 090d77de..b32a084e 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -135,7 +135,7 @@ class ModelDBImpl implements ModelDB { /// /// If the model class `type` is not found it will throw an `ArgumentError`. String kindName(Type type) { - var kind = _modelDesc2Type[type].kind; + var kind = _modelDesc2Type[type]?.kind; if (kind == null) { throw new ArgumentError( 'The class $type was not associated with a kind.'); From 064f5432f645accefe360de0918836c9081173cd Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 6 Nov 2018 14:18:22 +0100 Subject: [PATCH 113/239] Documented index requirements for running datastore tests --- pkgs/gcloud/README.md | 6 +++++ pkgs/gcloud/test/index.yaml | 47 +++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 pkgs/gcloud/test/index.yaml diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index abe92200..b90ac0a5 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -233,6 +233,12 @@ of the Google Cloud project to use. The value of the environment variable `GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) to a JSON key file for a service account providing access to the Cloud Project. 
+To pass the datastore tests you will also need to create indexes as follows: + +```bash +gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml +``` + [Datastore]: https://cloud.google.com/datastore/ [GCS]: https://cloud.google.com/storage/ [PubSub]: https://cloud.google.com/pubsub/ diff --git a/pkgs/gcloud/test/index.yaml b/pkgs/gcloud/test/index.yaml new file mode 100644 index 00000000..9f3dcec5 --- /dev/null +++ b/pkgs/gcloud/test/index.yaml @@ -0,0 +1,47 @@ +# To run tests for datastore, the following index.yaml must be declared for +# the project using: +# $ gcloud datastore indexes create test/index.yaml +indexes: +# Needed by tests in: test/db/e2e/db_test_impl.dart +- kind: User + ancestor: no + properties: + - name: name + direction: asc + - name: nickname + direction: desc +- kind: User + ancestor: no + properties: + - name: name + direction: desc + - name: nickname + direction: desc +- kind: User + ancestor: no + properties: + - name: name + direction: desc + - name: nickname + direction: asc +- kind: User + ancestor: no + properties: + - name: language + direction: asc + - name: name + direction: asc +# Needed by tests in: test/datastore/e2e/datastore_test_impl.dart +- kind: TestQueryKind + ancestor: no + properties: + - name: indexedProp + direction: asc + - name: blobPropertyIndexed + direction: asc +- kind: TestQueryKind + ancestor: no + properties: + - name: listproperty + - name: test_property + direction: desc From 478e68831c2f8e9b636ab674e250d4e4323bc6e2 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 6 Nov 2018 14:18:53 +0100 Subject: [PATCH 114/239] Update package:test for dart 2.0 compatibility --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a6514dad..b4131404 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -14,7 +14,7 @@ dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' http_parser: '>=2.0.0 <4.0.0' mime: '>=0.9.0+3 <0.10.0' - test: '>=0.12.0 <0.13.0' + test: ^1.5.1 transformers: - $dart2js: $include: [] From 008dfb296333125daab4c2180fb86a7c6eb09e59 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 6 Nov 2018 14:19:22 +0100 Subject: [PATCH 115/239] Reduced bucket.create request frequency, to improve test reliability --- pkgs/gcloud/test/storage/e2e_test.dart | 47 +++++++++++++------------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index c08e499c..1bfaf55e 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -70,32 +70,31 @@ void main() { })); }); - test('create-with-predefined-acl-delete', () { - Future test(PredefinedAcl predefinedAcl, expectedLength) { + test('create-with-predefined-acl-delete', () async { + for (var e in { + // See documentation: + // https://cloud.google.com/storage/docs/access-control/lists + PredefinedAcl.authenticatedRead: 2, + PredefinedAcl.private: 1, + PredefinedAcl.projectPrivate: 3, + PredefinedAcl.publicRead: 2, + PredefinedAcl.publicReadWrite: 2, + }.entries) { + var predefinedAcl = e.key; + var expectedLength = e.value; var bucketName = generateBucketName(); - return storage - .createBucket(bucketName, predefinedAcl: predefinedAcl) - .then(expectAsync1((result) { - expect(result, isNull); - return storage.bucketInfo(bucketName).then(expectAsync1((info) { - var acl = info.acl;
expect(info.bucketName, bucketName); - expect(acl.entries.length, expectedLength); - return storage.deleteBucket(bucketName).then(expectAsync1((result) { - expect(result, isNull); - })); - })); - })); + // Sleep for 2 seconds to avoid bucket request limit, see: + // https://cloud.google.com/storage/quotas#buckets + await Future.delayed(Duration(seconds: 2)); + var r1 = await storage.createBucket(bucketName, + predefinedAcl: predefinedAcl); + expect(r1, isNull); + var info = await storage.bucketInfo(bucketName); + expect(info.bucketName, bucketName); + expect(info.acl.entries.length, expectedLength); + var r2 = await storage.deleteBucket(bucketName); + expect(r2, isNull); } - - return Future.forEach([ - // See documentation: https://cloud.google.com/storage/docs/access-control/lists - () => test(PredefinedAcl.authenticatedRead, 2), - () => test(PredefinedAcl.private, 1), - () => test(PredefinedAcl.projectPrivate, 3), - () => test(PredefinedAcl.publicRead, 2), - () => test(PredefinedAcl.publicReadWrite, 2), - ], (f) => f().then(expectAsync1((_) {}))); }); test('create-error', () { From 4c8198bef27c82cca6e217263c89ad27ea1062b8 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 6 Nov 2018 15:10:51 +0100 Subject: [PATCH 116/239] Fixes from review --- pkgs/gcloud/README.md | 2 +- pkgs/gcloud/test/storage/e2e_test.dart | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index b90ac0a5..b18070b2 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -233,7 +233,7 @@ of the Google Cloud project to use. The value of the environment variable `GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) to a JSON key file for a service account providing access to the Cloud Project. 
-To pass the datastore tests you will also need to create indexes as follows: +You will also need to create indexes as follows: ```bash gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 1bfaf55e..03384b1d 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -71,7 +71,7 @@ void main() { }); test('create-with-predefined-acl-delete', () async { - for (var e in { + final cases = { // See documentation: // https://cloud.google.com/storage/docs/access-control/lists PredefinedAcl.authenticatedRead: 2, @@ -79,7 +79,8 @@ void main() { PredefinedAcl.projectPrivate: 3, PredefinedAcl.publicRead: 2, PredefinedAcl.publicReadWrite: 2, - }.entries) { + }; + for (var e in cases.entries) { var predefinedAcl = e.key; var expectedLength = e.value; var bucketName = generateBucketName(); From 59479fc81deaca66aeb38ee512576b1a38ebce67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Este-Gracias?= Date: Wed, 5 Dec 2018 12:43:19 +0100 Subject: [PATCH 117/239] Update to pubsub v1 API (dart-lang/gcloud#64) --- pkgs/gcloud/lib/pubsub.dart | 2 +- pkgs/gcloud/pubspec.yaml | 1 - pkgs/gcloud/test/pubsub/pubsub_test.dart | 4 ++-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 3659adb1..a9c99a18 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -9,7 +9,7 @@ import 'dart:collection'; import 'dart:convert'; import 'package:http/http.dart' as http; -import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; +import 'package:googleapis/pubsub/v1.dart' as pubsub; import 'common.dart'; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b4131404..8a2dfa50 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,6 @@ environment: dependencies: _discoveryapis_commons: ^0.1.6+1 googleapis: '>=0.50.2 <1.0.0' - googleapis_beta: '>=0.45.2 <1.0.0' http: '>=0.11.0 <0.12.0' dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index e802de4b..15247f55 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -10,13 +10,13 @@ import 'package:test/test.dart'; import 'package:gcloud/pubsub.dart'; -import 'package:googleapis_beta/pubsub/v1beta2.dart' as pubsub; +import 'package:googleapis/pubsub/v1.dart' as pubsub; import '../common.dart'; import '../common_e2e.dart'; const String HOSTNAME = 'pubsub.googleapis.com'; -const String ROOT_PATH = '/v1beta2/'; +const String ROOT_PATH = '/v1/'; MockClient mockClient() => new MockClient(HOSTNAME, ROOT_PATH); From a4f96e34dd1e33d232e2e9498a8e4cc27d8cdbf8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Este-Gracias?= Date: Wed, 5 Dec 2018 12:46:14 +0100 Subject: [PATCH 118/239] Support pubsub emulator (dart-lang/gcloud#66) * Update to pubsub v1 API * Support pubsub emulator * Increase timeout to pull nothing * Fix unit tests --- pkgs/gcloud/lib/pubsub.dart | 8 +++++++- pkgs/gcloud/lib/src/pubsub_impl.dart | 8 ++++++-- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 9 +++------ 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index a9c99a18..09448756 100644 --- 
a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -7,6 +7,7 @@ library gcloud.pubsub; import 'dart:async'; import 'dart:collection'; import 'dart:convert'; +import 'dart:io'; import 'package:http/http.dart' as http; import 'package:googleapis/pubsub/v1.dart' as pubsub; @@ -122,7 +123,12 @@ abstract class PubSub { /// /// Returs an object providing access to Pub/Sub. The passed-in [client] will /// not be closed automatically. The caller is responsible for closing it. - factory PubSub(http.Client client, String project) = _PubSubImpl; + factory PubSub(http.Client client, String project) { + var emulator = Platform.environment['PUBSUB_EMULATOR_HOST']; + return emulator == null + ? new _PubSubImpl(client, project) + : new _PubSubImpl.rootUrl(client, project, "http://$emulator/"); + } /// The name of the project. String get project; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index d89ba7c8..ec68a972 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -15,6 +15,11 @@ class _PubSubImpl implements PubSub { _topicPrefix = 'projects/$project/topics/', _subscriptionPrefix = 'projects/$project/subscriptions/'; + _PubSubImpl.rootUrl(http.Client client, this.project, String rootUrl) + : _api = new pubsub.PubsubApi(client, rootUrl: rootUrl), + _topicPrefix = 'projects/$project/topics/', + _subscriptionPrefix = 'projects/$project/subscriptions/'; + String _fullTopicName(String name) { return name.startsWith('projects/') ? name : '${_topicPrefix}$name'; } @@ -24,7 +29,7 @@ class _PubSubImpl implements PubSub { } Future _createTopic(String name) { - return _api.projects.topics.create(new pubsub.Topic()..name = name, name); + return _api.projects.topics.create(null, name); } Future _deleteTopic(String name) { @@ -45,7 +50,6 @@ class _PubSubImpl implements PubSub { Future _createSubscription( String name, String topic, Uri endpoint) { var subscription = new pubsub.Subscription() - ..name = name ..topic = topic; if (endpoint != null) { var pushConfig = new pubsub.PushConfig() diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index e2a9d23a..49f7111d 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -186,6 +186,6 @@ void main() { await pubsub.deleteSubscription(subscriptionName); await pubsub.deleteTopic(topicName); - }); + }, timeout: const Timeout(const Duration(minutes: 2))); }); } diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 15247f55..675fae79 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -50,9 +50,7 @@ main() { 'PUT', 'projects/$PROJECT/topics/test-topic', expectAsync1((http.Request request) { - var requestTopic = - new pubsub.Topic.fromJson(jsonDecode(request.body) as Map); - expect(requestTopic.name, absoluteName); + expect(request.body, isEmpty); return mock.respond(new pubsub.Topic()..name = absoluteName); }, count: 2)); @@ -429,9 +427,8 @@ main() { 'PUT', 'projects/$PROJECT/subscriptions', expectAsync1((request) { - var requestSubscription = new pubsub.Subscription.fromJson( - jsonDecode(request.body) as Map); - expect(requestSubscription.name, absoluteName); + var requestSubscription = jsonDecode(request.body) as Map; + expect(requestSubscription['topic'], absoluteTopicName); return mock .respond(new pubsub.Subscription()..name = absoluteName); }, count: 2)); From 
6e897986f9444ce59938e0c6cab6351cad5707e0 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 11 Jan 2019 08:42:55 -0800 Subject: [PATCH 119/239] Support the latest pkg:http (dart-lang/gcloud#67) --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 12 ++++++------ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 8e8680df..7bb9e0f2 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.0+2 + +* Support the latest `pkg:http`. + ## 0.6.0+1 * Add explicit dependency to `package:_discoveryapis_commons` diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 8a2dfa50..a3694c8d 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,19 +1,19 @@ name: gcloud -version: 0.6.0+1 +version: 0.6.0+2 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud + environment: - sdk: '>=2.0.0-dev.54.0 <3.0.0' + sdk: '>=2.0.0 <3.0.0' + dependencies: _discoveryapis_commons: ^0.1.6+1 googleapis: '>=0.50.2 <1.0.0' - http: '>=0.11.0 <0.12.0' + http: '>=0.11.0 <0.13.0' + dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' http_parser: '>=2.0.0 <4.0.0' mime: '>=0.9.0+3 <0.10.0' test: ^1.5.1 -transformers: -- $dart2js: - $include: [] From 3ae668083865252a9fb3418da926283a5cdc5ceb Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 11 Jan 2019 08:35:52 -0800 Subject: [PATCH 120/239] dartfmt --- pkgs/gcloud/lib/src/pubsub_impl.dart | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index ec68a972..e069a925 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -49,8 +49,7 @@ class _PubSubImpl implements PubSub { Future _createSubscription( String name, String topic, Uri endpoint) { - var subscription = new pubsub.Subscription() - ..topic = topic; + var subscription = new pubsub.Subscription()..topic = topic; if (endpoint != null) { var pushConfig = new pubsub.PushConfig() ..pushEndpoint = endpoint.toString(); From c00fdbd5f7f38145d14d72df328779d54ae3254c Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 11 Jan 2019 08:44:28 -0800 Subject: [PATCH 121/239] enable and fix a number of lints --- pkgs/gcloud/analysis_options.yaml | 32 +- pkgs/gcloud/lib/common.dart | 4 +- pkgs/gcloud/lib/datastore.dart | 29 +- pkgs/gcloud/lib/http.dart | 2 +- pkgs/gcloud/lib/pubsub.dart | 12 +- pkgs/gcloud/lib/service_scope.dart | 37 ++- pkgs/gcloud/lib/src/datastore_impl.dart | 146 +++++---- pkgs/gcloud/lib/src/db/annotations.dart | 46 +-- pkgs/gcloud/lib/src/db/db.dart | 218 ++++++-------- pkgs/gcloud/lib/src/db/model_db.dart | 38 +-- pkgs/gcloud/lib/src/db/model_db_impl.dart | 82 +++--- pkgs/gcloud/lib/src/db/models.dart | 73 ++--- pkgs/gcloud/lib/src/pubsub_impl.dart | 88 +++--- pkgs/gcloud/lib/src/storage_impl.dart | 108 ++++--- pkgs/gcloud/lib/storage.dart | 62 ++-- pkgs/gcloud/pubspec.yaml | 1 + pkgs/gcloud/test/common.dart | 53 ++-- pkgs/gcloud/test/common_e2e.dart | 16 +- .../datastore/e2e/datastore_test_impl.dart | 139 +++++---- pkgs/gcloud/test/datastore/e2e/utils.dart | 30 +- .../gcloud/test/datastore/error_matchers.dart | 14 +- pkgs/gcloud/test/db/db_test.dart | 6 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 46 +-- .../test/db/e2e/metamodel_test_impl.dart | 12 +- pkgs/gcloud/test/db/model_db_test.dart | 12 +- pkgs/gcloud/test/db/properties_test.dart | 37 ++- pkgs/gcloud/test/db_all_e2e_test.dart | 10 
+- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 10 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 130 ++++---- pkgs/gcloud/test/service_scope_test.dart | 28 +- pkgs/gcloud/test/storage/e2e_test.dart | 44 ++- pkgs/gcloud/test/storage/storage_test.dart | 277 ++++++++---------- 32 files changed, 846 insertions(+), 996 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 022bb830..a4f33350 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -1,34 +1,4 @@ +include: package:pedantic/analysis_options.yaml analyzer: strong-mode: implicit-casts: false -linter: - rules: - - avoid_empty_else - - avoid_init_to_null - - avoid_null_checks_in_equality_operators - - await_only_futures - - camel_case_types - - cancel_subscriptions - - control_flow_in_finally - - directives_ordering - - empty_catches - - empty_constructor_bodies - - empty_statements - - iterable_contains_unrelated_type - - library_names - - library_prefixes - - list_remove_unrelated_type - - package_api_docs - - package_names - - package_prefixed_library_names - - prefer_final_fields - - prefer_is_not_empty - - super_goes_last - - test_types_in_equals - - throw_in_finally - - type_init_formals - - unawaited_futures - #- unnecessary_brace_in_string_interps - # Need to debug usage in metamodel_test_impl – might reveal a bug - - unrelated_type_equality_checks - - valid_regexps diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index 81248910..e516807b 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -26,7 +26,7 @@ abstract class Page { Future> next({int pageSize}); } -typedef Future> FirstPageProvider(int pageSize); +typedef FirstPageProvider = Future> Function(int pageSize); /// Helper class to turn a series of pages into a stream. class StreamFromPages { @@ -39,7 +39,7 @@ class StreamFromPages { StreamController _controller; StreamFromPages(this._firstPageProvider) { - _controller = new StreamController( + _controller = StreamController( sync: true, onListen: _onListen, onPause: _onPause, diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index f643b16b..12670142 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -146,8 +146,8 @@ class Key { partition = parent.partition; elements.addAll(parent.elements); } - elements.add(new KeyElement(kind, id)); - return new Key(elements, partition: partition); + elements.add(KeyElement(kind, id)); + return Key(elements, partition: partition); } int get hashCode => @@ -182,14 +182,14 @@ class Key { /// // TODO(Issue #6): Add dataset-id here. class Partition { - static const Partition DEFAULT = const Partition._default(); + static const Partition DEFAULT = Partition._default(); /// The namespace of this partition. final String namespace; Partition(this.namespace) { if (namespace == '') { - throw new ArgumentError("'namespace' must not be empty"); + throw ArgumentError("'namespace' must not be empty"); } } @@ -215,11 +215,11 @@ class KeyElement { KeyElement(this.kind, this.id) { if (kind == null) { - throw new ArgumentError("'kind' must not be null"); + throw ArgumentError("'kind' must not be null"); } if (id != null) { if (id is! int && id is! String) { - throw new ArgumentError("'id' must be either null, a String or an int"); + throw ArgumentError("'id' must be either null, a String or an int"); } } } @@ -234,12 +234,11 @@ class KeyElement { /// A relation used in query filters. 
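// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): how the
// Key, Partition and KeyElement types shown in this diff compose into a
// fully qualified datastore key. The kind names, ids and the namespace
// below are made up for illustration.
import 'package:gcloud/datastore.dart';

Key examplePersonKey() {
  final partition = Partition('example-namespace');
  // Key path: a root 'Company' entity with a 'Person' child entity.
  return Key(
    [KeyElement('Company', 'acme'), KeyElement('Person', 42)],
    partition: partition,
  );
}
// ----------------------------------------------------------------------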
class FilterRelation { - static const FilterRelation LessThan = const FilterRelation._('<'); - static const FilterRelation LessThanOrEqual = const FilterRelation._('<='); - static const FilterRelation GreatherThan = const FilterRelation._('>'); - static const FilterRelation GreatherThanOrEqual = - const FilterRelation._('>='); - static const FilterRelation Equal = const FilterRelation._('=='); + static const FilterRelation LessThan = FilterRelation._('<'); + static const FilterRelation LessThanOrEqual = FilterRelation._('<='); + static const FilterRelation GreatherThan = FilterRelation._('>'); + static const FilterRelation GreatherThanOrEqual = FilterRelation._('>='); + static const FilterRelation Equal = FilterRelation._('=='); final String name; @@ -268,8 +267,8 @@ class Filter { /// 'Order' class. /// [i.e. so one can write Order.Ascending, Order.Descending]. class OrderDirection { - static const OrderDirection Ascending = const OrderDirection._('Ascending'); - static const OrderDirection Decending = const OrderDirection._('Decending'); + static const OrderDirection Ascending = OrderDirection._('Ascending'); + static const OrderDirection Decending = OrderDirection._('Decending'); final String name; @@ -364,7 +363,7 @@ abstract class Datastore { /// If [crossEntityGroup] is `true`, the transaction can work on up to 5 /// entity groups. Otherwise the transaction will be limited to only operate /// on a single entity group. - Future beginTransaction({bool crossEntityGroup: false}); + Future beginTransaction({bool crossEntityGroup = false}); /// Make modifications to the datastore. /// diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index 35b9479d..5051807e 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -31,7 +31,7 @@ http.Client get authClientService => /// Calling this function outside of a service scope will result in an error. /// Calling this function more than once inside the same service scope is not /// allowed. -void registerAuthClientService(http.Client client, {bool close: true}) { +void registerAuthClientService(http.Client client, {bool close = true}) { ss.register(_authenticatedClientKey, client); if (close) { ss.registerScopeExitCallback(() { diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 09448756..207967d7 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -112,7 +112,7 @@ void registerPubSubService(PubSub pubsub) { /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. - static const SCOPES = const [pubsub.PubsubApi.PubsubScope]; + static const SCOPES = [pubsub.PubsubApi.PubsubScope]; /// Access Pub/Sub using an authenticated client. /// @@ -126,8 +126,8 @@ abstract class PubSub { factory PubSub(http.Client client, String project) { var emulator = Platform.environment['PUBSUB_EMULATOR_HOST']; return emulator == null - ? new _PubSubImpl(client, project) - : new _PubSubImpl.rootUrl(client, project, "http://$emulator/"); + ? _PubSubImpl(client, project) + : _PubSubImpl.rootUrl(client, project, "http://$emulator/"); } /// The name of the project. @@ -166,7 +166,7 @@ abstract class PubSub { /// /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page of topics. - Future> pageTopics({int pageSize: 50}); + Future> pageTopics({int pageSize = 50}); /// Create a new subscription named [name] listening on topic [topic]. 
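// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): typical use
// of the PubSub interface being reformatted above. `authenticatedClient`
// stands for any http.Client authorized with PubSub.SCOPES, the project,
// topic and subscription names are made up, and the Topic/Subscription/
// Message member names used here (createTopic, publishString, pull,
// asString, acknowledge) reflect this package's API as the editor
// understands it, so treat them as assumptions. If PUBSUB_EMULATOR_HOST is
// set, the factory constructor above targets the emulator instead.
import 'package:gcloud/pubsub.dart';
import 'package:http/http.dart' as http;

Future<void> pubsubExample(http.Client authenticatedClient) async {
  final pubsub = PubSub(authenticatedClient, 'my-gcp-project');
  final topic = await pubsub.createTopic('example-topic');
  final sub = await pubsub.createSubscription('example-sub', 'example-topic');
  await topic.publishString('hello world');
  final event = await sub.pull(); // waits for a message by default
  print(event.message.asString);
  await event.acknowledge();
}
// ----------------------------------------------------------------------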
/// @@ -220,7 +220,7 @@ abstract class PubSub { /// first page. Use the `Page` object to move to the next page of /// subscriptions. Future> pageSubscriptions( - {String topic, int pageSize: 50}); + {String topic, int pageSize = 50}); } /// A Pub/Sub topic. @@ -337,7 +337,7 @@ abstract class Subscription { /// /// If [wait] is `false`, the method will complete the returned `Future` /// with `null` if it finds that there are no messages available. - Future pull({bool wait: true}); + Future pull({bool wait = true}); } /// The content of a Pub/Sub message. diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index eef569be..ea67d9f8 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -83,7 +83,7 @@ const Symbol _ServiceScopeKey = #gcloud.service_scope; /// /// New service scope can be created by calling [fork] on the empty /// service scope. -final _ServiceScope _emptyServiceScope = new _ServiceScope(); +final _ServiceScope _emptyServiceScope = _ServiceScope(); /// Returns the current [_ServiceScope] object. _ServiceScope get _serviceScope => @@ -114,7 +114,7 @@ Future fork(Future func(), {Function onError}) { void register(Object key, Object value, {ScopeExitCallback onScopeExit}) { var serviceScope = _serviceScope; if (serviceScope == null) { - throw new StateError('Not running inside a service scope zone.'); + throw StateError('Not running inside a service scope zone.'); } serviceScope.register(key, value, onScopeExit: onScopeExit); } @@ -126,7 +126,7 @@ void register(Object key, Object value, {ScopeExitCallback onScopeExit}) { void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) { var serviceScope = _serviceScope; if (serviceScope == null) { - throw new StateError('Not running inside a service scope zone.'); + throw StateError('Not running inside a service scope zone.'); } serviceScope.registerOnScopeExitCallback(onScopeExitCallback); } @@ -137,7 +137,7 @@ void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) { Object lookup(Object key) { var serviceScope = _serviceScope; if (serviceScope == null) { - throw new StateError('Not running inside a service scope zone.'); + throw StateError('Not running inside a service scope zone.'); } return serviceScope.lookup(key); } @@ -146,10 +146,10 @@ Object lookup(Object key) { class _ServiceScope { /// A mapping of keys to values stored inside the service scope. final Map _key2Values = - new Map(); + Map(); /// A set which indicates whether an object was copied from it's parent. - final Set _parentCopies = new Set(); + final Set _parentCopies = Set(); /// On-Scope-Exit functions which will be called in reverse insertion order. 
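// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): how the
// public service-scope API above (fork/register/lookup) is used. The key
// #loggingService and the registered value are placeholders.
import 'package:gcloud/service_scope.dart' as ss;

Future<void> serviceScopeExample() {
  return ss.fork(() async {
    // Values registered inside fork() are visible only within this scope.
    ss.register(#loggingService, 'my-logger',
        onScopeExit: () async => print('scope exiting, cleaning up'));
    print(ss.lookup(#loggingService)); // prints: my-logger
    // When fork() completes, the onScopeExit callback above runs; outside
    // a scope, register()/lookup() throw a StateError.
  });
}
// ----------------------------------------------------------------------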
final List<_RegisteredEntry> _registeredEntries = []; @@ -175,11 +175,11 @@ class _ServiceScope { bool isParentCopy = _parentCopies.contains(serviceScopeKey); if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) { - throw new ArgumentError( + throw ArgumentError( 'Servie scope already contains key $serviceScopeKey.'); } - var entry = new _RegisteredEntry(serviceScopeKey, value, onScopeExit); + var entry = _RegisteredEntry(serviceScopeKey, value, onScopeExit); _key2Values[serviceScopeKey] = entry; if (isParentCopy) _parentCopies.remove(serviceScopeKey); @@ -194,8 +194,7 @@ class _ServiceScope { _ensureNotInDestroyingState(); if (onScopeExitCallback != null) { - _registeredEntries - .add(new _RegisteredEntry(null, null, onScopeExitCallback)); + _registeredEntries.add(_RegisteredEntry(null, null, onScopeExitCallback)); } } @@ -209,7 +208,7 @@ class _ServiceScope { return runZoned(() { var f = func(); if (f is! Future) { - throw new ArgumentError('Forking a service scope zone requires the ' + throw ArgumentError('Forking a service scope zone requires the ' 'callback function to return a future.'); } return f.whenComplete(serviceScope._runScopeExitHandlers); @@ -218,7 +217,7 @@ class _ServiceScope { void _ensureNotInDestroyingState() { if (_destroyed) { - throw new StateError( + throw StateError( 'The service scope has already been exited. It is therefore ' 'forbidden to use this service scope anymore. ' 'Please make sure that your code waits for all asynchronous tasks ' @@ -228,7 +227,7 @@ class _ServiceScope { void _ensureNotInCleaningState() { if (_cleaningUp) { - throw new StateError( + throw StateError( 'The service scope is in the process of cleaning up. It is therefore ' 'forbidden to make any modifications to the current service scope. ' 'Please make sure that your code waits for all asynchronous tasks ' @@ -239,7 +238,7 @@ class _ServiceScope { /// Copies all service scope entries to a new service scope, but not their /// on-scope-exit handlers. 
_ServiceScope _copy() { - var serviceScopeCopy = new _ServiceScope(); + var serviceScopeCopy = _ServiceScope(); serviceScopeCopy._key2Values.addAll(_key2Values); serviceScopeCopy._parentCopies.addAll(_key2Values.keys); return serviceScopeCopy; @@ -259,16 +258,16 @@ class _ServiceScope { _key2Values.remove(registeredEntry.key); } if (registeredEntry.scopeExitCallback != null) { - return new Future.sync(registeredEntry.scopeExitCallback) + return Future.sync(registeredEntry.scopeExitCallback) .catchError((e, s) => errors.add(e)); } else { - return new Future.value(); + return Future.value(); } }).then((_) { _cleaningUp = true; _destroyed = true; - if (errors.length > 0) { - throw new Exception( + if (errors.isNotEmpty) { + throw Exception( 'The following errors occured while running scope exit handlers' ': $errors'); } @@ -276,7 +275,7 @@ class _ServiceScope { } } -typedef Future ScopeExitCallback(); +typedef ScopeExitCallback = Future Function(); class _RegisteredEntry { final Object key; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 6beeacee..840409ed 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -18,7 +18,7 @@ class TransactionImpl implements datastore.Transaction { } class DatastoreImpl implements datastore.Datastore { - static const List SCOPES = const [ + static const List SCOPES = [ api.DatastoreApi.DatastoreScope, api.DatastoreApi.CloudPlatformScope, ]; @@ -29,18 +29,18 @@ class DatastoreImpl implements datastore.Datastore { /// The [project] parameter is the name of the cloud project (it should not /// start with a `s~`). DatastoreImpl(http.Client client, String project) - : _api = new api.DatastoreApi(client), + : _api = api.DatastoreApi(client), _project = project; - api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId: true}) { - var apiKey = new api.Key(); + api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId = true}) { + var apiKey = api.Key(); - apiKey.partitionId = new api.PartitionId() + apiKey.partitionId = api.PartitionId() ..projectId = _project ..namespaceId = key.partition.namespace; apiKey.path = key.elements.map((datastore.KeyElement element) { - final part = new api.PathElement(); + final part = api.PathElement(); part.kind = element.kind; final id = element.id; if (id is int) { @@ -48,7 +48,7 @@ class DatastoreImpl implements datastore.Datastore { } else if (id is String) { part.name = id; } else if (enforceId) { - throw new datastore.ApplicationError( + throw datastore.ApplicationError( 'Error while encoding entity key: Using `null` as the id is not ' 'allowed.'); } @@ -61,21 +61,21 @@ class DatastoreImpl implements datastore.Datastore { static datastore.Key _convertApi2DatastoreKey(api.Key key) { var elements = key.path.map((api.PathElement element) { if (element.id != null) { - return new datastore.KeyElement(element.kind, int.parse(element.id)); + return datastore.KeyElement(element.kind, int.parse(element.id)); } else if (element.name != null) { - return new datastore.KeyElement(element.kind, element.name); + return datastore.KeyElement(element.kind, element.name); } else { - throw new datastore.DatastoreError( + throw datastore.DatastoreError( 'Invalid server response: Expected allocated name/id.'); } }).toList(); datastore.Partition partition; if (key.partitionId != null) { - partition = new datastore.Partition(key.partitionId.namespaceId); + partition = datastore.Partition(key.partitionId.namespaceId); // TODO: assert 
projectId. } - return new datastore.Key(elements, partition: partition); + return datastore.Key(elements, partition: partition); } bool _compareApiKey(api.Key a, api.Key b) { @@ -99,8 +99,8 @@ class DatastoreImpl implements datastore.Datastore { } api.Value _convertDatastore2ApiPropertyValue(value, bool indexed, - {bool lists: true}) { - var apiValue = new api.Value()..excludeFromIndexes = !indexed; + {bool lists = true}) { + var apiValue = api.Value()..excludeFromIndexes = !indexed; if (value == null) { return apiValue..nullValue = "NULL_VALUE"; } else if (value is bool) { @@ -121,17 +121,17 @@ class DatastoreImpl implements datastore.Datastore { } else if (value is List) { if (!lists) { // FIXME(Issue #3): Consistently handle exceptions. - throw new Exception('List values are not allowed.'); + throw Exception('List values are not allowed.'); } convertItem(i) => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); - return new api.Value() + return api.Value() ..arrayValue = - (new api.ArrayValue()..values = value.map(convertItem).toList()); + (api.ArrayValue()..values = value.map(convertItem).toList()); } else { - throw new UnsupportedError( + throw UnsupportedError( 'Types ${value.runtimeType} cannot be used for serializing.'); } } @@ -148,7 +148,7 @@ class DatastoreImpl implements datastore.Datastore { else if (value.timestampValue != null) return DateTime.parse(value.timestampValue); else if (value.blobValue != null) - return new datastore.BlobValue(value.blobValueAsBytes); + return datastore.BlobValue(value.blobValueAsBytes); else if (value.keyValue != null) return _convertApi2DatastoreKey(value.keyValue); else if (value.arrayValue != null && value.arrayValue.values != null) @@ -156,14 +156,14 @@ class DatastoreImpl implements datastore.Datastore { .map(_convertApi2DatastoreProperty) .toList(); else if (value.entityValue != null) - throw new UnsupportedError('Entity values are not supported.'); + throw UnsupportedError('Entity values are not supported.'); else if (value.geoPointValue != null) - throw new UnsupportedError('GeoPoint values are not supported.'); + throw UnsupportedError('GeoPoint values are not supported.'); return null; } static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { - var unindexedProperties = new Set(); + var unindexedProperties = Set(); var properties = {}; if (entity.properties != null) { @@ -174,14 +174,13 @@ class DatastoreImpl implements datastore.Datastore { } }); } - return new datastore.Entity( - _convertApi2DatastoreKey(entity.key), properties, + return datastore.Entity(_convertApi2DatastoreKey(entity.key), properties, unIndexedProperties: unindexedProperties); } api.Entity _convertDatastore2ApiEntity(datastore.Entity entity, - {bool enforceId: false}) { - var apiEntity = new api.Entity(); + {bool enforceId = false}) { + var apiEntity = api.Entity(); apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId); apiEntity.properties = {}; @@ -208,34 +207,34 @@ class DatastoreImpl implements datastore.Datastore { }; api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) { - var pf = new api.PropertyFilter(); + var pf = api.PropertyFilter(); var operator = relationMapping[filter.relation]; if (operator == null) { - throw new ArgumentError('Unknown filter relation: ${filter.relation}.'); + throw ArgumentError('Unknown filter relation: ${filter.relation}.'); } pf.op = operator; - pf.property = new api.PropertyReference()..name = filter.name; + pf.property = api.PropertyReference()..name = filter.name; 
pf.value = _convertDatastore2ApiPropertyValue(filter.value, true, lists: false); - return new api.Filter()..propertyFilter = pf; + return api.Filter()..propertyFilter = pf; } api.Filter _convertDatastoreAncestorKey2ApiFilter(datastore.Key key) { - var pf = new api.PropertyFilter(); + var pf = api.PropertyFilter(); pf.op = 'HAS_ANCESTOR'; - pf.property = new api.PropertyReference()..name = '__key__'; - pf.value = new api.Value() + pf.property = api.PropertyReference()..name = '__key__'; + pf.value = api.Value() ..keyValue = _convertDatastore2ApiKey(key, enforceId: true); - return new api.Filter()..propertyFilter = pf; + return api.Filter()..propertyFilter = pf; } api.Filter _convertDatastore2ApiFilters( List filters, datastore.Key ancestorKey) { - if ((filters == null || filters.length == 0) && ancestorKey == null) { + if ((filters == null || filters.isEmpty) && ancestorKey == null) { return null; } - var compFilter = new api.CompositeFilter(); + var compFilter = api.CompositeFilter(); if (filters != null) { compFilter.filters = filters.map(_convertDatastore2ApiFilter).toList(); } @@ -248,15 +247,15 @@ class DatastoreImpl implements datastore.Datastore { } } compFilter.op = 'AND'; - return new api.Filter()..compositeFilter = compFilter; + return api.Filter()..compositeFilter = compFilter; } api.PropertyOrder _convertDatastore2ApiOrder(datastore.Order order) { - var property = new api.PropertyReference()..name = order.propertyName; + var property = api.PropertyReference()..name = order.propertyName; var direction = order.direction == datastore.OrderDirection.Ascending ? 'ASCENDING' : 'DESCENDING'; - return new api.PropertyOrder() + return api.PropertyOrder() ..direction = direction ..property = property; } @@ -271,22 +270,21 @@ class DatastoreImpl implements datastore.Datastore { static Future _handleError(error, StackTrace stack) { if (error is api.DetailedApiRequestError) { if (error.status == 400) { - return new Future.error( - new datastore.ApplicationError(error.message), stack); + return Future.error(datastore.ApplicationError(error.message), stack); } else if (error.status == 409) { // NOTE: This is reported as: // "too much contention on these datastore entities" // TODO: - return new Future.error(new datastore.TransactionAbortedError(), stack); + return Future.error(datastore.TransactionAbortedError(), stack); } else if (error.status == 412) { - return new Future.error(new datastore.NeedIndexError(), stack); + return Future.error(datastore.NeedIndexError(), stack); } } - return new Future.error(error, stack); + return Future.error(error, stack); } Future> allocateIds(List keys) { - var request = new api.AllocateIdsRequest(); + var request = api.AllocateIdsRequest(); request ..keys = keys.map((key) { return _convertDatastore2ApiKey(key, enforceId: false); @@ -297,10 +295,10 @@ class DatastoreImpl implements datastore.Datastore { } Future beginTransaction( - {bool crossEntityGroup: false}) { - var request = new api.BeginTransactionRequest(); + {bool crossEntityGroup = false}) { + var request = api.BeginTransactionRequest(); return _api.projects.beginTransaction(request, _project).then((result) { - return new TransactionImpl(result.transaction); + return TransactionImpl(result.transaction); }, onError: _handleError); } @@ -309,7 +307,7 @@ class DatastoreImpl implements datastore.Datastore { List autoIdInserts, List deletes, datastore.Transaction transaction}) { - var request = new api.CommitRequest(); + var request = api.CommitRequest(); if (transaction != null) { request.mode = 
'TRANSACTIONAL'; @@ -321,7 +319,7 @@ class DatastoreImpl implements datastore.Datastore { var mutations = request.mutations = []; if (inserts != null) { for (int i = 0; i < inserts.length; i++) { - mutations.add(new api.Mutation() + mutations.add(api.Mutation() ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true)); } } @@ -329,20 +327,20 @@ class DatastoreImpl implements datastore.Datastore { if (autoIdInserts != null) { autoIdStartIndex = mutations.length; for (int i = 0; i < autoIdInserts.length; i++) { - mutations.add(new api.Mutation() + mutations.add(api.Mutation() ..insert = _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); } } if (deletes != null) { for (int i = 0; i < deletes.length; i++) { - mutations.add(new api.Mutation() + mutations.add(api.Mutation() ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true)); } } return _api.projects.commit(request, _project).then((result) { List keys; - if (autoIdInserts != null && autoIdInserts.length > 0) { + if (autoIdInserts != null && autoIdInserts.isNotEmpty) { List mutationResults = result.mutationResults; assert(autoIdStartIndex != -1); assert(mutationResults.length >= @@ -354,7 +352,7 @@ class DatastoreImpl implements datastore.Datastore { (api.MutationResult r) => _convertApi2DatastoreKey(r.key)) .toList(); } - return new datastore.CommitResult(keys); + return datastore.CommitResult(keys); }, onError: _handleError); } @@ -363,16 +361,16 @@ class DatastoreImpl implements datastore.Datastore { var apiKeys = keys.map((key) { return _convertDatastore2ApiKey(key, enforceId: true); }).toList(); - var request = new api.LookupRequest(); + var request = api.LookupRequest(); request.keys = apiKeys; if (transaction != null) { // TODO: Make readOptions more configurable. - request.readOptions = new api.ReadOptions(); + request.readOptions = api.ReadOptions(); request.readOptions.transaction = (transaction as TransactionImpl).data; } return _api.projects.lookup(request, _project).then((response) { - if (response.deferred != null && response.deferred.length > 0) { - throw new datastore.DatastoreError( + if (response.deferred != null && response.deferred.isNotEmpty) { + throw datastore.DatastoreError( 'Could not successfully look up all keys due to resource ' 'constraints.'); } @@ -392,7 +390,7 @@ class DatastoreImpl implements datastore.Datastore { // // A list of keys that were not looked up due to resource constraints. // repeated Key deferred = 3; // } - var entities = new List(apiKeys.length); + var entities = List(apiKeys.length); for (int i = 0; i < apiKeys.length; i++) { var apiKey = apiKeys[i]; @@ -421,7 +419,7 @@ class DatastoreImpl implements datastore.Datastore { } if (!found) { - throw new datastore.DatastoreError('Invalid server response: ' + throw datastore.DatastoreError('Invalid server response: ' 'Tried to lookup ${apiKey.toJson()} but entity was neither in ' 'missing nor in found.'); } @@ -434,24 +432,24 @@ class DatastoreImpl implements datastore.Datastore { {datastore.Partition partition, datastore.Transaction transaction}) { // NOTE: We explicitly do not set 'limit' here, since this is handled by // QueryPageImpl.runQuery. 
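// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): how the
// paged results produced by this query implementation are consumed through
// the Page interface (items/isLast/next). The Datastore.query signature is
// assumed to match the implementation above.
import 'package:gcloud/datastore.dart';

Future<List<Entity>> drainQuery(Datastore datastore, Query query) async {
  final all = <Entity>[];
  var page = await datastore.query(query);
  while (true) {
    all.addAll(page.items);
    if (page.isLast) break;
    page = await page.next();
  }
  return all;
}
// ----------------------------------------------------------------------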
- var apiQuery = new api.Query() + var apiQuery = api.Query() ..filter = _convertDatastore2ApiFilters(query.filters, query.ancestorKey) ..order = _convertDatastore2ApiOrders(query.orders) ..offset = query.offset; if (query.kind != null) { - apiQuery.kind = [new api.KindExpression()..name = query.kind]; + apiQuery.kind = [api.KindExpression()..name = query.kind]; } - var request = new api.RunQueryRequest(); + var request = api.RunQueryRequest(); request.query = apiQuery; if (transaction != null) { // TODO: Make readOptions more configurable. - request.readOptions = new api.ReadOptions(); + request.readOptions = api.ReadOptions(); request.readOptions.transaction = (transaction as TransactionImpl).data; } if (partition != null) { - request.partitionId = new api.PartitionId() + request.partitionId = api.PartitionId() ..namespaceId = partition.namespace; } @@ -461,7 +459,7 @@ class DatastoreImpl implements datastore.Datastore { Future rollback(datastore.Transaction transaction) { // TODO: Handle [transaction] - var request = new api.RollbackRequest() + var request = api.RollbackRequest() ..transaction = (transaction as TransactionImpl).data; return _api.projects.rollback(request, _project).catchError(_handleError); } @@ -511,13 +509,13 @@ class QueryPageImpl implements Page { if (request.query.offset != null && request.query.offset > 0 && request.query.offset != response.batch.skippedResults) { - throw new datastore.DatastoreError( + throw datastore.DatastoreError( 'Server did not skip over the specified ${request.query.offset} ' 'entities.'); } if (limit != null && returnedEntities.length > limit) { - throw new datastore.DatastoreError( + throw datastore.DatastoreError( 'Server returned more entities then the limit for the request' '(${request.query.limit}) was.'); } @@ -557,7 +555,7 @@ class QueryPageImpl implements Page { // FIXME: TODO: Big hack! // It looks like Apiary/Atlas is currently broken. - if (moreBatches && returnedEntities.length == 0) { + if (moreBatches && returnedEntities.isEmpty) { print('Warning: Api to Google Cloud Datastore returned bogus response. ' 'Trying a workaround.'); isLast = true; @@ -565,13 +563,13 @@ class QueryPageImpl implements Page { } if (!isLast && response.batch.endCursor == null) { - throw new datastore.DatastoreError( + throw datastore.DatastoreError( 'Server did not supply an end cursor, even though the query ' 'is not done.'); } if (isLast) { - return new QueryPageImpl( + return QueryPageImpl( api, project, request, returnedEntities, true, null); } else { // NOTE: We reuse the old RunQueryRequest object here . @@ -584,7 +582,7 @@ class QueryPageImpl implements Page { // result batch, so we can continue where we left off. request.query.startCursor = batch.endCursor; - return new QueryPageImpl( + return QueryPageImpl( api, project, request, returnedEntities, false, remainingEntities); } }); @@ -599,8 +597,8 @@ class QueryPageImpl implements Page { // really use is `query.limit`, but this is user-specified when making // the query. 
if (isLast) { - return new Future.sync(() { - throw new ArgumentError('Cannot call next() on last page.'); + return Future.sync(() { + throw ArgumentError('Cannot call next() on last page.'); }); } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index b5206be3..5f027694 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -43,16 +43,16 @@ class Kind { /// /// If `name` is omitted, it will default to the name of class to which this /// annotation is attached to. - const Kind({this.name, this.idType: IdType.Integer}); + const Kind({this.name, this.idType = IdType.Integer}); } /// The type used for id's of an entity. class IdType { /// Use integer ids for identifying entities. - static const IdType Integer = const IdType('Integer'); + static const IdType Integer = IdType('Integer'); /// Use string ids for identifying entities. - static const IdType String = const IdType('String'); + static const IdType String = IdType('String'); final core.String _type; @@ -83,14 +83,15 @@ abstract class Property { /// `true`. final bool indexed; - const Property({this.propertyName, this.required: false, this.indexed: true}); + const Property( + {this.propertyName, this.required = false, this.indexed = true}); bool validate(ModelDB db, Object value) { if (required && value == null) return false; return true; } - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}); + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}); Object decodePrimitiveValue(ModelDB db, Object value); } @@ -99,10 +100,10 @@ abstract class Property { /// within a composed `ListProperty`. abstract class PrimitiveProperty extends Property { const PrimitiveProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) => + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) => value; Object decodePrimitiveValue(ModelDB db, Object value) => value; @@ -114,7 +115,7 @@ abstract class PrimitiveProperty extends Property { /// datastore and when reading them back. class BoolProperty extends PrimitiveProperty { const BoolProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => @@ -127,7 +128,7 @@ class BoolProperty extends PrimitiveProperty { /// datastore and when reading them back. class IntProperty extends PrimitiveProperty { const IntProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => @@ -140,7 +141,7 @@ class IntProperty extends PrimitiveProperty { /// datastore and when reading them back. 
class DoubleProperty extends PrimitiveProperty { const DoubleProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => @@ -153,7 +154,7 @@ class DoubleProperty extends PrimitiveProperty { /// datastore and when reading them back. class StringProperty extends PrimitiveProperty { const StringProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => @@ -166,13 +167,13 @@ class StringProperty extends PrimitiveProperty { /// datastore and when reading them back. class ModelKeyProperty extends PrimitiveProperty { const ModelKeyProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is Key); - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; return db.toDatastoreKey(value as Key); } @@ -189,7 +190,7 @@ class ModelKeyProperty extends PrimitiveProperty { /// datastore and when reading them back. Blob values will be represented by /// List. class BlobProperty extends PrimitiveProperty { - const BlobProperty({String propertyName, bool required: false}) + const BlobProperty({String propertyName, bool required = false}) : super(propertyName: propertyName, required: required, indexed: false); // NOTE: We don't validate that the entries of the list are really integers @@ -199,9 +200,9 @@ class BlobProperty extends PrimitiveProperty { bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is List); - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; - return new ds.BlobValue(value as List); + return ds.BlobValue(value as List); } Object decodePrimitiveValue(ModelDB db, Object value) { @@ -217,7 +218,7 @@ class BlobProperty extends PrimitiveProperty { /// datastore and when reading them back. class DateTimeProperty extends PrimitiveProperty { const DateTimeProperty( - {String propertyName, bool required: false, bool indexed: true}) + {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); bool validate(ModelDB db, Object value) => @@ -225,8 +226,7 @@ class DateTimeProperty extends PrimitiveProperty { Object decodePrimitiveValue(ModelDB db, Object value) { if (value is int) { - return new DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, - isUtc: true); + return DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); } return value; } @@ -243,7 +243,7 @@ class ListProperty extends Property { // TODO: We want to support optional list properties as well. // Get rid of "required: true" here. 
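// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): how the
// annotations above are applied to a model class in user code. The 'Person'
// kind and its fields are made up; note that model classes need a default
// (no-argument) constructor so that entities can be deserialized.
import 'package:gcloud/db.dart' as db;

@db.Kind(name: 'Person', idType: db.IdType.Integer)
class Person extends db.Model {
  @db.StringProperty(required: true)
  String name;

  @db.IntProperty(propertyName: 'age', indexed: true)
  int age;

  @db.DateTimeProperty()
  DateTime createdAt;
}
// ----------------------------------------------------------------------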
const ListProperty(this.subProperty, - {String propertyName, bool indexed: true}) + {String propertyName, bool indexed = true}) : super(propertyName: propertyName, required: true, indexed: indexed); bool validate(ModelDB db, Object value) { @@ -255,7 +255,7 @@ class ListProperty extends Property { return true; } - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (forComparison) { // If we have comparison of list properties (i.e. repeated property names) // the comparison object must not be a list, but the value itself. @@ -279,7 +279,7 @@ class ListProperty extends Property { if (value == null) return null; List list = value; - if (list.length == 0) return null; + if (list.isEmpty) return null; if (list.length == 1) return subProperty.encodeValue(db, list[0]); return list.map((value) => subProperty.encodeValue(db, value)).toList(); } @@ -295,7 +295,7 @@ class ListProperty extends Property { /// A convenience [Property] for list of strings. class StringListProperty extends ListProperty { - const StringListProperty({String propertyName, bool indexed: true}) + const StringListProperty({String propertyName, bool indexed = true}) : super(const StringProperty(), propertyName: propertyName, indexed: indexed); diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index e466c4a1..6f98ef06 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -4,21 +4,17 @@ part of gcloud.db; -/** - * A function definition for transactional functions. - * - * The function will be given a [Transaction] object which can be used to make - * lookups/queries and queue modifications (inserts/updates/deletes). - */ -typedef Future TransactionHandler(Transaction transaction); - -/** - * A datastore transaction. - * - * It can be used for making lookups/queries and queue modifications - * (inserts/updates/deletes). Finally the transaction can be either committed - * or rolled back. - */ +/// A function definition for transactional functions. +/// +/// The function will be given a [Transaction] object which can be used to make +/// lookups/queries and queue modifications (inserts/updates/deletes). +typedef TransactionHandler = Future Function(Transaction transaction); + +/// A datastore transaction. +/// +/// It can be used for making lookups/queries and queue modifications +/// (inserts/updates/deletes). Finally the transaction can be either committed +/// or rolled back. class Transaction { static const int _TRANSACTION_STARTED = 0; static const int _TRANSACTION_ROLLED_BACK = 1; @@ -34,17 +30,13 @@ class Transaction { Transaction(this.db, this._datastoreTransaction); - /** - * Looks up [keys] within this transaction. - */ + /// Looks up [keys] within this transaction. Future> lookup(List keys) { return _lookupHelper(db, keys, datastoreTransaction: _datastoreTransaction); } - /** - * Enqueues [inserts] and [deletes] which should be committed at commit time. - */ + /// Enqueues [inserts] and [deletes] which should be committed at commit time. void queueMutations({List inserts, List deletes}) { _checkSealed(); if (inserts != null) { @@ -55,39 +47,33 @@ class Transaction { } } - /** - * Query for [kind] models with [ancestorKey]. - * - * Note that [ancestorKey] is required, since a transaction is not allowed to - * touch/look at an arbitrary number of rows. - */ + /// Query for [kind] models with [ancestorKey]. 
+ /// + /// Note that [ancestorKey] is required, since a transaction is not allowed to + /// touch/look at an arbitrary number of rows. Query query(Key ancestorKey, {Partition partition}) { // TODO(#25): The `partition` element is redundant and should be removed. if (partition == null) { partition = ancestorKey.partition; } else if (ancestorKey.partition != partition) { - throw new ArgumentError( + throw ArgumentError( 'Ancestor queries must have the same partition in the ancestor key ' 'as the partition where the query executes in.'); } _checkSealed(); - return new Query(db, + return Query(db, partition: partition, ancestorKey: ancestorKey, datastoreTransaction: _datastoreTransaction); } - /** - * Rolls this transaction back. - */ + /// Rolls this transaction back. Future rollback() { _checkSealed(changeState: _TRANSACTION_ROLLED_BACK); return db.datastore.rollback(_datastoreTransaction); } - /** - * Commits this transaction including all of the queued mutations. - */ + /// Commits this transaction including all of the queued mutations. Future commit() { _checkSealed(changeState: _TRANSACTION_COMMITTED); return _commitHelper(db, @@ -98,9 +84,9 @@ class Transaction { _checkSealed({int changeState}) { if (_transactionState == _TRANSACTION_COMMITTED) { - throw new StateError('The transaction has already been committed.'); + throw StateError('The transaction has already been committed.'); } else if (_transactionState == _TRANSACTION_ROLLED_BACK) { - throw new StateError('The transaction has already been rolled back.'); + throw StateError('The transaction has already been rolled back.'); } if (changeState != null) { _transactionState = changeState; @@ -139,24 +125,22 @@ class Query { _ancestorKey = ancestorKey, _transaction = datastoreTransaction; - /** - * Adds a filter to this [Query]. - * - * [filterString] has form "name OP" where 'name' is a fieldName of the - * model and OP is an operator. The following operators are supported: - * - * * '<' (less than) - * * '<=' (less than or equal) - * * '>' (greater than) - * * '>=' (greater than or equal) - * * '=' (equal) - * - * [comparisonObject] is the object for comparison. - */ + /// Adds a filter to this [Query]. + /// + /// [filterString] has form "name OP" where 'name' is a fieldName of the + /// model and OP is an operator. The following operators are supported: + /// + /// * '<' (less than) + /// * '<=' (less than or equal) + /// * '>' (greater than) + /// * '>=' (greater than or equal) + /// * '=' (equal) + /// + /// [comparisonObject] is the object for comparison. void filter(String filterString, Object comparisonObject) { var parts = filterString.split(' '); if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) { - throw new ArgumentError("Invalid filter string '$filterString'."); + throw ArgumentError("Invalid filter string '$filterString'."); } var name = parts[0]; @@ -170,58 +154,50 @@ class Query { comparisonObject = _db.modelDB .toDatastoreValue(_kind, name, comparisonObject, forComparison: true); } - _filters.add(new ds.Filter( + _filters.add(ds.Filter( _relationMapping[comparison], propertyName, comparisonObject)); } - /** - * Adds an order to this [Query]. - * - * [orderString] has the form "-name" where 'name' is a fieldName of the model - * and the optional '-' says whether the order is descending or ascending. - */ + /// Adds an order to this [Query]. 
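// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): the
// filter/order string syntax documented above. `personQuery` is assumed to
// be a Query already created from a DatastoreDB for a 'Person'-like kind;
// how the kind is selected is left out here because the generic type
// arguments were stripped in this rendering of the diff.
import 'package:gcloud/db.dart';

Future<List<Model>> recentAdults(Query personQuery) {
  personQuery
    ..filter('age >=', 18) // "name OP", with OP one of <, <=, >, >=, =
    ..order('-age')        // a leading '-' means descending order
    ..offset(0)
    ..limit(20);
  return personQuery.run().toList();
}
// ----------------------------------------------------------------------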
+ /// + /// [orderString] has the form "-name" where 'name' is a fieldName of the model + /// and the optional '-' says whether the order is descending or ascending. void order(String orderString) { // TODO: validate [orderString] (e.g. is name valid) if (orderString.startsWith('-')) { - _orders.add(new ds.Order(ds.OrderDirection.Decending, + _orders.add(ds.Order(ds.OrderDirection.Decending, _convertToDatastoreName(orderString.substring(1)))); } else { - _orders.add(new ds.Order( + _orders.add(ds.Order( ds.OrderDirection.Ascending, _convertToDatastoreName(orderString))); } } - /** - * Sets the [offset] of this [Query]. - * - * When running this query, [offset] results will be skipped. - */ + /// Sets the [offset] of this [Query]. + /// + /// When running this query, [offset] results will be skipped. void offset(int offset) { _offset = offset; } - /** - * Sets the [limit] of this [Query]. - * - * When running this query, a maximum of [limit] results will be returned. - */ + /// Sets the [limit] of this [Query]. + /// + /// When running this query, a maximum of [limit] results will be returned. void limit(int limit) { _limit = limit; } - /** - * Execute this [Query] on the datastore. - * - * Outside of transactions this method might return stale data or may not - * return the newest updates performed on the datastore since updates - * will be reflected in the indices in an eventual consistent way. - */ + /// Execute this [Query] on the datastore. + /// + /// Outside of transactions this method might return stale data or may not + /// return the newest updates performed on the datastore since updates + /// will be reflected in the indices in an eventual consistent way. Stream run() { ds.Key ancestorKey; if (_ancestorKey != null) { ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); } - var query = new ds.Query( + var query = ds.Query( ancestorKey: ancestorKey, kind: _kind, filters: _filters, @@ -231,10 +207,10 @@ class Query { ds.Partition partition; if (_partition != null) { - partition = new ds.Partition(_partition.namespace); + partition = ds.Partition(_partition.namespace); } - return new StreamFromPages((int pageSize) { + return StreamFromPages((int pageSize) { return _db.datastore .query(query, transaction: _transaction, partition: partition); }).stream.map(_db.modelDB.fromDatastoreEntity); @@ -248,7 +224,7 @@ class Query { String _convertToDatastoreName(String name) { var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name); if (propertyName == null) { - throw new ArgumentError("Field $name is not available for kind $_kind"); + throw ArgumentError("Field $name is not available for kind $_kind"); } return propertyName; } @@ -260,55 +236,43 @@ class DatastoreDB { Partition _defaultPartition; DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) - : _modelDB = modelDB != null ? modelDB : new ModelDBImpl() { + : _modelDB = modelDB != null ? modelDB : ModelDBImpl() { _defaultPartition = - defaultPartition != null ? defaultPartition : new Partition(null); + defaultPartition != null ? defaultPartition : Partition(null); } - /** - * The [ModelDB] used to serialize/deserialize objects. - */ + /// The [ModelDB] used to serialize/deserialize objects. ModelDB get modelDB => _modelDB; - /** - * Gets the empty key using the default [Partition]. - * - * Model keys with parent set to [emptyKey] will create their own entity - * groups. - */ + /// Gets the empty key using the default [Partition]. 
+ /// + /// Model keys with parent set to [emptyKey] will create their own entity + /// groups. Key get emptyKey => defaultPartition.emptyKey; - /** - * Gets the default [Partition]. - */ + /// Gets the default [Partition]. Partition get defaultPartition => _defaultPartition; - /** - * Creates a new [Partition] with namespace [namespace]. - */ + /// Creates a new [Partition] with namespace [namespace]. Partition newPartition(String namespace) { - return new Partition(namespace); + return Partition(namespace); } - /** - * Begins a new a new transaction. - * - * A transaction can touch only a limited number of entity groups. This limit - * is currently 5. - */ + /// Begins a new a new transaction. + /// + /// A transaction can touch only a limited number of entity groups. This limit + /// is currently 5. // TODO: Add retries and/or auto commit/rollback. Future withTransaction(TransactionHandler transactionHandler) { return datastore .beginTransaction(crossEntityGroup: true) .then((datastoreTransaction) { - var transaction = new Transaction(this, datastoreTransaction); + var transaction = Transaction(this, datastoreTransaction); return transactionHandler(transaction); }); } - /** - * Build a query for [kind] models. - */ + /// Build a query for [kind] models. Query query({Partition partition, Key ancestorKey}) { // TODO(#26): There is only one case where `partition` is not redundant // Namely if `ancestorKey == null` and `partition != null`. We could @@ -321,33 +285,29 @@ class DatastoreDB { partition = defaultPartition; } } else if (ancestorKey != null && partition != ancestorKey.partition) { - throw new ArgumentError( + throw ArgumentError( 'Ancestor queries must have the same partition in the ancestor key ' 'as the partition where the query executes in.'); } - return new Query(this, partition: partition, ancestorKey: ancestorKey); + return Query(this, partition: partition, ancestorKey: ancestorKey); } - /** - * Looks up [keys] in the datastore and returns a list of [Model] objects. - * - * For transactions, please use [beginTransaction] and call the [lookup] - * method on it's returned [Transaction] object. - */ + /// Looks up [keys] in the datastore and returns a list of [Model] objects. + /// + /// For transactions, please use [beginTransaction] and call the [lookup] + /// method on it's returned [Transaction] object. Future> lookup(List keys) { return _lookupHelper(this, keys); } - /** - * Add [inserts] to the datastore and remove [deletes] from it. - * - * The order of inserts and deletes is not specified. When the commit is done - * direct lookups will see the effect but non-ancestor queries will see the - * change in an eventual consistent way. - * - * For transactions, please use `beginTransaction` and it's returned - * [Transaction] object. - */ + /// Add [inserts] to the datastore and remove [deletes] from it. + /// + /// The order of inserts and deletes is not specified. When the commit is done + /// direct lookups will see the effect but non-ancestor queries will see the + /// change in an eventual consistent way. + /// + /// For transactions, please use `beginTransaction` and it's returned + /// [Transaction] object. 
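// ----------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): putting
// withTransaction/lookup/queueMutations/commit together. `dbService`,
// `personKey`, `newPerson` and `obsoleteKey` are placeholders.
import 'package:gcloud/db.dart';

Future<void> renamePerson(DatastoreDB dbService, Key personKey) {
  return dbService.withTransaction((Transaction tx) async {
    final models = await tx.lookup([personKey]);
    final person = models.first;
    // ... mutate `person` here ...
    tx.queueMutations(inserts: [person]);
    await tx.commit();
  });
}

// Outside a transaction, the same mutations go directly through DatastoreDB:
//   await dbService.commit(inserts: [newPerson], deletes: [obsoleteKey]);
// ----------------------------------------------------------------------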
Future commit({List inserts, List deletes}) { return _commitHelper(this, inserts: inserts, deletes: deletes); } @@ -390,7 +350,7 @@ Future _commitHelper(DatastoreDB db, deletes: entityDeletes, transaction: datastoreTransaction) .then((ds.CommitResult result) { - if (entityAutoIdInserts != null && entityAutoIdInserts.length > 0) { + if (entityAutoIdInserts != null && entityAutoIdInserts.isNotEmpty) { for (var i = 0; i < result.autoIdInsertKeys.length; i++) { var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]); autoIdModelInserts[i].parentKey = key.parent; diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 07cd077a..4d7c44ee 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -4,46 +4,30 @@ part of gcloud.db; -/** - * A database of all registered models. - * - * Responsible for converting between dart model objects and datastore entities. - */ +/// A database of all registered models. +/// +/// Responsible for converting between dart model objects and datastore entities. abstract class ModelDB { - /** - * Converts a [ds.Key] to a [Key]. - */ + /// Converts a [ds.Key] to a [Key]. Key fromDatastoreKey(ds.Key datastoreKey); - /** - * Converts a [Key] to a [ds.Key]. - */ + /// Converts a [Key] to a [ds.Key]. ds.Key toDatastoreKey(Key dbKey); - /** - * Converts a [Model] instance to a [ds.Entity]. - */ + /// Converts a [Model] instance to a [ds.Entity]. ds.Entity toDatastoreEntity(Model model); - /** - * Converts a [ds.Entity] to a [Model] instance. - */ + /// Converts a [ds.Entity] to a [Model] instance. T fromDatastoreEntity(ds.Entity entity); - /** - * Returns the kind name for instances of [type]. - */ + /// Returns the kind name for instances of [type]. String kindName(Type type); - /** - * Returns the property name used for [fieldName] - */ + /// Returns the property name used for [fieldName] // TODO: Get rid of this eventually. String fieldNameToPropertyName(String kind, String fieldName); - /** - * Converts [value] according to the [Property] named [fieldName] in [kind]. - */ + /// Converts [value] according to the [Property] named [fieldName] in [kind]. Object toDatastoreValue(String kind, String fieldName, Object value, - {bool forComparison: false}); + {bool forComparison = false}); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index b32a084e..efccc235 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -58,12 +58,12 @@ class ModelDBImpl implements ModelDB { /// Converts a [ds.Key] to a [Key]. Key fromDatastoreKey(ds.Key datastoreKey) { - var namespace = new Partition(datastoreKey.partition.namespace); + var namespace = Partition(datastoreKey.partition.namespace); Key key = namespace.emptyKey; for (var element in datastoreKey.elements) { var type = _type2ModelDesc[_kind2ModelDesc[element.kind]]; if (type == null) { - throw new StateError( + throw StateError( 'Could not find a model associated with kind "${element.kind}". ' 'Please ensure a model class was annotated with ' '`@Kind(name: "${element.kind}")`.'); @@ -86,20 +86,20 @@ class ModelDBImpl implements ModelDB { bool useIntegerId = modelDescription.useIntegerId; if (useIntegerId && id != null && id is! int) { - throw new ArgumentError('Expected an integer id property but ' + throw ArgumentError('Expected an integer id property but ' 'id was of type ${id.runtimeType}'); } if (!useIntegerId && (id != null && id is! 
String)) { - throw new ArgumentError('Expected a string id property but ' + throw ArgumentError('Expected a string id property but ' 'id was of type ${id.runtimeType}'); } - elements.add(new ds.KeyElement(kind, id)); + elements.add(ds.KeyElement(kind, id)); currentKey = currentKey.parent; } Partition partition = currentKey._parent; - return new ds.Key(elements.reversed.toList(), - partition: new ds.Partition(partition.namespace)); + return ds.Key(elements.reversed.toList(), + partition: ds.Partition(partition.namespace)); } /// Converts a [Model] instance to a [ds.Entity]. @@ -108,7 +108,7 @@ class ModelDBImpl implements ModelDB { var modelDescription = _modelDescriptionForType(model.runtimeType); return modelDescription.encodeModel(this, model); } catch (error, stack) { - throw new ArgumentError('Error while encoding entity ($error, $stack).'); + throw ArgumentError('Error while encoding entity ($error, $stack).'); } } @@ -120,14 +120,14 @@ class ModelDBImpl implements ModelDB { var kind = entity.key.elements.last.kind; var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { - throw new StateError('Trying to deserialize entity of kind ' + throw StateError('Trying to deserialize entity of kind ' '$kind, but no Model class available for it.'); } try { return modelDescription.decodeEntity(this, key, entity); } catch (error, stack) { - throw new StateError('Error while decoding entity ($error, $stack).'); + throw StateError('Error while decoding entity ($error, $stack).'); } } @@ -137,8 +137,7 @@ class ModelDBImpl implements ModelDB { String kindName(Type type) { var kind = _modelDesc2Type[type]?.kind; if (kind == null) { - throw new ArgumentError( - 'The class $type was not associated with a kind.'); + throw ArgumentError('The class $type was not associated with a kind.'); } return kind; } @@ -148,17 +147,17 @@ class ModelDBImpl implements ModelDB { String fieldNameToPropertyName(String kind, String fieldName) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { - throw new ArgumentError('The kind "$kind" is unknown.'); + throw ArgumentError('The kind "$kind" is unknown.'); } return modelDescription.fieldNameToPropertyName(fieldName); } /// Converts [value] according to the [Property] named [name] in [type]. 
Object toDatastoreValue(String kind, String fieldName, Object value, - {bool forComparison: false}) { + {bool forComparison = false}) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { - throw new ArgumentError('The kind "$kind" is unknown.'); + throw ArgumentError('The kind "$kind" is unknown.'); } return modelDescription.encodeField(this, fieldName, value, forComparison: forComparison); @@ -202,7 +201,7 @@ class ModelDBImpl implements ModelDB { for (var modelDescription in _modelDescriptions) { var kindName = modelDescription.kindName(this); if (_kind2ModelDesc.containsKey(kindName)) { - throw new StateError('Cannot have two ModelDescriptions ' + throw StateError('Cannot have two ModelDescriptions ' 'with the same kind ($kindName)'); } _kind2ModelDesc[kindName] = modelDescription; @@ -214,7 +213,7 @@ class ModelDBImpl implements ModelDB { for (mirrors.InstanceMirror instance in classMirror.metadata) { if (instance.reflectee.runtimeType == Kind) { if (kindAnnotation != null) { - throw new StateError( + throw StateError( 'Cannot have more than one ModelMetadata() annotation ' 'on a Model class'); } @@ -245,9 +244,9 @@ class ModelDBImpl implements ModelDB { _ModelDescription modelDesc; if (_isExpandoClass(modelClass)) { - modelDesc = new _ExpandoModelDescription(name, useIntegerId); + modelDesc = _ExpandoModelDescription(name, useIntegerId); } else { - modelDesc = new _ModelDescription(name, useIntegerId); + modelDesc = _ModelDescription(name, useIntegerId); } _type2ModelDesc[modelDesc] = modelClass.reflectedType; @@ -262,23 +261,22 @@ class ModelDBImpl implements ModelDB { if (declaration is mirrors.MethodMirror) { if (declaration.isConstructor && declaration.constructorName == const Symbol('') && - declaration.parameters.length == 0) { + declaration.parameters.isEmpty) { defaultConstructorFound = true; break; } } } if (!defaultConstructorFound) { - throw new StateError( - 'Class ${modelClass.simpleName} does not have a default ' + throw StateError('Class ${modelClass.simpleName} does not have a default ' 'constructor.'); } } Map _propertiesFromModelDescription( mirrors.ClassMirror modelClassMirror) { - var properties = new Map(); - var propertyNames = new Set(); + var properties = Map(); + var propertyNames = Set(); // Loop over all classes in the inheritance path up to the Object class. 
while (modelClassMirror.superclass != null) { @@ -296,7 +294,7 @@ class ModelDBImpl implements ModelDB { .toList(); if (propertyAnnotations.length > 1) { - throw new StateError( + throw StateError( 'Cannot have more than one Property annotation on a model ' 'field.'); } else if (propertyAnnotations.length == 1) { @@ -310,13 +308,13 @@ class ModelDBImpl implements ModelDB { if (propertyName == null) propertyName = fieldName; if (properties.containsKey(fieldName)) { - throw new StateError( + throw StateError( 'Cannot have two Property objects describing the same field ' 'in a model object class hierarchy.'); } if (propertyNames.contains(propertyName)) { - throw new StateError( + throw StateError( 'Cannot have two Property objects mapping to the same ' 'datastore property name "$propertyName".'); } @@ -340,17 +338,15 @@ class ModelDBImpl implements ModelDB { } modelClass = modelClass.superclass; } - throw new StateError('This should be unreachable.'); + throw StateError('This should be unreachable.'); } } class _ModelDescription { - final HashMap _property2FieldName = - new HashMap(); - final HashMap _field2PropertyName = - new HashMap(); - final Set _indexedProperties = new Set(); - final Set _unIndexedProperties = new Set(); + final HashMap _property2FieldName = HashMap(); + final HashMap _field2PropertyName = HashMap(); + final Set _indexedProperties = Set(); + final Set _unIndexedProperties = Set(); final String kind; final bool useIntegerId; @@ -394,7 +390,7 @@ class _ModelDescription { _encodeProperty(db, model, mirror, properties, fieldName, prop); }); - return new ds.Entity(key, properties, + return ds.Entity(key, properties, unIndexedProperties: _unIndexedProperties); } @@ -406,7 +402,7 @@ class _ModelDescription { var value = mirror.getField(mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; if (!prop.validate(db, value)) { - throw new StateError('Property validation failed for ' + throw StateError('Property validation failed for ' 'property $fieldName while trying to serialize entity of kind ' '${model.runtimeType}. 
'); } @@ -438,7 +434,7 @@ class _ModelDescription { var value = prop.decodePrimitiveValue(db, rawValue); if (!prop.validate(db, value)) { - throw new StateError('Property validation failed while ' + throw StateError('Property validation failed while ' 'trying to deserialize entity of kind ' '${entity.key.elements.last.kind} (property name: $propertyName)'); } @@ -455,13 +451,13 @@ class _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true, bool forComparison: false}) { + {bool enforceFieldExists = true, bool forComparison = false}) { Property property = db._propertiesForModel(this)[fieldName]; if (property != null) { return property.encodeValue(db, value, forComparison: forComparison); } if (enforceFieldExists) { - throw new ArgumentError( + throw ArgumentError( 'A field named "$fieldName" does not exist in kind "$kind".'); } return null; @@ -487,9 +483,9 @@ class _ExpandoModelDescription extends _ModelDescription { void initialize(ModelDBImpl db) { super.initialize(db); - realFieldNames = new Set.from(_field2PropertyName.keys); - realPropertyNames = new Set.from(_property2FieldName.keys); - usedNames = new Set()..addAll(realFieldNames)..addAll(realPropertyNames); + realFieldNames = Set.from(_field2PropertyName.keys); + realPropertyNames = Set.from(_property2FieldName.keys); + usedNames = Set()..addAll(realFieldNames)..addAll(realPropertyNames); } ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { @@ -535,7 +531,7 @@ class _ExpandoModelDescription extends _ModelDescription { } Object encodeField(ModelDBImpl db, String fieldName, Object value, - {bool enforceFieldExists: true, bool forComparison: false}) { + {bool enforceFieldExists = true, bool forComparison = false}) { // The [enforceFieldExists] argument is intentionally ignored. Object primitiveValue = super.encodeField(db, fieldName, value, diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index b0278ef1..95f645a9 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -4,12 +4,10 @@ part of gcloud.db; -/** - * Represents a unique identifier for a [Model] stored in a datastore. - * - * The [Key] can be incomplete if it's id is `null`. In this case the id will - * be automatically allocated and set at commit time. - */ +/// Represents a unique identifier for a [Model] stored in a datastore. +/// +/// The [Key] can be incomplete if it's id is `null`. In this case the id will +/// be automatically allocated and set at commit time. class Key { // Either KeyImpl or PartitionImpl final Object _parent; @@ -19,11 +17,10 @@ class Key { Key(Key parent, this.type, this.id) : _parent = parent { if (type == null) { - throw new ArgumentError('The type argument must not be null.'); + throw ArgumentError('The type argument must not be null.'); } if (id != null && id is! String && id is! int) { - throw new ArgumentError( - 'The id argument must be an integer or a String.'); + throw ArgumentError('The id argument must be an integer or a String.'); } } @@ -32,9 +29,7 @@ class Key { type = null, id = null; - /** - * Parent of this [Key]. - */ + /// Parent of this [Key]. Key get parent { if (_parent is Key) { return _parent as Key; @@ -42,9 +37,7 @@ class Key { return null; } - /** - * The partition of this [Key]. - */ + /// The partition of this [Key]. Partition get partition { var obj = _parent; while (obj is! 
Partition) { @@ -54,7 +47,7 @@ class Key { } Key append(Type modelType, {Object id}) { - return new Key(this, modelType, id); + return Key(this, modelType, id); } bool get isEmpty => _parent is Partition; @@ -69,28 +62,24 @@ class Key { int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode; } -/** - * Represents a datastore partition. - * - * A datastore is partitioned into namespaces. The default namespace is - * `null`. - */ +/// Represents a datastore partition. +/// +/// A datastore is partitioned into namespaces. The default namespace is +/// `null`. class Partition { final String namespace; Partition(this.namespace) { if (namespace == '') { - throw new ArgumentError('The namespace must not be an empty string'); + throw ArgumentError('The namespace must not be an empty string'); } } - /** - * Returns an empty [Key]. - * - * Entities where the parent [Key] is empty will create their own entity - * group. - */ - Key get emptyKey => new Key.emptyKey(this); + /// Returns an empty [Key]. + /// + /// Entities where the parent [Key] is empty will create their own entity + /// group. + Key get emptyKey => Key.emptyKey(this); operator ==(Object other) { return other is Partition && namespace == other.namespace; @@ -99,12 +88,10 @@ class Partition { int get hashCode => namespace.hashCode; } -/** - * Superclass for all model classes. - * - * Every model class has a [id] -- which must be an integer or a string, and - * a [parentKey]. The [key] getter is returning the key for the model object. - */ +/// Superclass for all model classes. +/// +/// Every model class has a [id] -- which must be an integer or a string, and +/// a [parentKey]. The [key] getter is returning the key for the model object. abstract class Model { Object id; Key parentKey; @@ -112,13 +99,11 @@ abstract class Model { Key get key => parentKey.append(this.runtimeType, id: id); } -/** - * Superclass for all expanded model classes. - * - * The [ExpandoModel] class adds support for having dynamic properties. You can - * set arbitrary fields on these models. The expanded values must be values - * accepted by the [RawDatastore] implementation. - */ +/// Superclass for all expanded model classes. +/// +/// The [ExpandoModel] class adds support for having dynamic properties. You can +/// set arbitrary fields on these models. The expanded values must be values +/// accepted by the [RawDatastore] implementation. 
abstract class ExpandoModel extends Model { final Map additionalProperties = {}; @@ -132,7 +117,7 @@ abstract class ExpandoModel extends Model { additionalProperties[name] = value; return value; } else { - throw new ArgumentError('Unsupported noSuchMethod call on ExpandoModel'); + throw ArgumentError('Unsupported noSuchMethod call on ExpandoModel'); } } } diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index e069a925..d103ce7c 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -11,12 +11,12 @@ class _PubSubImpl implements PubSub { final String _subscriptionPrefix; _PubSubImpl(http.Client client, this.project) - : _api = new pubsub.PubsubApi(client), + : _api = pubsub.PubsubApi(client), _topicPrefix = 'projects/$project/topics/', _subscriptionPrefix = 'projects/$project/subscriptions/'; _PubSubImpl.rootUrl(http.Client client, this.project, String rootUrl) - : _api = new pubsub.PubsubApi(client, rootUrl: rootUrl), + : _api = pubsub.PubsubApi(client, rootUrl: rootUrl), _topicPrefix = 'projects/$project/topics/', _subscriptionPrefix = 'projects/$project/subscriptions/'; @@ -49,10 +49,9 @@ class _PubSubImpl implements PubSub { Future _createSubscription( String name, String topic, Uri endpoint) { - var subscription = new pubsub.Subscription()..topic = topic; + var subscription = pubsub.Subscription()..topic = topic; if (endpoint != null) { - var pushConfig = new pubsub.PushConfig() - ..pushEndpoint = endpoint.toString(); + var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint.toString(); subscription.pushConfig = pushConfig; } return _api.projects.subscriptions.create(subscription, name); @@ -76,17 +75,17 @@ class _PubSubImpl implements PubSub { } Future _modifyPushConfig(String subscription, Uri endpoint) { - var pushConfig = new pubsub.PushConfig() + var pushConfig = pubsub.PushConfig() ..pushEndpoint = endpoint != null ? endpoint.toString() : null; - var request = new pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; + var request = pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; return _api.projects.subscriptions.modifyPushConfig(request, subscription); } Future _publish( String topic, List message, Map attributes) { - var request = new pubsub.PublishRequest() + var request = pubsub.PublishRequest() ..messages = [ - (new pubsub.PubsubMessage() + (pubsub.PubsubMessage() ..dataAsBytes = message ..attributes = attributes) ]; @@ -96,14 +95,14 @@ class _PubSubImpl implements PubSub { Future _pull( String subscription, bool returnImmediately) { - var request = new pubsub.PullRequest() + var request = pubsub.PullRequest() ..maxMessages = 1 ..returnImmediately = returnImmediately; return _api.projects.subscriptions.pull(request, subscription); } Future _ack(String ackId, String subscription) { - var request = new pubsub.AcknowledgeRequest()..ackIds = [ackId]; + var request = pubsub.AcknowledgeRequest()..ackIds = [ackId]; // The Pub/Sub acknowledge API returns an instance of Empty. return _api.projects.subscriptions .acknowledge(request, subscription) @@ -112,24 +111,24 @@ class _PubSubImpl implements PubSub { void _checkTopicName(String name) { if (name.startsWith('projects/') && !name.contains('/topics/')) { - throw new ArgumentError( + throw ArgumentError( "Illegal topic name. Absolute topic names must have the form " "'projects/[project-id]/topics/[topic-name]"); } if (name.endsWith('/topics/')) { - throw new ArgumentError( + throw ArgumentError( 'Illegal topic name. 
Relative part of the name cannot be empty'); } } void _checkSubscriptionName(String name) { if (name.startsWith('projects/') && !name.contains('/subscriptions/')) { - throw new ArgumentError( + throw ArgumentError( "Illegal subscription name. Absolute subscription names must have " "the form 'projects/[project-id]/subscriptions/[subscription-name]"); } if (name.endsWith('/subscriptions/')) { - throw new ArgumentError( + throw ArgumentError( 'Illegal subscription name. Relative part of the name cannot be ' 'empty'); } @@ -138,7 +137,7 @@ class _PubSubImpl implements PubSub { Future createTopic(String name) { _checkTopicName(name); return _createTopic(_fullTopicName(name)) - .then((top) => new _TopicImpl(this, top)); + .then((top) => _TopicImpl(this, top)); } Future deleteTopic(String name) { @@ -148,22 +147,21 @@ class _PubSubImpl implements PubSub { Future lookupTopic(String name) { _checkTopicName(name); - return _getTopic(_fullTopicName(name)) - .then((top) => new _TopicImpl(this, top)); + return _getTopic(_fullTopicName(name)).then((top) => _TopicImpl(this, top)); } Stream listTopics() { Future> firstPage(int pageSize) { return _listTopics(pageSize, null) - .then((response) => new _TopicPageImpl(this, pageSize, response)); + .then((response) => _TopicPageImpl(this, pageSize, response)); } - return new StreamFromPages(firstPage).stream; + return StreamFromPages(firstPage).stream; } - Future> pageTopics({int pageSize: 50}) { + Future> pageTopics({int pageSize = 50}) { return _listTopics(pageSize, null).then((response) { - return new _TopicPageImpl(this, pageSize, response); + return _TopicPageImpl(this, pageSize, response); }); } @@ -173,7 +171,7 @@ class _PubSubImpl implements PubSub { _checkTopicName(topic); return _createSubscription( _fullSubscriptionName(name), _fullTopicName(topic), endpoint) - .then((sub) => new _SubscriptionImpl(this, sub)); + .then((sub) => _SubscriptionImpl(this, sub)); } Future deleteSubscription(String name) { @@ -184,22 +182,22 @@ class _PubSubImpl implements PubSub { Future lookupSubscription(String name) { _checkSubscriptionName(name); return _getSubscription(_fullSubscriptionName(name)) - .then((sub) => new _SubscriptionImpl(this, sub)); + .then((sub) => _SubscriptionImpl(this, sub)); } Stream listSubscriptions([String query]) { Future> firstPage(int pageSize) { - return _listSubscriptions(query, pageSize, null).then((response) => - new _SubscriptionPageImpl(this, query, pageSize, response)); + return _listSubscriptions(query, pageSize, null).then( + (response) => _SubscriptionPageImpl(this, query, pageSize, response)); } - return new StreamFromPages(firstPage).stream; + return StreamFromPages(firstPage).stream; } Future> pageSubscriptions( - {String topic, int pageSize: 50}) { + {String topic, int pageSize = 50}) { return _listSubscriptions(topic, pageSize, null).then((response) { - return new _SubscriptionPageImpl(this, topic, pageSize, response); + return _SubscriptionPageImpl(this, topic, pageSize, response); }); } } @@ -290,7 +288,7 @@ class _PullEventImpl implements PullEvent { _PullEventImpl( this._api, this._subscriptionName, pubsub.PullResponse response) : this._response = response, - message = new _PullMessage(response.receivedMessages[0].message); + message = _PullMessage(response.receivedMessages[0].message); Future acknowledge() { return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); @@ -314,7 +312,7 @@ class _PushEventImpl implements PushEvent { factory _PushEventImpl.fromJson(String json) { Map body = jsonDecode(json); 
String data = body['message']['data']; - Map labels = new HashMap(); + Map labels = HashMap(); body['message']['labels'].forEach((label) { String key = label['key']; var value = label['strValue']; @@ -327,7 +325,7 @@ class _PushEventImpl implements PushEvent { if (!subscription.startsWith(PREFIX)) { subscription = PREFIX + subscription; } - return new _PushEventImpl(new _PushMessage(data, labels), subscription); + return _PushEventImpl(_PushMessage(data, labels), subscription); } } @@ -383,22 +381,22 @@ class _SubscriptionImpl implements Subscription { String get absoluteName => _subscription.name; Topic get topic { - var topic = new pubsub.Topic()..name = _subscription.topic; - return new _TopicImpl(_api, topic); + var topic = pubsub.Topic()..name = _subscription.topic; + return _TopicImpl(_api, topic); } Future delete() => _api._deleteSubscription(_subscription.name); - Future pull({bool wait: true}) { + Future pull({bool wait = true}) { return _api._pull(_subscription.name, !wait).then((response) { // The documentation says 'Returns an empty list if there are no // messages available in the backlog'. However the receivedMessages // property can also be null in that case. if (response.receivedMessages == null || - response.receivedMessages.length == 0) { + response.receivedMessages.isEmpty) { return null; } - return new _PullEventImpl(_api, _subscription.name, response); + return _PullEventImpl(_api, _subscription.name, response); }).catchError((e) => null, test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); } @@ -421,11 +419,11 @@ class _TopicPageImpl implements Page { final List items; _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) - : items = new List(response.topics != null ? response.topics.length : 0), + : items = List(response.topics != null ? response.topics.length : 0), _nextPageToken = response.nextPageToken { if (response.topics != null) { for (int i = 0; i < response.topics.length; i++) { - items[i] = new _TopicImpl(_api, response.topics[i]); + items[i] = _TopicImpl(_api, response.topics[i]); } } } @@ -433,11 +431,11 @@ class _TopicPageImpl implements Page { bool get isLast => _nextPageToken == null; Future> next({int pageSize}) { - if (isLast) return new Future.value(null); + if (isLast) return Future.value(null); if (pageSize == null) pageSize = this._pageSize; return _api._listTopics(pageSize, _nextPageToken).then((response) { - return new _TopicPageImpl(_api, pageSize, response); + return _TopicPageImpl(_api, pageSize, response); }); } } @@ -451,12 +449,12 @@ class _SubscriptionPageImpl implements Page { _SubscriptionPageImpl(this._api, this._topic, this._pageSize, pubsub.ListSubscriptionsResponse response) - : items = new List( + : items = List( response.subscriptions != null ? 
response.subscriptions.length : 0), _nextPageToken = response.nextPageToken { if (response.subscriptions != null) { for (int i = 0; i < response.subscriptions.length; i++) { - items[i] = new _SubscriptionImpl(_api, response.subscriptions[i]); + items[i] = _SubscriptionImpl(_api, response.subscriptions[i]); } } } @@ -464,13 +462,13 @@ class _SubscriptionPageImpl implements Page { bool get isLast => _nextPageToken == null; Future> next({int pageSize}) { - if (_nextPageToken == null) return new Future.value(null); + if (_nextPageToken == null) return Future.value(null); if (pageSize == null) pageSize = this._pageSize; return _api ._listSubscriptions(_topic, pageSize, _nextPageToken) .then((response) { - return new _SubscriptionPageImpl(_api, _topic, pageSize, response); + return _SubscriptionPageImpl(_api, _topic, pageSize, response); }); } } diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index a190d044..8daef73d 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -15,16 +15,16 @@ class _AbsoluteName { _AbsoluteName.parse(String absoluteName) { if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) { - throw new FormatException("Absolute name '$absoluteName' does not start " + throw FormatException("Absolute name '$absoluteName' does not start " "with '$_ABSOLUTE_PREFIX'"); } int index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); if (index == -1 || index == _ABSOLUTE_PREFIX.length) { - throw new FormatException("Absolute name '$absoluteName' does not have " + throw FormatException("Absolute name '$absoluteName' does not have " "a bucket name"); } if (index == absoluteName.length - 1) { - throw new FormatException("Absolute name '$absoluteName' does not have " + throw FormatException("Absolute name '$absoluteName' does not have " "an object name"); } bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); @@ -38,11 +38,11 @@ class _StorageImpl implements Storage { final storage_api.StorageApi _api; _StorageImpl(http.Client client, this.project) - : _api = new storage_api.StorageApi(client); + : _api = storage_api.StorageApi(client); Future createBucket(String bucketName, {PredefinedAcl predefinedAcl, Acl acl}) { - var bucket = new storage_api.Bucket()..name = bucketName; + var bucket = storage_api.Bucket()..name = bucketName; var predefinedName = predefinedAcl != null ? 
predefinedAcl._name : null; if (acl != null) { bucket.acl = acl._toBucketAccessControlList(); @@ -58,7 +58,7 @@ class _StorageImpl implements Storage { Bucket bucket(String bucketName, {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}) { - return new _BucketImpl( + return _BucketImpl( this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); } @@ -76,27 +76,27 @@ class _StorageImpl implements Storage { Future bucketInfo(String bucketName) { return _api.buckets .get(bucketName, projection: 'full') - .then((bucket) => new _BucketInfoImpl(bucket)); + .then((bucket) => _BucketInfoImpl(bucket)); } Stream listBucketNames() { Future<_BucketPageImpl> firstPage(int pageSize) { return _listBuckets(pageSize, null) - .then((response) => new _BucketPageImpl(this, pageSize, response)); + .then((response) => _BucketPageImpl(this, pageSize, response)); } - return new StreamFromPages(firstPage).stream; + return StreamFromPages(firstPage).stream; } - Future> pageBucketNames({int pageSize: 50}) { + Future> pageBucketNames({int pageSize = 50}) { return _listBuckets(pageSize, null).then((response) { - return new _BucketPageImpl(this, pageSize, response); + return _BucketPageImpl(this, pageSize, response); }); } Future copyObject(String src, String dest) { - var srcName = new _AbsoluteName.parse(src); - var destName = new _AbsoluteName.parse(dest); + var srcName = _AbsoluteName.parse(src); + var destName = _AbsoluteName.parse(dest); return _api.objects .copy(null, srcName.bucketName, srcName.objectName, destName.bucketName, destName.objectName) @@ -122,7 +122,7 @@ class _BucketInfoImpl implements BucketInfo { String get id => _bucket.id; - Acl get acl => new Acl._fromBucketAcl(_bucket); + Acl get acl => Acl._fromBucketAcl(_bucket); } /// Bucket API implementation providing access to objects. @@ -148,7 +148,7 @@ class _BucketImpl implements Bucket { String contentType}) { storage_api.Object object; if (metadata == null) { - metadata = new _ObjectMetadata(acl: acl, contentType: contentType); + metadata = _ObjectMetadata(acl: acl, contentType: contentType); } else { if (acl != null) { metadata = metadata.replace(acl: acl); @@ -176,7 +176,7 @@ class _BucketImpl implements Bucket { // Fill properties not passed in metadata. object.name = objectName; - var sink = new _MediaUploadStreamSink( + var sink = _MediaUploadStreamSink( _api, bucketName, objectName, object, predefinedName, length); return sink; } @@ -202,22 +202,21 @@ class _BucketImpl implements Bucket { } if (offset != 0 && length == null) { - throw new ArgumentError( - 'length must have a value if offset is non-zero.'); + throw ArgumentError('length must have a value if offset is non-zero.'); } var options = storage_api.DownloadOptions.FullMedia; if (length != null) { if (length <= 0) { - throw new ArgumentError.value( + throw ArgumentError.value( length, 'length', 'If provided, length must greater than zero.'); } // For ByteRange, end is *inclusive*. 
var end = offset + length - 1; - var range = new storage_api.ByteRange(offset, end); + var range = storage_api.ByteRange(offset, end); assert(range.length == length); - options = new storage_api.PartialDownloadOptions(range); + options = storage_api.PartialDownloadOptions(range); } commons.Media media = await _api.objects @@ -229,7 +228,7 @@ class _BucketImpl implements Bucket { Future info(String objectName) { return _api.objects .get(bucketName, objectName, projection: 'full') - .then((object) => new _ObjectInfoImpl(object as storage_api.Object)); + .then((object) => _ObjectInfoImpl(object as storage_api.Object)); } Future delete(String objectName) { @@ -239,18 +238,18 @@ class _BucketImpl implements Bucket { Stream list({String prefix}) { Future<_ObjectPageImpl> firstPage(int pageSize) { return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) - .then((response) => - new _ObjectPageImpl(this, prefix, pageSize, response)); + .then( + (response) => _ObjectPageImpl(this, prefix, pageSize, response)); } - return new StreamFromPages(firstPage).stream; + return StreamFromPages(firstPage).stream; } - Future> page({String prefix, int pageSize: 50}) { + Future> page({String prefix, int pageSize = 50}) { return _listObjects( bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) .then((response) { - return new _ObjectPageImpl(this, prefix, pageSize, response); + return _ObjectPageImpl(this, prefix, pageSize, response); }); } @@ -259,10 +258,10 @@ class _BucketImpl implements Bucket { _ObjectMetadata md = metadata; var object = md._object; if (md._object.acl == null && _defaultObjectAcl == null) { - throw new ArgumentError('ACL is required for update'); + throw ArgumentError('ACL is required for update'); } if (md.contentType == null) { - throw new ArgumentError('Content-Type is required for update'); + throw ArgumentError('Content-Type is required for update'); } if (md._object.acl == null) { md._object.acl = _defaultObjectAcl._toObjectAccessControlList(); @@ -287,7 +286,7 @@ class _BucketPageImpl implements Page { final List items; _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) - : items = new List(response.items != null ? response.items.length : 0), + : items = List(response.items != null ? response.items.length : 0), _nextPageToken = response.nextPageToken { for (int i = 0; i < items.length; i++) { items[i] = response.items[i].name; @@ -297,11 +296,11 @@ class _BucketPageImpl implements Page { bool get isLast => _nextPageToken == null; Future> next({int pageSize}) { - if (isLast) return new Future.value(null); + if (isLast) return Future.value(null); if (pageSize == null) pageSize = this._pageSize; return _storage._listBuckets(pageSize, _nextPageToken).then((response) { - return new _BucketPageImpl(_storage, pageSize, response); + return _BucketPageImpl(_storage, pageSize, response); }); } } @@ -315,19 +314,19 @@ class _ObjectPageImpl implements Page { _ObjectPageImpl( this._bucket, this._prefix, this._pageSize, storage_api.Objects response) - : items = new List((response.items != null ? response.items.length : 0) + + : items = List((response.items != null ? response.items.length : 0) + (response.prefixes != null ? 
response.prefixes.length : 0)), _nextPageToken = response.nextPageToken { var prefixes = 0; if (response.prefixes != null) { for (int i = 0; i < response.prefixes.length; i++) { - items[i] = new BucketEntry._directory(response.prefixes[i]); + items[i] = BucketEntry._directory(response.prefixes[i]); } prefixes = response.prefixes.length; } if (response.items != null) { for (int i = 0; i < response.items.length; i++) { - items[prefixes + i] = new BucketEntry._object(response.items[i].name); + items[prefixes + i] = BucketEntry._object(response.items[i].name); } } } @@ -335,14 +334,14 @@ class _ObjectPageImpl implements Page { bool get isLast => _nextPageToken == null; Future> next({int pageSize}) { - if (isLast) return new Future.value(null); + if (isLast) return Future.value(null); if (pageSize == null) pageSize = this._pageSize; return _bucket ._listObjects(_bucket.bucketName, _prefix, _DIRECTORY_DELIMITER, pageSize, _nextPageToken) .then((response) { - return new _ObjectPageImpl(_bucket, _prefix, pageSize, response); + return _ObjectPageImpl(_bucket, _prefix, pageSize, response); }); } } @@ -362,7 +361,7 @@ class _ObjectInfoImpl implements ObjectInfo { _ObjectInfoImpl(storage_api.Object object) : _object = object, - _metadata = new _ObjectMetadata._(object); + _metadata = _ObjectMetadata._(object); String get name => _object.name; @@ -388,7 +387,7 @@ class _ObjectInfoImpl implements ObjectInfo { ObjectGeneration get generation { if (_generation == null) { - _generation = new _ObjectGenerationImpl( + _generation = _ObjectGenerationImpl( _object.generation, int.parse(_object.metageneration)); } return _generation; @@ -412,7 +411,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentDisposition, String contentLanguage, Map custom}) - : _object = new storage_api.Object() { + : _object = storage_api.Object() { _object.acl = acl != null ? acl._toObjectAccessControlList() : null; _object.contentType = contentType; _object.contentEncoding = contentEncoding; @@ -426,7 +425,7 @@ class _ObjectMetadata implements ObjectMetadata { Acl get acl { if (_cachedAcl == null) { - _cachedAcl = new Acl._fromObjectAcl(_object); + _cachedAcl = Acl._fromObjectAcl(_object); } return _cachedAcl; } @@ -443,7 +442,7 @@ class _ObjectMetadata implements ObjectMetadata { ObjectGeneration get generation { if (_cachedGeneration == null) { - _cachedGeneration = new ObjectGeneration( + _cachedGeneration = ObjectGeneration( _object.generation, int.parse(_object.metageneration)); } return _cachedGeneration; @@ -452,7 +451,7 @@ class _ObjectMetadata implements ObjectMetadata { Map get custom { if (_object.metadata == null) return null; if (_cachedCustom == null) { - _cachedCustom = new UnmodifiableMapView(_object.metadata); + _cachedCustom = UnmodifiableMapView(_object.metadata); } return _cachedCustom; } @@ -465,7 +464,7 @@ class _ObjectMetadata implements ObjectMetadata { String contentDisposition, String contentLanguage, Map custom}) { - return new _ObjectMetadata( + return _ObjectMetadata( acl: acl != null ? acl : this.acl, contentType: contentType != null ? contentType : this.contentType, contentEncoding: @@ -476,7 +475,7 @@ class _ObjectMetadata implements ObjectMetadata { : this.contentEncoding, contentLanguage: contentLanguage != null ? contentLanguage : this.contentEncoding, - custom: custom != null ? new Map.from(custom) : this.custom); + custom: custom != null ? 
Map.from(custom) : this.custom); } } @@ -493,11 +492,11 @@ class _MediaUploadStreamSink implements StreamSink> { final int _length; final int _maxNormalUploadLength; int _bufferLength = 0; - final List> buffer = new List>(); - final _controller = new StreamController>(sync: true); + final List> buffer = List>(); + final _controller = StreamController>(sync: true); StreamSubscription _subscription; StreamController> _resumableController; - final _doneCompleter = new Completer(); + final _doneCompleter = Completer(); static const int _STATE_LENGTH_KNOWN = 0; static const int _STATE_PROBING_LENGTH = 1; @@ -552,7 +551,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_bufferLength > _maxNormalUploadLength) { // Start resumable upload. // TODO: Avoid using another stream-controller. - _resumableController = new StreamController>(sync: true); + _resumableController = StreamController>(sync: true); buffer.forEach(_resumableController.add); _startResumableUpload(_resumableController.stream, _length); _state = _STATE_DECIDED_RESUMABLE; @@ -567,8 +566,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _STATE_PROBING_LENGTH) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. - _startNormalUpload( - new Stream>.fromIterable(buffer), _bufferLength); + _startNormalUpload(Stream>.fromIterable(buffer), _bufferLength); } else { _resumableController.close(); } @@ -596,7 +594,7 @@ class _MediaUploadStreamSink implements StreamSink> { var contentType = _object.contentType != null ? _object.contentType : 'application/octet-stream'; - var media = new storage_api.Media(stream, length, contentType: contentType); + var media = storage_api.Media(stream, length, contentType: contentType); _api.objects .insert(_object, _bucketName, name: _objectName, @@ -604,7 +602,7 @@ class _MediaUploadStreamSink implements StreamSink> { uploadMedia: media, uploadOptions: storage_api.UploadOptions.Default) .then((response) { - _doneCompleter.complete(new _ObjectInfoImpl(response)); + _doneCompleter.complete(_ObjectInfoImpl(response)); }, onError: _completeError); } @@ -612,7 +610,7 @@ class _MediaUploadStreamSink implements StreamSink> { var contentType = _object.contentType != null ? _object.contentType : 'application/octet-stream'; - var media = new storage_api.Media(stream, length, contentType: contentType); + var media = storage_api.Media(stream, length, contentType: contentType); _api.objects .insert(_object, _bucketName, name: _objectName, @@ -620,7 +618,7 @@ class _MediaUploadStreamSink implements StreamSink> { uploadMedia: media, uploadOptions: storage_api.UploadOptions.Resumable) .then((response) { - _doneCompleter.complete(new _ObjectInfoImpl(response)); + _doneCompleter.complete(_ObjectInfoImpl(response)); }, onError: _completeError); } } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index d3edf2f3..9ab01ce0 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -113,26 +113,26 @@ class Acl { int _cachedHashCode; /// The entries in the ACL. - List get entries => new UnmodifiableListView(_entries); + List get entries => UnmodifiableListView(_entries); /// Create a new ACL with a list of ACL entries. - Acl(Iterable entries) : _entries = new List.from(entries); + Acl(Iterable entries) : _entries = List.from(entries); Acl._fromBucketAcl(storage_api.Bucket bucket) - : _entries = new List(bucket.acl == null ? 0 : bucket.acl.length) { + : _entries = List(bucket.acl == null ? 
0 : bucket.acl.length) { if (bucket.acl != null) { for (int i = 0; i < bucket.acl.length; i++) { - _entries[i] = new AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), + _entries[i] = AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), _aclPermissionFromRole(bucket.acl[i].role)); } } } Acl._fromObjectAcl(storage_api.Object object) - : _entries = new List(object.acl == null ? 0 : object.acl.length) { + : _entries = List(object.acl == null ? 0 : object.acl.length) { if (object.acl != null) { for (int i = 0; i < object.acl.length; i++) { - _entries[i] = new AclEntry(_aclScopeFromEntity(object.acl[i].entity), + _entries[i] = AclEntry(_aclScopeFromEntity(object.acl[i].entity), _aclPermissionFromRole(object.acl[i].role)); } } @@ -143,14 +143,14 @@ class Acl { String tmp = entity.substring(5); int at = tmp.indexOf('@'); if (at != -1) { - return new AccountScope(tmp); + return AccountScope(tmp); } else { - return new StorageIdScope(tmp); + return StorageIdScope(tmp); } } else if (entity.startsWith('group-')) { - return new GroupScope(entity.substring(6)); + return GroupScope(entity.substring(6)); } else if (entity.startsWith('domain-')) { - return new DomainScope(entity.substring(7)); + return DomainScope(entity.substring(7)); } else if (entity.startsWith('allAuthenticatedUsers-')) { return AclScope.allAuthenticated; } else if (entity.startsWith('allUsers-')) { @@ -159,18 +159,17 @@ class Acl { String tmp = entity.substring(8); int dash = tmp.indexOf('-'); if (dash != -1) { - return new ProjectScope( - tmp.substring(dash + 1), tmp.substring(0, dash)); + return ProjectScope(tmp.substring(dash + 1), tmp.substring(0, dash)); } } - return new OpaqueScope(entity); + return OpaqueScope(entity); } AclPermission _aclPermissionFromRole(String role) { if (role == 'READER') return AclPermission.READ; if (role == 'WRITER') return AclPermission.WRITE; if (role == 'OWNER') return AclPermission.FULL_CONTROL; - throw new UnsupportedError( + throw UnsupportedError( "Server returned a unsupported permission role '$role'"); } @@ -216,14 +215,14 @@ class AclEntry { AclEntry(this.scope, this.permission); storage_api.BucketAccessControl _toBucketAccessControl() { - var acl = new storage_api.BucketAccessControl(); + var acl = storage_api.BucketAccessControl(); acl.entity = scope._storageEntity; acl.role = permission._storageBucketRole; return acl; } storage_api.ObjectAccessControl _toObjectAccessControl() { - var acl = new storage_api.ObjectAccessControl(); + var acl = storage_api.ObjectAccessControl(); acl.entity = scope._storageEntity; acl.role = permission._storageObjectRole; return acl; @@ -291,10 +290,10 @@ abstract class AclScope { final int _type; /// ACL scope for all authenticated users. - static AllAuthenticatedScope allAuthenticated = new AllAuthenticatedScope(); + static AllAuthenticatedScope allAuthenticated = AllAuthenticatedScope(); /// ACL scope for all users. - static AllUsersScope allUsers = new AllUsersScope(); + static AllUsersScope allUsers = AllUsersScope(); AclScope._(this._type, this._id); @@ -397,17 +396,17 @@ class AllUsersScope extends AclScope { /// Permissions for individual scopes in an ACL. class AclPermission { /// Provide read access. - static const READ = const AclPermission._('READER'); + static const READ = AclPermission._('READER'); /// Provide write access. /// /// For objects this permission is the same as [FULL_CONTROL]. - static const WRITE = const AclPermission._('WRITER'); + static const WRITE = AclPermission._('WRITER'); /// Provide full control. 
/// /// For objects this permission is the same as [WRITE]. - static const FULL_CONTROL = const AclPermission._('OWNER'); + static const FULL_CONTROL = AclPermission._('OWNER'); final String _id; @@ -442,34 +441,33 @@ class PredefinedAcl { /// Predefined ACL for the 'authenticated-read' ACL. Applies to both buckets /// and objects. static const PredefinedAcl authenticatedRead = - const PredefinedAcl._('authenticatedRead'); + PredefinedAcl._('authenticatedRead'); /// Predefined ACL for the 'private' ACL. Applies to both buckets /// and objects. - static const PredefinedAcl private = const PredefinedAcl._('private'); + static const PredefinedAcl private = PredefinedAcl._('private'); /// Predefined ACL for the 'project-private' ACL. Applies to both buckets /// and objects. - static const PredefinedAcl projectPrivate = - const PredefinedAcl._('projectPrivate'); + static const PredefinedAcl projectPrivate = PredefinedAcl._('projectPrivate'); /// Predefined ACL for the 'public-read' ACL. Applies to both buckets /// and objects. - static const PredefinedAcl publicRead = const PredefinedAcl._('publicRead'); + static const PredefinedAcl publicRead = PredefinedAcl._('publicRead'); /// Predefined ACL for the 'public-read-write' ACL. Applies only to buckets. static const PredefinedAcl publicReadWrite = - const PredefinedAcl._('publicReadWrite'); + PredefinedAcl._('publicReadWrite'); /// Predefined ACL for the 'bucket-owner-full-control' ACL. Applies only to /// objects. static const PredefinedAcl bucketOwnerFullControl = - const PredefinedAcl._('bucketOwnerFullControl'); + PredefinedAcl._('bucketOwnerFullControl'); /// Predefined ACL for the 'bucket-owner-read' ACL. Applies only to /// objects. static const PredefinedAcl bucketOwnerRead = - const PredefinedAcl._('bucketOwnerRead'); + PredefinedAcl._('bucketOwnerRead'); String toString() => 'PredefinedAcl($_name)'; } @@ -495,7 +493,7 @@ abstract class BucketInfo { /// Access to Cloud Storage abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. - static const List SCOPES = const [ + static const List SCOPES = [ storage_api.StorageApi.DevstorageFullControlScope ]; @@ -567,7 +565,7 @@ abstract class Storage { /// /// Returns a [Future] which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page of buckets. - Future> pageBucketNames({int pageSize: 50}); + Future> pageBucketNames({int pageSize = 50}); /// Copy an object. /// @@ -797,5 +795,5 @@ abstract class Bucket { /// /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page. 
- Future> page({String prefix, int pageSize: 50}); + Future> page({String prefix, int pageSize = 50}); } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a3694c8d..3fa88476 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -16,4 +16,5 @@ dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' http_parser: '>=2.0.0 <4.0.0' mime: '>=0.9.0+3 <0.10.0' + pedantic: ^1.4.0 test: ^1.5.1 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 0c516add..3b37c548 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -13,12 +13,12 @@ import 'package:test/test.dart'; const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; -const RESPONSE_HEADERS = const {'content-type': CONTENT_TYPE_JSON_UTF8}; +const RESPONSE_HEADERS = {'content-type': CONTENT_TYPE_JSON_UTF8}; class MockClient extends http.BaseClient { - static const bytes = const [1, 2, 3, 4, 5]; + static const bytes = [1, 2, 3, 4, 5]; - final _bytesHeaderRegexp = new RegExp(r"bytes=(\d+)-(\d+)"); + final _bytesHeaderRegexp = RegExp(r"bytes=(\d+)-(\d+)"); final String hostname; final String rootPath; @@ -31,14 +31,14 @@ class MockClient extends http.BaseClient { : hostname = hostname, rootPath = rootPath, rootUri = Uri.parse('https://$hostname$rootPath') { - client = new http_testing.MockClient(handler); + client = http_testing.MockClient(handler); } void register( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => new Map()); + var map = mocks.putIfAbsent(method, () => Map()); if (path is RegExp) { - map[new RegExp('$rootPath${path.pattern}')] = handler; + map[RegExp('$rootPath${path.pattern}')] = handler; } else { map['$rootPath$path'] = handler; } @@ -46,13 +46,13 @@ class MockClient extends http.BaseClient { void registerUpload( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => new Map()); + var map = mocks.putIfAbsent(method, () => Map()); map['/upload$rootPath$path'] = handler; } void registerResumableUpload( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => new Map()); + var map = mocks.putIfAbsent(method, () => Map()); map['/resumable/upload$rootPath$path'] = handler; } @@ -86,35 +86,32 @@ class MockClient extends http.BaseClient { } Future respond(response) { - return new Future.value(new http.Response( - jsonEncode(response.toJson()), 200, + return Future.value(http.Response(jsonEncode(response.toJson()), 200, headers: RESPONSE_HEADERS)); } Future respondEmpty() { - return new Future.value( - new http.Response('', 200, headers: RESPONSE_HEADERS)); + return Future.value(http.Response('', 200, headers: RESPONSE_HEADERS)); } Future respondInitiateResumableUpload(project) { - final headers = new Map.from(RESPONSE_HEADERS); + final headers = Map.from(RESPONSE_HEADERS); headers['location'] = 'https://www.googleapis.com/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' 'Mu5bEoC9b4teJcJUKpqceCUeqKzuoP_jz2ps_dV0P0nT8OTuZQ'; - return new Future.value(new http.Response('', 200, headers: headers)); + return Future.value(http.Response('', 200, headers: headers)); } Future respondContinueResumableUpload() { - return new Future.value( - new http.Response('', 308, headers: RESPONSE_HEADERS)); + return Future.value(http.Response('', 308, headers: RESPONSE_HEADERS)); } Future 
respondBytes(http.Request request) async { expect(request.url.queryParameters['alt'], 'media'); var myBytes = bytes; - var headers = new Map.from(RESPONSE_HEADERS); + var headers = Map.from(RESPONSE_HEADERS); var range = request.headers['range']; if (range != null) { @@ -128,32 +125,32 @@ class MockClient extends http.BaseClient { headers['content-range'] = 'bytes $start-$end/'; } - return new http.Response.bytes(myBytes, 200, headers: headers); + return http.Response.bytes(myBytes, 200, headers: headers); } Future respondError(int statusCode) { var error = { 'error': {'code': statusCode, 'message': 'error'} }; - return new Future.value(new http.Response(jsonEncode(error), statusCode, + return Future.value(http.Response(jsonEncode(error), statusCode, headers: RESPONSE_HEADERS)); } Future processNormalMediaUpload(http.Request request) { - var completer = new Completer(); + var completer = Completer(); var contentType = - new http_parser.MediaType.parse(request.headers['content-type']); + http_parser.MediaType.parse(request.headers['content-type']); expect(contentType.mimeType, 'multipart/related'); var boundary = contentType.parameters['boundary']; var partCount = 0; String json; - new Stream.fromIterable([ + Stream.fromIterable([ request.bodyBytes, [13, 10] ]) - .transform(new mime.MimeMultipartTransformer(boundary)) + .transform(mime.MimeMultipartTransformer(boundary)) .listen(((mime.MimeMultipart mimeMultipart) { var contentType = mimeMultipart.headers['content-type']; partCount++; @@ -171,7 +168,7 @@ class MockClient extends http.BaseClient { .fold('', (p, e) => '$p$e') .then(base64.decode) .then((bytes) { - completer.complete(new NormalMediaUpload(json, bytes, contentType)); + completer.complete(NormalMediaUpload(json, bytes, contentType)); }); } else { // Exactly two parts expected. @@ -203,15 +200,15 @@ class TraceClient extends http.BaseClient { print('--- START REQUEST ---'); print(utf8.decode(body)); print('--- END REQUEST ---'); - var r = new RequestImpl(request.method, request.url, body); + var r = RequestImpl(request.method, request.url, body); r.headers.addAll(request.headers); return client.send(r).then((http.StreamedResponse rr) { return rr.stream.toBytes().then((body) { print('--- START RESPONSE ---'); print(utf8.decode(body)); print('--- END RESPONSE ---'); - return new http.StreamedResponse( - new http.ByteStream.fromBytes(body), rr.statusCode, + return http.StreamedResponse( + http.ByteStream.fromBytes(body), rr.statusCode, headers: rr.headers); }); }); @@ -231,6 +228,6 @@ class RequestImpl extends http.BaseRequest { http.ByteStream finalize() { super.finalize(); - return new http.ByteStream.fromBytes(_body); + return http.ByteStream.fromBytes(_body); } } diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index d3909935..a4e70fd2 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -33,7 +33,7 @@ const String DEFAULT_KEY_LOCATION = // // So this can make tests flaky. The following delay is introduced as an // attempt to account for that. -const STORAGE_LIST_DELAY = const Duration(seconds: 5); +const STORAGE_LIST_DELAY = Duration(seconds: 5); bool onBot() { // When running on the package-bot the current user is chrome-bot. @@ -49,7 +49,7 @@ bool onBot() { // Get the service key from the specified location. 
Future serviceKeyJson(String serviceKeyLocation) { if (!serviceKeyLocation.startsWith('gs://')) { - return new File(serviceKeyLocation).readAsString(); + return File(serviceKeyLocation).readAsString(); } Future future; if (onBot()) { @@ -62,21 +62,21 @@ Future serviceKeyJson(String serviceKeyLocation) { } return future.then((result) { if (result.exitCode != 0) { - throw new Exception('Failed to run gsutil, ${result.stderr}'); + throw Exception('Failed to run gsutil, ${result.stderr}'); } return result.stdout.toString(); }); } -typedef Future AuthCallback(String project, http.Client client); +typedef AuthCallback = Future Function(String project, http.Client client); Future withAuthClient(List scopes, AuthCallback callback, - {bool trace: false}) { + {bool trace = false}) { String project = Platform.environment[PROJECT_ENV]; String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; if (!onBot() && (project == null || serviceKeyLocation == null)) { - throw new StateError( + throw StateError( 'Environment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' 'required when not running on the package bot'); } @@ -86,11 +86,11 @@ Future withAuthClient(List scopes, AuthCallback callback, serviceKeyLocation != null ? serviceKeyLocation : DEFAULT_KEY_LOCATION; return serviceKeyJson(serviceKeyLocation).then((keyJson) { - var creds = new auth.ServiceAccountCredentials.fromJson(keyJson); + var creds = auth.ServiceAccountCredentials.fromJson(keyJson); return auth .clientViaServiceAccount(creds, scopes) .then((http.Client client) { - if (trace) client = new TraceClient(client); + if (trace) client = TraceClient(client); return callback(project, client); }); }); diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index deabb916..93b098e6 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -39,24 +39,25 @@ import '../error_matchers.dart'; import 'utils.dart'; Future sleep(Duration duration) { - var completer = new Completer(); - new Timer(duration, completer.complete); + var completer = Completer(); + Timer(duration, completer.complete); return completer.future; } Future> consumePages(FirstPageProvider provider) { - return new StreamFromPages(provider).stream.toList(); + return StreamFromPages(provider).stream.toList(); } void runTests(Datastore datastore, String namespace) { - Partition partition = new Partition(namespace); + Partition partition = Partition(namespace); - Future withTransaction(FutureOr f(Transaction t), {bool xg: false}) { + Future withTransaction(FutureOr f(Transaction t), + {bool xg = false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } Future> insert(List entities, List autoIdEntities, - {bool transactional: true}) { + {bool transactional = true}) { if (transactional) { return withTransaction((Transaction transaction) { return datastore @@ -65,7 +66,7 @@ void runTests(Datastore datastore, String namespace) { autoIdInserts: autoIdEntities, transaction: transaction) .then((result) { - if (autoIdEntities != null && autoIdEntities.length > 0) { + if (autoIdEntities != null && autoIdEntities.isNotEmpty) { expect( result.autoIdInsertKeys.length, equals(autoIdEntities.length)); } @@ -76,7 +77,7 @@ void runTests(Datastore datastore, String namespace) { return datastore .commit(inserts: entities, autoIdInserts: autoIdEntities) .then((result) { - if (autoIdEntities != null && autoIdEntities.length 
> 0) { + if (autoIdEntities != null && autoIdEntities.isNotEmpty) { expect(result.autoIdInsertKeys.length, equals(autoIdEntities.length)); } return result.autoIdInsertKeys; @@ -84,7 +85,7 @@ void runTests(Datastore datastore, String namespace) { } } - Future delete(List keys, {bool transactional: true}) { + Future delete(List keys, {bool transactional = true}) { if (transactional) { return withTransaction((Transaction t) { return datastore @@ -96,7 +97,7 @@ void runTests(Datastore datastore, String namespace) { } } - Future> lookup(List keys, {bool transactional: true}) { + Future> lookup(List keys, {bool transactional = true}) { if (transactional) { return withTransaction((Transaction transaction) { return datastore.lookup(keys, transaction: transaction); @@ -106,8 +107,8 @@ void runTests(Datastore datastore, String namespace) { } } - bool isValidKey(Key key, {bool ignoreIds: false}) { - if (key.elements.length == 0) return false; + bool isValidKey(Key key, {bool ignoreIds = false}) { + if (key.elements.isEmpty) return false; for (var element in key.elements) { if (element.kind == null || element.kind is! String) return false; @@ -121,7 +122,7 @@ void runTests(Datastore datastore, String namespace) { return true; } - bool compareKey(Key a, Key b, {bool ignoreIds: false}) { + bool compareKey(Key a, Key b, {bool ignoreIds = false}) { if (a.partition != b.partition) return false; if (a.elements.length != b.elements.length) return false; for (int i = 0; i < a.elements.length; i++) { @@ -131,7 +132,7 @@ void runTests(Datastore datastore, String namespace) { return true; } - bool compareEntity(Entity a, Entity b, {bool ignoreIds: false}) { + bool compareEntity(Entity a, Entity b, {bool ignoreIds = false}) { if (!compareKey(a.key, b.key, ignoreIds: ignoreIds)) return false; if (a.properties.length != b.properties.length) return false; for (var key in a.properties.keys) { @@ -166,7 +167,7 @@ void runTests(Datastore datastore, String namespace) { group('e2e_datastore', () { group('insert', () { Future> testInsert(List entities, - {bool transactional: false, bool xg: false, bool unnamed: true}) { + {bool transactional = false, bool xg = false, bool unnamed = true}) { Future> test(Transaction transaction) { return datastore .commit(autoIdInserts: entities, transaction: transaction) @@ -194,7 +195,7 @@ void runTests(Datastore datastore, String namespace) { } Future> testInsertNegative(List entities, - {bool transactional: false, bool xg: false}) { + {bool transactional = false, bool xg = false}) { test(Transaction transaction) { expect( datastore.commit( @@ -300,10 +301,10 @@ void runTests(Datastore datastore, String namespace) { group('lookup', () { Future testLookup(List keysToLookup, List entitiesToLookup, - {bool transactional: false, - bool xg: false, - bool negative: false, - bool named: false}) { + {bool transactional = false, + bool xg = false, + bool negative = false, + bool named = false}) { expect(keysToLookup.length, equals(entitiesToLookup.length)); for (var i = 0; i < keysToLookup.length; i++) { expect( @@ -391,7 +392,7 @@ void runTests(Datastore datastore, String namespace) { group('delete', () { Future testDelete(List keys, - {bool transactional: false, bool xg: false}) { + {bool transactional = false, bool xg = false}) { Future test(Transaction transaction) { return datastore.commit(deletes: keys).then((_) { if (transaction != null) { @@ -458,7 +459,7 @@ void runTests(Datastore datastore, String namespace) { }); group('rollback', () { - Future testRollback(List keys, {bool xg: 
false}) { + Future testRollback(List keys, {bool xg = false}) { return withTransaction((Transaction transaction) { return datastore .lookup(keys, transaction: transaction) @@ -487,7 +488,7 @@ void runTests(Datastore datastore, String namespace) { group('empty_commit', () { Future testEmptyCommit(List keys, - {bool transactional: false, bool xg: false}) { + {bool transactional = false, bool xg = false}) { Future test(Transaction transaction) { return datastore .lookup(keys, transaction: transaction) @@ -538,17 +539,17 @@ void runTests(Datastore datastore, String namespace) { group('conflicting_transaction', () { Future testConflictingTransaction(List entities, - {bool xg: false}) { + {bool xg = false}) { Future test(List entities, Transaction transaction, value) { // Change entities: - var changedEntities = new List(entities.length); + var changedEntities = List(entities.length); for (int i = 0; i < entities.length; i++) { var entity = entities[i]; - var newProperties = new Map.from(entity.properties); + var newProperties = Map.from(entity.properties); for (var prop in newProperties.keys) { newProperties[prop] = "${newProperties[prop]}conflict$value"; } - changedEntities[i] = new Entity(entity.key, newProperties); + changedEntities[i] = Entity(entity.key, newProperties); } return datastore.commit( inserts: changedEntities, transaction: transaction); @@ -605,12 +606,12 @@ void runTests(Datastore datastore, String namespace) { Future> testQuery(String kind, {List filters, List orders, - bool transactional: false, - bool xg: false, + bool transactional = false, + bool xg = false, int offset, int limit}) { Future> test(Transaction transaction) { - var query = new Query( + var query = Query( kind: kind, filters: filters, orders: orders, @@ -637,9 +638,9 @@ void runTests(Datastore datastore, String namespace) { Future testQueryAndCompare(String kind, List expectedEntities, {List filters, List orders, - bool transactional: false, - bool xg: false, - bool correctOrder: true, + bool transactional = false, + bool xg = false, + bool correctOrder = true, int offset, int limit}) { return testQuery(kind, @@ -671,7 +672,7 @@ void runTests(Datastore datastore, String namespace) { } Future testOffsetLimitQuery(String kind, List expectedEntities, - {List orders, bool transactional: false, bool xg: false}) { + {List orders, bool transactional = false, bool xg = false}) { // We query for all subsets of expectedEntities // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. 
List queryTests = []; @@ -744,24 +745,22 @@ void runTests(Datastore datastore, String namespace) { assert(indexedEntity.length == 1); var filters = [ - new Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), - new Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), + Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), + Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), ]; var listFilters = [ - new Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) + Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) ]; var indexedPropertyFilter = [ - new Filter( - FilterRelation.Equal, TEST_INDEXED_PROPERTY, QUERY_INDEX_VALUE), - new Filter(FilterRelation.Equal, TEST_BLOB_INDEXED_PROPERTY, + Filter(FilterRelation.Equal, TEST_INDEXED_PROPERTY, QUERY_INDEX_VALUE), + Filter(FilterRelation.Equal, TEST_BLOB_INDEXED_PROPERTY, TEST_BLOB_INDEXED_VALUE) ]; var unIndexedPropertyFilter = [ - new Filter( - FilterRelation.Equal, TEST_UNINDEXED_PROPERTY, QUERY_INDEX_VALUE) + Filter(FilterRelation.Equal, TEST_UNINDEXED_PROPERTY, QUERY_INDEX_VALUE) ]; - var orders = [new Order(OrderDirection.Decending, QUERY_KEY)]; + var orders = [Order(OrderDirection.Decending, QUERY_KEY)]; test('query', () { return insert(stringNamedEntities, []).then((keys) { @@ -874,17 +873,16 @@ void runTests(Datastore datastore, String namespace) { * + SubSubKind:1 -- This is a real entity of kind SubSubKind * + SubSubKind2:1 -- This is a real entity of kind SubSubKind2 */ - var rootKey = - new Key([new KeyElement('RootKind', 1)], partition: partition); - var subKey = new Key.fromParent('SubKind', 1, parent: rootKey); - var subSubKey = new Key.fromParent('SubSubKind', 1, parent: subKey); - var subSubKey2 = new Key.fromParent('SubSubKind2', 1, parent: subKey); + var rootKey = Key([KeyElement('RootKind', 1)], partition: partition); + var subKey = Key.fromParent('SubKind', 1, parent: rootKey); + var subSubKey = Key.fromParent('SubSubKind', 1, parent: subKey); + var subSubKey2 = Key.fromParent('SubSubKind2', 1, parent: subKey); var properties = {'foo': 'bar'}; - var entity = new Entity(subSubKey, properties); - var entity2 = new Entity(subSubKey2, properties); + var entity = Entity(subSubKey, properties); + var entity2 = Entity(subSubKey2, properties); - var orders = [new Order(OrderDirection.Ascending, '__key__')]; + var orders = [Order(OrderDirection.Ascending, '__key__')]; return datastore.commit(inserts: [entity, entity2]).then((_) { var futures = [ @@ -912,8 +910,7 @@ void runTests(Datastore datastore, String namespace) { // Query by ancestor. 
// - by [rootKey] () { - var ancestorQuery = - new Query(ancestorKey: rootKey, orders: orders); + var ancestorQuery = Query(ancestorKey: rootKey, orders: orders); return consumePages((_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { @@ -924,8 +921,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subKey] () { - var ancestorQuery = - new Query(ancestorKey: subKey, orders: orders); + var ancestorQuery = Query(ancestorKey: subKey, orders: orders); return consumePages((_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { @@ -936,7 +932,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey] () { - var ancestorQuery = new Query(ancestorKey: subSubKey); + var ancestorQuery = Query(ancestorKey: subSubKey); return consumePages((_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { @@ -946,7 +942,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey2] () { - var ancestorQuery = new Query(ancestorKey: subSubKey2); + var ancestorQuery = Query(ancestorKey: subSubKey2); return consumePages((_) => datastore.query(ancestorQuery, partition: partition)) .then((results) { @@ -958,7 +954,7 @@ void runTests(Datastore datastore, String namespace) { // Query by ancestor and kind. // - by [rootKey] + 'SubSubKind' () { - var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind'); + var query = Query(ancestorKey: rootKey, kind: 'SubSubKind'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -968,7 +964,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [rootKey] + 'SubSubKind2' () { - var query = new Query(ancestorKey: rootKey, kind: 'SubSubKind2'); + var query = Query(ancestorKey: rootKey, kind: 'SubSubKind2'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -978,7 +974,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey] + 'SubSubKind' () { - var query = new Query(ancestorKey: subSubKey, kind: 'SubSubKind'); + var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -988,8 +984,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey2] + 'SubSubKind2' () { - var query = - new Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); + var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -999,8 +994,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey] + 'SubSubKind2' () { - var query = - new Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); + var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -1009,8 +1003,7 @@ void runTests(Datastore datastore, String namespace) { }, // - by [subSubKey2] + 'SubSubKind' () { - var query = - new Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); + var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); return consumePages( (_) => datastore.query(query, partition: partition)) .then((List results) { @@ -1032,8 +1025,8 @@ void runTests(Datastore datastore, String namespace) { Future cleanupDB(Datastore db, String namespace) { Future> getKinds(String namespace) { - var partition = new 
Partition(namespace); - var q = new Query(kind: '__kind__'); + var partition = Partition(namespace); + var q = Query(kind: '__kind__'); return consumePages((_) => db.query(q, partition: partition)) .then((List entities) { return entities @@ -1045,11 +1038,11 @@ Future cleanupDB(Datastore db, String namespace) { // cleanup() will call itself again as long as the DB is not clean. cleanup(String namespace, String kind) { - var partition = new Partition(namespace); - var q = new Query(kind: kind, limit: 500); + var partition = Partition(namespace); + var q = Query(kind: kind, limit: 500); return consumePages((_) => db.query(q, partition: partition)) .then((List entities) { - if (entities.length == 0) return null; + if (entities.isEmpty) return null; print('[cleanupDB]: Removing left-over ${entities.length} entities'); var deletes = entities.map((e) => e.key).toList(); @@ -1080,7 +1073,7 @@ Future waitUntilEntitiesHelper( } Future waitForKeys(String kind, List keys) { - var q = new Query(kind: kind); + var q = Query(kind: kind); return consumePages((_) => db.query(q, partition: p)).then((entities) { for (var key in keys) { bool found = false; @@ -1108,7 +1101,7 @@ Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; await withAuthClient(scopes, (String project, Client httpClient) { - datastore = new datastore_impl.DatastoreImpl(httpClient, project); + datastore = datastore_impl.DatastoreImpl(httpClient, project); client = httpClient; return cleanupDB(datastore, null); }); diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 05664388..2a42e4af 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -16,12 +16,12 @@ const TEST_INDEXED_PROPERTY = 'indexedProp'; const TEST_INDEXED_PROPERTY_VALUE_PREFIX = 'indexedValue'; const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; -final TEST_BLOB_INDEXED_VALUE = new BlobValue([0xaa, 0xaa, 0xff, 0xff]); +final TEST_BLOB_INDEXED_VALUE = BlobValue([0xaa, 0xaa, 0xff, 0xff]); Key buildKey(int i, - {Function idFunction, String kind: TEST_KIND, Partition p}) { - var path = [new KeyElement(kind, idFunction == null ? null : idFunction(i))]; - return new Key(path, partition: p); + {Function idFunction, String kind = TEST_KIND, Partition p}) { + var path = [KeyElement(kind, idFunction == null ? 
null : idFunction(i))]; + return Key(path, partition: p); } Map buildProperties(int i) { @@ -40,7 +40,7 @@ Map buildProperties(int i) { } List buildKeys(int from, int to, - {Function idFunction, String kind: TEST_KIND, Partition partition}) { + {Function idFunction, String kind = TEST_KIND, Partition partition}) { var keys = []; for (var i = from; i < to; i++) { keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); @@ -49,23 +49,23 @@ List buildKeys(int from, int to, } List buildEntities(int from, int to, - {Function idFunction, String kind: TEST_KIND, Partition partition}) { + {Function idFunction, String kind = TEST_KIND, Partition partition}) { var entities = []; - var unIndexedProperties = new Set(); + var unIndexedProperties = Set(); for (var i = from; i < to; i++) { var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition); var properties = buildProperties(i); unIndexedProperties.add(TEST_UNINDEXED_PROPERTY); - entities.add( - new Entity(key, properties, unIndexedProperties: unIndexedProperties)); + entities + .add(Entity(key, properties, unIndexedProperties: unIndexedProperties)); } return entities; } List buildEntityWithAllProperties(int from, int to, - {String kind: TEST_KIND, Partition partition}) { + {String kind = TEST_KIND, Partition partition}) { var us42 = const Duration(microseconds: 42); - var unIndexed = new Set.from(['blobProperty']); + var unIndexed = Set.from(['blobProperty']); Map buildProperties(int i) { return { @@ -74,10 +74,10 @@ List buildEntityWithAllProperties(int from, int to, 'intProperty': 42, 'doubleProperty': 4.2, 'stringProperty': 'foobar', - 'blobProperty': new BlobValue([0xff, 0xff, 0xaa, 0xaa]), - 'blobPropertyIndexed': new BlobValue([0xaa, 0xaa, 0xff, 0xff]), + 'blobProperty': BlobValue([0xff, 0xff, 0xaa, 0xaa]), + 'blobPropertyIndexed': BlobValue([0xaa, 0xaa, 0xff, 0xff]), 'dateProperty': - new DateTime.fromMillisecondsSinceEpoch(1, isUtc: true).add(us42), + DateTime.fromMillisecondsSinceEpoch(1, isUtc: true).add(us42), 'keyProperty': buildKey(1, idFunction: (i) => 's$i', kind: kind), 'listProperty': [ 42, @@ -93,7 +93,7 @@ List buildEntityWithAllProperties(int from, int to, var key = buildKey(i, idFunction: (i) => 'allprop$i', kind: kind, p: partition); var properties = buildProperties(i); - entities.add(new Entity(key, properties, unIndexedProperties: unIndexed)); + entities.add(Entity(key, properties, unIndexedProperties: unIndexed)); } return entities; } diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 9442e71f..085268b5 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -9,13 +9,13 @@ import 'dart:io'; import 'package:test/test.dart'; import 'package:gcloud/datastore.dart'; -const isApplicationError = const TypeMatcher(); +const isApplicationError = TypeMatcher(); -const isDataStoreError = const TypeMatcher(); -const isTransactionAbortedError = const TypeMatcher(); -const isNeedIndexError = const TypeMatcher(); -const isTimeoutError = const TypeMatcher(); +const isDataStoreError = TypeMatcher(); +const isTransactionAbortedError = TypeMatcher(); +const isNeedIndexError = TypeMatcher(); +const isTimeoutError = TypeMatcher(); -const isInt = const TypeMatcher(); +const isInt = TypeMatcher(); -const isSocketException = const TypeMatcher(); +const isSocketException = TypeMatcher(); diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index c8346d7b..92b2307a 
100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -13,7 +13,7 @@ class Foobar extends Model {} main() { group('db', () { test('default-partition', () { - var db = new DatastoreDB(null); + var db = DatastoreDB(null); // Test defaultPartition expect(db.defaultPartition.namespace, isNull); @@ -30,8 +30,8 @@ main() { }); test('non-default-partition', () { - var nsDb = new DatastoreDB(null, - defaultPartition: new Partition('foobar-namespace')); + var nsDb = + DatastoreDB(null, defaultPartition: Partition('foobar-namespace')); // Test defaultPartition expect(nsDb.defaultPartition.namespace, 'foobar-namespace'); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 5d4c4092..a4daa94c 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -136,13 +136,13 @@ class ExpandoPerson extends db.ExpandoModel { } } -Future sleep(Duration duration) => new Future.delayed(duration); +Future sleep(Duration duration) => Future.delayed(duration); void runTests(db.DatastoreDB store, String namespace) { var partition = store.newPartition(namespace); void compareModels(List expectedModels, List models, - {bool anyOrder: false}) { + {bool anyOrder = false}) { expect(models.length, equals(expectedModels.length)); if (anyOrder) { // Do expensive O(n^2) search. @@ -164,7 +164,7 @@ void runTests(db.DatastoreDB store, String namespace) { } Future testInsertLookupDelete(List objects, - {bool transactional: false}) { + {bool transactional = false}) { var keys = objects.map((db.Model obj) => obj.key).toList(); if (transactional) { @@ -217,7 +217,7 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var persons = []; for (var i = 1; i <= 10; i++) { - persons.add(new Person() + persons.add(Person() ..id = i ..parentKey = root ..age = 42 + i @@ -230,7 +230,7 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var users = []; for (var i = 1; i <= 10; i++) { - users.add(new User() + users.add(User() ..id = i ..parentKey = root ..age = 42 + i @@ -243,7 +243,7 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var expandoPersons = []; for (var i = 1; i <= 10; i++) { - dynamic expandoPerson = new ExpandoPerson() + dynamic expandoPerson = ExpandoPerson() ..parentKey = root ..id = i ..name = 'user$i'; @@ -259,18 +259,18 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var models = []; - models.add(new Person() + models.add(Person() ..id = 1 ..parentKey = root ..age = 1 ..name = 'user1'); - models.add(new User() + models.add(User() ..id = 2 ..parentKey = root ..age = 2 ..name = 'user2' ..nickname = 'nickname2'); - dynamic expandoPerson = new ExpandoPerson() + dynamic expandoPerson = ExpandoPerson() ..parentKey = root ..id = 3 ..name = 'user1'; @@ -284,7 +284,7 @@ void runTests(db.DatastoreDB store, String namespace) { var root = partition.emptyKey; var users = []; for (var i = 333; i <= 334; i++) { - users.add(new User() + users.add(User() ..id = i ..parentKey = root ..age = 42 + i @@ -293,7 +293,7 @@ void runTests(db.DatastoreDB store, String namespace) { } var persons = []; for (var i = 335; i <= 336; i++) { - persons.add(new Person() + persons.add(Person() ..id = i ..parentKey = root ..age = 42 + i @@ -312,23 +312,23 @@ void runTests(db.DatastoreDB store, String namespace) { test('auto_ids', () { var root = partition.emptyKey; var persons = 
[]; - persons.add(new Person() + persons.add(Person() ..id = 42 ..parentKey = root ..age = 80 ..name = 'user80'); // Auto id person with parentKey - persons.add(new Person() + persons.add(Person() ..parentKey = root ..age = 81 ..name = 'user81'); // Auto id person with non-root parentKey var fatherKey = persons.first.parentKey; - persons.add(new Person() + persons.add(Person() ..parentKey = fatherKey ..age = 82 ..name = 'user82'); - persons.add(new Person() + persons.add(Person() ..id = 43 ..parentKey = root ..age = 83 @@ -390,7 +390,7 @@ void runTests(db.DatastoreDB store, String namespace) { } else if (i == 10) { languages = ['foo', 'bar']; } - users.add(new User() + users.add(User() ..id = i ..parentKey = root ..wife = root.append(User, id: 42 + i) @@ -402,7 +402,7 @@ void runTests(db.DatastoreDB store, String namespace) { var expandoPersons = []; for (var i = 1; i <= 3; i++) { - dynamic expandoPerson = new ExpandoPerson() + dynamic expandoPerson = ExpandoPerson() ..parentKey = root ..id = i ..name = 'user$i' @@ -416,14 +416,14 @@ void runTests(db.DatastoreDB store, String namespace) { var LOWER_BOUND = 'user2'; - var usersSortedNameDescNicknameAsc = new List.from(users); + var usersSortedNameDescNicknameAsc = List.from(users); usersSortedNameDescNicknameAsc.sort((User a, User b) { var result = b.name.compareTo(a.name); if (result == 0) return a.nickname.compareTo(b.nickname); return result; }); - var usersSortedNameDescNicknameDesc = new List.from(users); + var usersSortedNameDescNicknameDesc = List.from(users); usersSortedNameDescNicknameDesc.sort((User a, User b) { var result = b.name.compareTo(a.name); if (result == 0) return b.nickname.compareTo(a.nickname); @@ -621,7 +621,7 @@ Future> runQueryWithExponentialBackoff( for (int i = 0; i <= 6; i++) { if (i > 0) { // Wait for 0.1s, 0.2s, ..., 12.8s - var duration = new Duration(milliseconds: 100 * (2 << i)); + var duration = Duration(milliseconds: 100 * (2 << i)); print("Running query did return less results than expected." 
"Using exponential backoff: Sleeping for $duration."); await sleep(duration); @@ -633,7 +633,7 @@ Future> runQueryWithExponentialBackoff( } } - throw new Exception( + throw Exception( "Tried running a query with exponential backoff, giving up now."); } @@ -686,9 +686,9 @@ Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; await withAuthClient(scopes, (String project, httpClient) { - var datastore = new datastore_impl.DatastoreImpl(httpClient, project); + var datastore = datastore_impl.DatastoreImpl(httpClient, project); return datastore_test.cleanupDB(datastore, null).then((_) { - store = new db.DatastoreDB(datastore); + store = db.DatastoreDB(datastore); }); }); diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 130cf2f5..1a576019 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -15,12 +15,12 @@ import 'package:gcloud/db/metamodel.dart'; List buildEntitiesWithDifferentNamespaces() { newKey(String namespace, String kind, int id) { - var partition = new Partition(namespace); - return new Key([new KeyElement(kind, id)], partition: partition); + var partition = Partition(namespace); + return Key([KeyElement(kind, id)], partition: partition); } - newEntity(String namespace, String kind, {int id: 1}) { - return new Entity(newKey(namespace, kind, id), {'ping': 'pong'}); + newEntity(String namespace, String kind, {int id = 1}) { + return Entity(newKey(namespace, kind, id), {'ping': 'pong'}); } return [ @@ -40,8 +40,8 @@ List buildEntitiesWithDifferentNamespaces() { } Future sleep(Duration duration) { - var completer = new Completer(); - new Timer(duration, completer.complete); + var completer = Completer(); + Timer(duration, completer.complete); return completer.future; } diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 22b1bbbf..5b2e403d 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -20,32 +20,32 @@ main() { // all the Model/ModelDescription classes. 
assert([test1.A, test2.A, test3.A, test4.A, test5.A] != null); - newModelDB(Symbol symbol) => new ModelDBImpl.fromLibrary(symbol); + newModelDB(Symbol symbol) => ModelDBImpl.fromLibrary(symbol); group('model_db', () { group('from_library', () { test('duplicate_kind', () { - expect(new Future.sync(() { + expect(Future.sync(() { newModelDB(#gcloud.db.model_test.duplicate_kind); }), throwsA(isStateError)); }); test('duplicate_property', () { - expect(new Future.sync(() { + expect(Future.sync(() { newModelDB(#gcloud.db.model_test.duplicate_property); }), throwsA(isStateError)); }); test('multiple_annotations', () { - expect(new Future.sync(() { + expect(Future.sync(() { newModelDB(#gcloud.db.model_test.multiple_annotations); }), throwsA(isStateError)); }); test('duplicate_fieldname', () { - expect(new Future.sync(() { + expect(Future.sync(() { newModelDB(#gcloud.db.model_test.duplicate_fieldname); }), throwsA(isStateError)); }); test('no_default_constructor', () { - expect(new Future.sync(() { + expect(Future.sync(() { newModelDB(#gcloud.db.model_test.no_default_constructor); }), throwsA(isStateError)); }); diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 83944234..db3f132b 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -80,23 +80,23 @@ main() { expect((prop.encodeValue(null, [1, 2]) as datastore.BlobValue).bytes, equals([1, 2])); expect( - (prop.encodeValue(null, new Uint8List.fromList([1, 2])) + (prop.encodeValue(null, Uint8List.fromList([1, 2])) as datastore.BlobValue) .bytes, equals([1, 2])); expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([])), - equals([])); - expect(prop.decodePrimitiveValue(null, new datastore.BlobValue([5, 6])), + expect( + prop.decodePrimitiveValue(null, datastore.BlobValue([])), equals([])); + expect(prop.decodePrimitiveValue(null, datastore.BlobValue([5, 6])), equals([5, 6])); expect( prop.decodePrimitiveValue( - null, new datastore.BlobValue(new Uint8List.fromList([5, 6]))), + null, datastore.BlobValue(Uint8List.fromList([5, 6]))), equals([5, 6])); }); test('datetime_property', () { - var utc99 = new DateTime.fromMillisecondsSinceEpoch(99, isUtc: true); + var utc99 = DateTime.fromMillisecondsSinceEpoch(99, isUtc: true); var prop = const DateTimeProperty(required: true); expect(prop.validate(null, null), isFalse); @@ -113,7 +113,7 @@ main() { }); test('list_property', () { - var prop = const ListProperty(const BoolProperty()); + var prop = const ListProperty(BoolProperty()); expect(prop.validate(null, null), isFalse); expect(prop.validate(null, []), isTrue); @@ -134,10 +134,10 @@ main() { }); test('composed_list_property', () { - var prop = const ListProperty(const CustomProperty()); + var prop = const ListProperty(CustomProperty()); - var c1 = new Custom()..customValue = 'c1'; - var c2 = new Custom()..customValue = 'c2'; + var c1 = Custom()..customValue = 'c1'; + var c2 = Custom()..customValue = 'c2'; expect(prop.validate(null, null), isFalse); expect(prop.validate(null, []), isTrue); @@ -156,11 +156,10 @@ main() { }); test('modelkey_property', () { - var datastoreKey = new datastore.Key( - [new datastore.KeyElement('MyKind', 42)], - partition: new datastore.Partition('foonamespace')); - var dbKey = new KeyMock(datastoreKey); - var modelDBMock = new ModelDBMock(datastoreKey, dbKey); + var datastoreKey = datastore.Key([datastore.KeyElement('MyKind', 42)], + partition: 
datastore.Partition('foonamespace')); + var dbKey = KeyMock(datastoreKey); + var modelDBMock = ModelDBMock(datastoreKey, dbKey); var prop = const ModelKeyProperty(required: true); expect(prop.validate(modelDBMock, null), isFalse); @@ -190,7 +189,7 @@ class Custom { class CustomProperty extends StringProperty { const CustomProperty( - {String propertyName, bool required: false, bool indexed: true}); + {String propertyName, bool required = false, bool indexed = true}); bool validate(ModelDB db, Object value) { if (required && value == null) return false; @@ -199,10 +198,10 @@ class CustomProperty extends StringProperty { Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; - return new Custom()..customValue = value as String; + return Custom()..customValue = value as String; } - Object encodeValue(ModelDB db, Object value, {bool forComparison: false}) { + Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; return (value as Custom).customValue; } @@ -248,6 +247,6 @@ class ModelDBMock implements ModelDB { String fieldNameToPropertyName(String kind, String fieldName) => null; String kindName(Type type) => null; Object toDatastoreValue(String kind, String fieldName, Object value, - {bool forComparison: false}) => + {bool forComparison = false}) => null; } diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index db4efd35..62fe793a 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -19,7 +19,7 @@ import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; - var now = new DateTime.now().millisecondsSinceEpoch; + var now = DateTime.now().millisecondsSinceEpoch; String namespace = '${Platform.operatingSystem}${now}'; datastore_impl.DatastoreImpl datastore; @@ -27,8 +27,8 @@ Future main() async { Client client; await withAuthClient(scopes, (String project, httpClient) { - datastore = new datastore_impl.DatastoreImpl(httpClient, project); - datastoreDB = new db.DatastoreDB(datastore); + datastore = datastore_impl.DatastoreImpl(httpClient, project); + datastoreDB = db.DatastoreDB(datastore); client = httpClient; }); @@ -45,7 +45,7 @@ Future main() async { }); test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); + expect(Future.delayed(const Duration(seconds: 10)), completes); }); group('datastore_test', () { @@ -53,7 +53,7 @@ Future main() async { }); test('sleep-between-test-suites', () { - expect(new Future.delayed(const Duration(seconds: 10)), completes); + expect(Future.delayed(const Duration(seconds: 10)), completes); }); group('datastore_test', () { diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 49f7111d..84b7a076 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -16,12 +16,12 @@ void main() { setUpAll(() { // Generate a unique prefix for all names generated by the tests. - var id = new DateTime.now().millisecondsSinceEpoch; + var id = DateTime.now().millisecondsSinceEpoch; prefix = 'dart-e2e-test-$id'; return withAuthClient(PubSub.SCOPES, (String _project, httpClient) async { // Share the same pubsub connection for all tests. 
- pubsub = new PubSub(httpClient, _project); + pubsub = PubSub(httpClient, _project); project = _project; client = httpClient; }); @@ -91,12 +91,12 @@ void main() { }); String generateTopicName() { - var id = new DateTime.now().millisecondsSinceEpoch; + var id = DateTime.now().millisecondsSinceEpoch; return '$prefix-topic-$id'; } String generateSubscriptionName() { - var id = new DateTime.now().millisecondsSinceEpoch; + var id = DateTime.now().millisecondsSinceEpoch; return '$prefix-subscription-$id'; } @@ -186,6 +186,6 @@ void main() { await pubsub.deleteSubscription(subscriptionName); await pubsub.deleteTopic(topicName); - }, timeout: const Timeout(const Duration(minutes: 2))); + }, timeout: const Timeout(Duration(minutes: 2))); }); } diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 675fae79..4b46dd29 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -18,7 +18,7 @@ import '../common_e2e.dart'; const String HOSTNAME = 'pubsub.googleapis.com'; const String ROOT_PATH = '/v1/'; -MockClient mockClient() => new MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); main() { group('api', () { @@ -51,10 +51,10 @@ main() { 'projects/$PROJECT/topics/test-topic', expectAsync1((http.Request request) { expect(request.body, isEmpty); - return mock.respond(new pubsub.Topic()..name = absoluteName); + return mock.respond(pubsub.Topic()..name = absoluteName); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.createTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.project, PROJECT); @@ -68,7 +68,7 @@ main() { test('create-error', () { var mock = mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.createTopic(name), throwsArgumentError); }); @@ -87,7 +87,7 @@ main() { return mock.respondEmpty(); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.deleteTopic(name).then(expectAsync1((result) { expect(result, isNull); return api.deleteTopic(absoluteName).then(expectAsync1((topic) { @@ -98,7 +98,7 @@ main() { test('delete-error', () { var mock = mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.deleteTopic(name), throwsArgumentError); }); @@ -114,10 +114,10 @@ main() { 'projects/test-project/topics/test-topic', expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); + return mock.respond(pubsub.Topic()..name = absoluteName); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.project, PROJECT); @@ -131,7 +131,7 @@ main() { test('lookup-error', () { var mock = mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badTopicNames.forEach((name) { expect(() => api.lookupTopic(name), throwsArgumentError); }); @@ -144,8 +144,7 @@ main() { addTopics(pubsub.ListTopicsResponse response, int first, int count) { response.topics = []; for (int i = 0; i < count; i++) { - response.topics - .add(new pubsub.Topic()..name = 'topic-${first + i}'); + response.topics.add(pubsub.Topic()..name = 'topic-${first + i}'); } } @@ -173,7 +172,7 @@ main() { 
expect(request.url.queryParameters['pageToken'], 'next-page'); } - var response = new pubsub.ListTopicsResponse(); + var response = pubsub.ListTopicsResponse(); var first = (pageCount - 1) * pageSize + 1; if (pageCount < totalPages) { response.nextPageToken = 'next-page'; @@ -190,7 +189,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, count, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api .listTopics() .listen(expectAsync1((_) => null, count: count)) @@ -214,7 +213,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api.listTopics().listen(expectAsync1(((_) => null), count: 70), onDone: expectAsync0(() => null)) ..pause() @@ -227,7 +226,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var count = 0; var subscription; subscription = api.listTopics().listen( @@ -252,7 +251,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50, 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api.listTopics().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') ..cancel(); @@ -262,7 +261,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 170, 50, 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var subscription; subscription = api.listTopics().listen( expectAsync1((_) => subscription.cancel()), @@ -277,7 +276,7 @@ main() { expectAsync1((request) { return mock.respondError(500); })); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var subscription; subscription = api.listTopics().listen((_) => throw 'Unexpected', onDone: expectAsync0(() => null), @@ -299,7 +298,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 51, 50, 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); int count = 0; var subscription; @@ -334,7 +333,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 0, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); @@ -354,7 +353,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 10, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); @@ -375,7 +374,7 @@ main() { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; - var completer = new Completer(); + var completer = Completer(); var mock = mockClient(); registerQueryMock(mock, n, pageSize); @@ -394,7 +393,7 @@ main() { })); } - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api.pageTopics(pageSize: pageSize).then(expectAsync1(handlePage)); return completer.future; @@ -429,11 +428,10 @@ main() { expectAsync1((request) { var requestSubscription = jsonDecode(request.body) as Map; expect(requestSubscription['topic'], absoluteTopicName); - return mock - .respond(new pubsub.Subscription()..name = absoluteName); + return mock.respond(pubsub.Subscription()..name = absoluteName); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api .createSubscription(name, topicName) .then(expectAsync1((subscription) { @@ -451,7 +449,7 @@ main() { test('create-error', () { var mock = 
mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.createSubscription(name, 'test-topic'), throwsArgumentError); @@ -472,7 +470,7 @@ main() { return mock.respondEmpty(); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.deleteSubscription(name).then(expectAsync1((result) { expect(result, isNull); return api @@ -485,7 +483,7 @@ main() { test('delete-error', () { var mock = mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.deleteSubscription(name), throwsArgumentError); }); @@ -498,14 +496,13 @@ main() { var mock = mockClient(); mock.register( 'GET', - new RegExp('projects/$PROJECT/subscriptions'), + RegExp('projects/$PROJECT/subscriptions'), expectAsync1((request) { expect(request.body.length, 0); - return mock - .respond(new pubsub.Subscription()..name = absoluteName); + return mock.respond(pubsub.Subscription()..name = absoluteName); }, count: 2)); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); @@ -521,7 +518,7 @@ main() { test('lookup-error', () { var mock = mockClient(); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); badSubscriptionNames.forEach((name) { expect(() => api.lookupSubscription(name), throwsArgumentError); }); @@ -535,8 +532,8 @@ main() { pubsub.ListSubscriptionsResponse response, int first, int count) { response.subscriptions = []; for (int i = 0; i < count; i++) { - response.subscriptions.add( - new pubsub.Subscription()..name = 'subscription-${first + i}'); + response.subscriptions + .add(pubsub.Subscription()..name = 'subscription-${first + i}'); } } @@ -564,7 +561,7 @@ main() { expect(request.url.queryParameters['pageToken'], 'next-page'); } - var response = new pubsub.ListSubscriptionsResponse(); + var response = pubsub.ListSubscriptionsResponse(); var first = (pageCount - 1) * pageSize + 1; if (pageCount < totalPages) { response.nextPageToken = 'next-page'; @@ -582,7 +579,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, count, 50, topic: topic); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api .listSubscriptions(topic) .listen(expectAsync1((_) => null, count: count)) @@ -616,7 +613,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api.listSubscriptions().listen( expectAsync1(((_) => null), count: 70), onDone: expectAsync0(() => null)) @@ -630,7 +627,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var count = 0; var subscription; subscription = api.listSubscriptions().listen( @@ -655,7 +652,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 70, 50, totalCalls: 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api.listSubscriptions().listen((_) => throw 'Unexpected', onDone: () => throw 'Unexpected') ..cancel(); @@ -665,7 +662,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 170, 50, totalCalls: 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var subscription; subscription = api.listSubscriptions().listen( 
expectAsync1((_) => subscription.cancel()), @@ -680,7 +677,7 @@ main() { expectAsync1((request) { return mock.respondError(500); })); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); var subscription; subscription = api.listSubscriptions().listen( (_) => throw 'Unexpected', @@ -703,7 +700,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 51, 50, totalCalls: 1); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); int count = 0; var subscription; @@ -738,7 +735,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 0, 50, topic: topic); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api .pageSubscriptions(topic: topic) .then(expectAsync1((page) { @@ -767,7 +764,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, 10, 50, topic: topic); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api .pageSubscriptions(topic: topic) .then(expectAsync1((page) { @@ -796,7 +793,7 @@ main() { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; - var completer = new Completer(); + var completer = Completer(); var mock = mockClient(); registerQueryMock(mock, n, pageSize, topic: topic); @@ -815,7 +812,7 @@ main() { }); } - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); api .pageSubscriptions(topic: topic, pageSize: pageSize) .then(handlingPage); @@ -861,7 +858,7 @@ main() { registerLookup(MockClient mock) { mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); + return mock.respond(pubsub.Topic()..name = absoluteName); })); } @@ -871,8 +868,8 @@ main() { 'POST', 'projects/test-project/topics/test-topic:publish', expectAsync1((request) { - var publishRequest = new pubsub.PublishRequest.fromJson( - jsonDecode(request.body) as Map); + var publishRequest = + pubsub.PublishRequest.fromJson(jsonDecode(request.body) as Map); return fn(publishRequest); }, count: count)); } @@ -881,14 +878,14 @@ main() { var mock = mockClient(); registerLookup(mock); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { expect(request.messages.length, 1); expect(request.messages[0].data, messageBase64); expect(request.messages[0].attributes, isNull); - return mock.respond(new pubsub.PublishResponse()..messageIds = ['0']); + return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); })); return topic.publishString(message).then(expectAsync1((result) { @@ -896,11 +893,11 @@ main() { return topic.publishBytes(messageBytes).then(expectAsync1((result) { expect(result, isNull); return topic - .publish(new Message.withString(message)) + .publish(Message.withString(message)) .then(expectAsync1((result) { expect(result, isNull); return topic - .publish(new Message.withBytes(messageBytes)) + .publish(Message.withBytes(messageBytes)) .then(expectAsync1((result) { expect(result, isNull); })); @@ -914,7 +911,7 @@ main() { var mock = mockClient(); registerLookup(mock); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { @@ -923,7 +920,7 @@ main() { expect(request.messages[0].attributes, isNotNull); expect(request.messages[0].attributes.length, attributes.length); expect(request.messages[0].attributes, 
attributes); - return mock.respond(new pubsub.PublishResponse()..messageIds = ['0']); + return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); })); return topic @@ -935,13 +932,12 @@ main() { .then(expectAsync1((result) { expect(result, isNull); return topic - .publish( - new Message.withString(message, attributes: attributes)) + .publish(Message.withString(message, attributes: attributes)) .then(expectAsync1((result) { expect(result, isNull); return topic - .publish(new Message.withBytes(messageBytes, - attributes: attributes)) + .publish( + Message.withBytes(messageBytes, attributes: attributes)) .then(expectAsync1((result) { expect(result, isNull); })); @@ -955,10 +951,10 @@ main() { var mock = mockClient(); mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); + return mock.respond(pubsub.Topic()..name = absoluteName); })); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.absoluteName, absoluteName); @@ -983,10 +979,10 @@ main() { var mock = mockClient(); mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new pubsub.Topic()..name = absoluteName); + return mock.respond(pubsub.Topic()..name = absoluteName); })); - var api = new PubSub(mock, PROJECT); + var api = PubSub(mock, PROJECT); return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); @@ -1026,7 +1022,7 @@ main() { "subscription":"$absoluteSubscriptionName" } '''; - var event = new PushEvent.fromJson(requestBody); + var event = PushEvent.fromJson(requestBody); expect(event.message.asString, "Hello, world 30 of 50!"); expect(event.message.attributes['messageNo'], '30'); expect(event.message.attributes['test'], 'hello'); @@ -1052,7 +1048,7 @@ main() { "subscription":"$relativeSubscriptionName" } '''; - var event = new PushEvent.fromJson(requestBody); + var event = PushEvent.fromJson(requestBody); expect(event.message.asString, "Hello, world 30 of 50!"); expect(event.message.attributes['messageNo'], '30'); expect(event.message.attributes['test'], 'hello'); diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 65bafb49..2bf287ce 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -15,10 +15,10 @@ main() { expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); expect(() => ss.lookup(1), throwsA(isStateError)); - var c = new Completer.sync(); + var c = Completer.sync(); ss.fork(expectAsync0(() { c.complete(); - return new Future.value(); + return Future.value(); })); // Assert that after fork()ing we still don't have a service scope outside @@ -33,7 +33,7 @@ main() { test('non-existent-key', () { return ss.fork(expectAsync0(() { expect(ss.lookup(1), isNull); - return new Future.value(); + return Future.value(); })); }); @@ -44,14 +44,14 @@ main() { test('error-on-double-insert', () { // Ensure that inserting twice with the same key results in an error. 
- return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { ss.register(1, 'firstValue'); expect(() => ss.register(1, 'firstValue'), throwsA(isArgumentError)); }))); }); test('only-cleanup', () { - return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { ss.registerScopeExitCallback(expectAsync0(() {})); }))); }); @@ -60,7 +60,7 @@ main() { // Ensure cleanup functions are called in the reverse order of inserting // their entries. int insertions = 0; - return ss.fork(expectAsync0(() => new Future.value(() { + return ss.fork(expectAsync0(() => Future.value(() { int NUM = 10; for (int i = 0; i < NUM; i++) { @@ -104,7 +104,7 @@ main() { expect(ss.lookup(1), 'value1'); expect(ss.lookup(2), 'value2'); })); - return new Future.value(); + return Future.value(); })); }); @@ -115,7 +115,7 @@ main() { // failed cleanup() calls. int insertions = 0; return ss - .fork(() => new Future.sync(() { + .fork(() => Future.sync(() { for (int i = 0; i < 10; i++) { insertions++; ss.register(i, 'value$i'); @@ -136,7 +136,7 @@ main() { test('service-scope-destroyed-after-callback-completes', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. - return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { var key = 1; ss.register(key, 'firstValue'); ss.registerScopeExitCallback(Zone.current.bindCallback(() { @@ -156,12 +156,12 @@ main() { test('override-parent-value', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. - return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { var key = 1; ss.register(key, 'firstValue'); expect(ss.lookup(key), equals('firstValue')); - return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { ss.register(key, 'secondValue'); expect(ss.lookup(key), equals('secondValue')); }))); @@ -172,8 +172,8 @@ main() { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. 
ss.fork(expectAsync0(() { - Timer.run(() => throw new StateError('foobar')); - return new Future.value(); + Timer.run(() => throw StateError('foobar')); + return Future.value(); }), onError: expectAsync2((error, _) { expect(error, isStateError); })); @@ -201,7 +201,7 @@ main() { Future spawnChild( ownSubKey, otherSubKey, int i, ss.ScopeExitCallback cleanup) { - return ss.fork(expectAsync0(() => new Future.sync(() { + return ss.fork(expectAsync0(() => Future.sync(() { ss.register(subKey, 'fork$i'); ss.registerScopeExitCallback(cleanup); ss.register(ownSubKey, 'sub$i'); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 03384b1d..01f85da0 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -13,7 +13,7 @@ import 'package:test/test.dart'; import '../common_e2e.dart'; String generateBucketName() { - var id = new DateTime.now().millisecondsSinceEpoch; + var id = DateTime.now().millisecondsSinceEpoch; return 'dart-e2e-test-$id'; } @@ -24,7 +24,7 @@ const int MB = 1024 * 1024; const int maxNormalUpload = 1 * MB; const int minResumableUpload = maxNormalUpload + 1; final bytesResumableUpload = - new List.generate(minResumableUpload, (e) => e & 255); + List.generate(minResumableUpload, (e) => e & 255); void main() { Storage storage; @@ -36,7 +36,7 @@ void main() { testBucketName = generateBucketName(); // Share the same storage connection for all tests. - storage = new Storage(httpClient, project); + storage = Storage(httpClient, project); // Create a shared bucket for all object tests. return storage.createBucket(testBucketName).then((_) { @@ -48,7 +48,7 @@ void main() { tearDownAll(() { // Deleting a bucket relies on eventually consistent behaviour, hence // the delay in attempt to prevent test flakiness. 
- return new Future.delayed(STORAGE_LIST_DELAY, () { + return Future.delayed(STORAGE_LIST_DELAY, () { return storage.deleteBucket(testBucketName); }); }); @@ -187,26 +187,22 @@ void main() { })); } - Acl acl1 = new Acl( - [new AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]); - Acl acl2 = new Acl([ - new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry( - new AccountScope('sgjesse@google.com'), AclPermission.WRITE) + Acl acl1 = + Acl([AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]); + Acl acl2 = Acl([ + AclEntry(AclScope.allUsers, AclPermission.WRITE), + AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE) ]); - Acl acl3 = new Acl([ - new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry( - new AccountScope('sgjesse@google.com'), AclPermission.WRITE), - new AclEntry(new GroupScope('misc@dartlang.org'), AclPermission.READ) + Acl acl3 = Acl([ + AclEntry(AclScope.allUsers, AclPermission.WRITE), + AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE), + AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ) ]); - Acl acl4 = new Acl([ - new AclEntry(AclScope.allUsers, AclPermission.WRITE), - new AclEntry( - new AccountScope('sgjesse@google.com'), AclPermission.WRITE), - new AclEntry(new GroupScope('misc@dartlang.org'), AclPermission.READ), - new AclEntry( - new DomainScope('dartlang.org'), AclPermission.FULL_CONTROL) + Acl acl4 = Acl([ + AclEntry(AclScope.allUsers, AclPermission.WRITE), + AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE), + AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ), + AclEntry(DomainScope('dartlang.org'), AclPermission.FULL_CONTROL) ]); // The expected length of the returned ACL is one longer than the one @@ -252,8 +248,8 @@ void main() { })); } - var metadata1 = new ObjectMetadata(contentType: 'text/plain'); - var metadata2 = new ObjectMetadata( + var metadata1 = ObjectMetadata(contentType: 'text/plain'); + var metadata2 = ObjectMetadata( contentType: 'text/plain', cacheControl: 'no-cache', contentDisposition: 'attachment; filename="test.txt"', diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 16af7b89..226b6c04 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -20,17 +20,17 @@ import '../common_e2e.dart'; const String HOSTNAME = 'www.googleapis.com'; const String ROOT_PATH = '/storage/v1/'; -MockClient mockClient() => new MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); withMockClient(function(MockClient client, Storage storage)) { var mock = mockClient(); - function(mock, new Storage(mock, PROJECT)); + function(mock, Storage(mock, PROJECT)); } Future withMockClientAsync( Future function(MockClient client, Storage storage)) async { var mock = mockClient(); - await function(mock, new Storage(mock, PROJECT)); + await function(mock, Storage(mock, PROJECT)); } main() { @@ -41,9 +41,9 @@ main() { withMockClient((mock, api) { mock.register('POST', 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body) as Map); + storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); })); expect(api.createBucket(bucketName), completion(isNull)); @@ -67,12 +67,12 @@ main() { 'b', 
expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body) as Map); + storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); expect(requestBucket.acl, isNull); expect(request.url.queryParameters['predefinedAcl'], predefined[count++][1]); - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); }, count: predefined.length)); var futures = []; @@ -85,20 +85,17 @@ main() { }); test('create-with-acl', () { - var acl1 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), ]); - var acl2 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), ]); - var acl3 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), AclPermission.READ), + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), ]); var acls = [acl1, acl2, acl3]; @@ -111,7 +108,7 @@ main() { 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body) as Map); + storage.Bucket.fromJson(jsonDecode(request.body) as Map); expect(requestBucket.name, bucketName); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(requestBucket.acl, isNotNull); @@ -127,7 +124,7 @@ main() { expect(requestBucket.acl[2].role, 'READER'); } count++; - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); }, count: acls.length)); var futures = []; @@ -147,20 +144,17 @@ main() { [PredefinedAcl.publicReadWrite, 'publicReadWrite'] ]; - var acl1 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), ]); - var acl2 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), ]); - var acl3 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), AclPermission.READ), + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), ]); var acls = [acl1, acl2, acl3]; @@ -173,7 +167,7 @@ main() { 'b', expectAsync1((http.Request request) { var requestBucket = - new storage.Bucket.fromJson(jsonDecode(request.body) as Map); + 
storage.Bucket.fromJson(jsonDecode(request.body) as Map); int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; expect(requestBucket.name, bucketName); @@ -192,7 +186,7 @@ main() { expect(requestBucket.acl[2].role, 'READER'); } count++; - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); }, count: predefined.length * acls.length)); var futures = []; @@ -209,11 +203,10 @@ main() { test('delete', () { withMockClient((mock, api) { - mock.register('DELETE', new RegExp(r'b/[a-z/-]*$'), - expectAsync1((request) { + mock.register('DELETE', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); })); expect(api.deleteBucket(bucketName), completion(isNull)); @@ -226,12 +219,12 @@ main() { withMockClient((mock, api) { mock.register( 'GET', - new RegExp(r'b/[a-z/-]*$'), + RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); if (exists) { - return mock.respond(new storage.Bucket()..name = bucketName); + return mock.respond(storage.Bucket()..name = bucketName); } else { return mock.respondError(404); } @@ -247,18 +240,17 @@ main() { test('stat', () { withMockClient((mock, api) { - mock.register('GET', new RegExp(r'b/[a-z/-]*$'), - expectAsync1((request) { + mock.register('GET', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { expect(request.url.path, '${ROOT_PATH}b/$bucketName'); expect(request.body.length, 0); - return mock.respond(new storage.Bucket() + return mock.respond(storage.Bucket() ..name = bucketName - ..timeCreated = new DateTime(2014)); + ..timeCreated = DateTime(2014)); })); return api.bucketInfo(bucketName).then(expectAsync1((result) { expect(result.bucketName, bucketName); - expect(result.created, new DateTime(2014)); + expect(result.created, DateTime(2014)); })); }); }); @@ -268,7 +260,7 @@ main() { withMockClient((mock, api) { mock.register('GET', 'b', expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new storage.Buckets()); + return mock.respond(storage.Buckets()); })); api.listBucketNames().listen((_) => throw 'Unexpected', @@ -298,7 +290,7 @@ main() { mock.register( 'POST', 'b/srcBucket/o/srcObject/copyTo/b/destBucket/o/destObject', expectAsync1((request) { - return mock.respond(new storage.Object()..name = 'destObject'); + return mock.respond(storage.Object()..name = 'destObject'); })); expect( api.copyObject( @@ -330,7 +322,7 @@ main() { const int maxNormalUpload = 1 * MB; const int minResumableUpload = maxNormalUpload + 1; var bytesResumableUpload = - new List.generate(minResumableUpload, (e) => e & 255); + List.generate(minResumableUpload, (e) => e & 255); bool testArgumentError(e) => e is ArgumentError; bool testDetailedApiError(e) => e is storage.DetailedApiRequestError; @@ -346,11 +338,11 @@ main() { .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { var object = - new storage.Object.fromJson(jsonDecode(mediaUpload.json) as Map); + storage.Object.fromJson(jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); expect(mediaUpload.contentType, 'application/octet-stream'); - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); })); })); 
} @@ -362,7 +354,7 @@ main() { mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { var requestObject = - new storage.Object.fromJson(jsonDecode(request.body) as Map); + storage.Object.fromJson(jsonDecode(request.body) as Map); expect(requestObject.name, objectName); return mock.respondInitiateResumableUpload(PROJECT); })); @@ -376,7 +368,7 @@ main() { return mock.respondContinueResumableUpload(); } else { expect(request.bodyBytes.length, 1); - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); } }, count: 2)); } @@ -388,7 +380,7 @@ main() { Future pipeToSink(StreamSink> sink, List> data) { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); - return new Stream.fromIterable(data) + return Stream.fromIterable(data) .pipe(sink) .then(expectAsync1(checkResult)) .catchError((e) => throw 'Unexpected $e'); @@ -398,7 +390,7 @@ main() { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); return sink - .addStream(new Stream.fromIterable(data)) + .addStream(Stream.fromIterable(data)) .then((_) => sink.close()) .then(expectAsync1(checkResult)) .catchError((e) => throw 'Unexpected $e'); @@ -478,7 +470,7 @@ main() { test: testDetailedApiError); sink.done.catchError(expectAsync1(expectNotNull), test: testDetailedApiError); - return new Stream.fromIterable([bytesNormalUpload]) + return Stream.fromIterable([bytesNormalUpload]) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), @@ -513,7 +505,7 @@ main() { sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: testDetailedApiError); - return new Stream.fromIterable([bytesResumableUpload]) + return Stream.fromIterable([bytesResumableUpload]) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), @@ -543,7 +535,7 @@ main() { sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); - return new Stream>.fromIterable(data) + return Stream>.fromIterable(data) .pipe(sink) .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), @@ -565,11 +557,11 @@ main() { sink.done .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testArgumentError); - var stream = new Stream.fromIterable([ + var stream = Stream.fromIterable([ [1, 2, 3] ]); sink.addStream(stream).then((_) { - sink.addError(new ArgumentError()); + sink.addError(ArgumentError()); sink .close() .catchError(expectAsync1(expectNotNull), test: testArgumentError); @@ -597,9 +589,9 @@ main() { sink.done .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testArgumentError); - var stream = new Stream.fromIterable([bytesResumableUpload]); + var stream = Stream.fromIterable([bytesResumableUpload]); sink.addStream(stream).then((_) { - sink.addError(new ArgumentError()); + sink.addError(ArgumentError()); sink .close() .catchError(expectAsync1(expectNotNull), test: testArgumentError); @@ -609,18 +601,17 @@ main() { test('write-with-metadata-short', () { var metadata = [ - new ObjectMetadata(contentType: 'mime/type'), - new ObjectMetadata( - contentType: 'type/mime', cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache'), - new ObjectMetadata( + ObjectMetadata(contentType: 'mime/type'), + ObjectMetadata(contentType: 'type/mime', cacheControl: 'control-cache'), 
+ ObjectMetadata(cacheControl: 'control-cache'), + ObjectMetadata( cacheControl: 'control-cache', contentDisposition: 'disp-content'), - new ObjectMetadata( + ObjectMetadata( contentDisposition: 'disp-content', contentEncoding: 'encoding', contentLanguage: 'language'), - new ObjectMetadata(custom: {'x': 'y'}), - new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ObjectMetadata(custom: {'x': 'y'}), + ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) ]; withMockClient((mock, api) { @@ -634,7 +625,7 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = new storage.Object.fromJson( + var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); ObjectMetadata m = metadata[count]; expect(object.name, objectName); @@ -649,7 +640,7 @@ main() { expect(object.contentLanguage, m.contentLanguage); expect(object.metadata, m.custom); count++; - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); })); }, count: metadata.length)); @@ -665,18 +656,17 @@ main() { test('write-with-metadata-long', () { var metadata = [ - new ObjectMetadata(contentType: 'mime/type'), - new ObjectMetadata( - contentType: 'type/mime', cacheControl: 'control-cache'), - new ObjectMetadata(cacheControl: 'control-cache'), - new ObjectMetadata( + ObjectMetadata(contentType: 'mime/type'), + ObjectMetadata(contentType: 'type/mime', cacheControl: 'control-cache'), + ObjectMetadata(cacheControl: 'control-cache'), + ObjectMetadata( cacheControl: 'control-cache', contentDisposition: 'disp-content'), - new ObjectMetadata( + ObjectMetadata( contentDisposition: 'disp-content', contentEncoding: 'encoding', contentLanguage: 'language'), - new ObjectMetadata(custom: {'x': 'y'}), - new ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) + ObjectMetadata(custom: {'x': 'y'}), + ObjectMetadata(custom: {'a': 'b', 'x': 'y'}) ]; withMockClient((mock, api) { @@ -688,7 +678,7 @@ main() { 'b/$bucketName/o', expectAsync1((request) { var object = - new storage.Object.fromJson(jsonDecode(request.body) as Map); + storage.Object.fromJson(jsonDecode(request.body) as Map); ObjectMetadata m = metadata[countInitial]; expect(object.name, objectName); expect(object.cacheControl, m.cacheControl); @@ -715,7 +705,7 @@ main() { return mock.respondContinueResumableUpload(); } else { expect(request.bodyBytes.length, 1); - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); } }, count: metadata.length * 2)); @@ -750,7 +740,7 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = new storage.Object.fromJson( + var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); @@ -758,7 +748,7 @@ main() { expect(request.url.queryParameters['predefinedAcl'], predefined[count++][1]); expect(object.acl, isNull); - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); })); }, count: predefined.length)); @@ -773,20 +763,17 @@ main() { }); test('write-with-acl', () { - var acl1 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), ]); - var acl2 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new 
GroupScope('group@example.com'), AclPermission.WRITE), + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), ]); - var acl3 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), AclPermission.READ), + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), ]); var acls = [acl1, acl2, acl3]; @@ -802,7 +789,7 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - var object = new storage.Object.fromJson( + var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); @@ -821,7 +808,7 @@ main() { expect(object.acl[2].role, 'READER'); } count++; - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); })); }, count: acls.length)); @@ -844,20 +831,17 @@ main() { [PredefinedAcl.bucketOwnerRead, 'bucketOwnerRead'] ]; - var acl1 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + var acl1 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), ]); - var acl2 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), + var acl2 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), ]); - var acl3 = new Acl([ - new AclEntry( - new AccountScope('user@example.com'), AclPermission.FULL_CONTROL), - new AclEntry(new GroupScope('group@example.com'), AclPermission.WRITE), - new AclEntry(new DomainScope('example.com'), AclPermission.READ), + var acl3 = Acl([ + AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL), + AclEntry(GroupScope('group@example.com'), AclPermission.WRITE), + AclEntry(DomainScope('example.com'), AclPermission.READ), ]); var acls = [acl1, acl2, acl3]; @@ -875,7 +859,7 @@ main() { .then(expectAsync1((mediaUpload) { int predefinedIndex = count ~/ acls.length; int aclIndex = count % acls.length; - var object = new storage.Object.fromJson( + var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); expect(mediaUpload.bytes, bytes); @@ -895,7 +879,7 @@ main() { expect(object.acl[2].role, 'READER'); } count++; - return mock.respond(new storage.Object()..name = objectName); + return mock.respond(storage.Object()..name = objectName); })); }, count: predefined.length * acls.length)); @@ -1019,17 +1003,17 @@ main() { mock.register('GET', 'b/$bucketName/o/$objectName', expectAsync1((request) { expect(request.url.queryParameters['alt'], 'json'); - return mock.respond(new storage.Object() + return mock.respond(storage.Object() ..name = objectName - ..updated = new DateTime(2014) + ..updated = DateTime(2014) ..contentType = 'mime/type'); })); - var api = new Storage(mock, PROJECT); + var api = Storage(mock, PROJECT); var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((stat) { expect(stat.name, objectName); - 
expect(stat.updated, new DateTime(2014)); + expect(stat.updated, DateTime(2014)); expect(stat.metadata.contentType, 'mime/type'); })); }); @@ -1040,21 +1024,21 @@ main() { mock.register('GET', 'b/$bucketName/o/$objectName', expectAsync1((request) { expect(request.url.queryParameters['alt'], 'json'); - var acl1 = new storage.ObjectAccessControl(); + var acl1 = storage.ObjectAccessControl(); acl1.entity = 'user-1234567890'; acl1.role = 'OWNER'; - var acl2 = new storage.ObjectAccessControl(); + var acl2 = storage.ObjectAccessControl(); acl2.entity = 'user-xxx@yyy.zzz'; acl2.role = 'OWNER'; - var acl3 = new storage.ObjectAccessControl(); + var acl3 = storage.ObjectAccessControl(); acl3.entity = 'xxx-1234567890'; acl3.role = 'OWNER'; - return mock.respond(new storage.Object() + return mock.respond(storage.Object() ..name = objectName ..acl = [acl1, acl2, acl3]); })); - var api = new Storage(mock, PROJECT); + var api = Storage(mock, PROJECT); var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((ObjectInfo info) { expect(info.name, objectName); @@ -1074,7 +1058,7 @@ main() { withMockClient((mock, api) { mock.register('GET', 'b/$bucketName/o', expectAsync1((request) { expect(request.body.length, 0); - return mock.respond(new storage.Objects()); + return mock.respond(storage.Objects()); })); var bucket = api.bucket(bucketName); @@ -1103,46 +1087,45 @@ main() { }); group('acl', () { - var id = new StorageIdScope('1234567890'); - var user = new AccountScope('sgjesse@google.com'); - var group = new GroupScope('dart'); - var domain = new DomainScope('dartlang.org'); + var id = StorageIdScope('1234567890'); + var user = AccountScope('sgjesse@google.com'); + var group = GroupScope('dart'); + var domain = DomainScope('dartlang.org'); - var userRead = new AclEntry(user, AclPermission.READ); - var groupWrite = new AclEntry(group, AclPermission.WRITE); - var domainFullControl = new AclEntry(domain, AclPermission.FULL_CONTROL); + var userRead = AclEntry(user, AclPermission.READ); + var groupWrite = AclEntry(group, AclPermission.WRITE); + var domainFullControl = AclEntry(domain, AclPermission.FULL_CONTROL); test('compare-scope', () { - expect(id, new StorageIdScope('1234567890')); - expect(user, new AccountScope('sgjesse@google.com')); - expect(group, new GroupScope('dart')); - expect(domain, new DomainScope('dartlang.org')); - expect(AclScope.allAuthenticated, new AllAuthenticatedScope()); - expect(AclScope.allUsers, new AllUsersScope()); + expect(id, StorageIdScope('1234567890')); + expect(user, AccountScope('sgjesse@google.com')); + expect(group, GroupScope('dart')); + expect(domain, DomainScope('dartlang.org')); + expect(AclScope.allAuthenticated, AllAuthenticatedScope()); + expect(AclScope.allUsers, AllUsersScope()); }); test('compare-entries', () { - expect(userRead, new AclEntry(user, AclPermission.READ)); - expect(groupWrite, new AclEntry(group, AclPermission.WRITE)); - expect( - domainFullControl, new AclEntry(domain, AclPermission.FULL_CONTROL)); + expect(userRead, AclEntry(user, AclPermission.READ)); + expect(groupWrite, AclEntry(group, AclPermission.WRITE)); + expect(domainFullControl, AclEntry(domain, AclPermission.FULL_CONTROL)); }); test('compare-acls', () { - var acl = new Acl([userRead, groupWrite, domainFullControl]); + var acl = Acl([userRead, groupWrite, domainFullControl]); expect( acl, - new Acl([ - new AclEntry(user, AclPermission.READ), - new AclEntry(group, AclPermission.WRITE), - new AclEntry(domain, AclPermission.FULL_CONTROL) + Acl([ + AclEntry(user, 
AclPermission.READ), + AclEntry(group, AclPermission.WRITE), + AclEntry(domain, AclPermission.FULL_CONTROL) ])); expect( acl, - isNot(equals(new Acl([ - new AclEntry(group, AclPermission.WRITE), - new AclEntry(user, AclPermission.READ), - new AclEntry(domain, AclPermission.FULL_CONTROL) + isNot(equals(Acl([ + AclEntry(group, AclPermission.WRITE), + AclEntry(user, AclPermission.READ), + AclEntry(domain, AclPermission.FULL_CONTROL) ])))); }); From 4c0cc917d4bd2707c820e89bfcce25d258254482 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 11 Jan 2019 08:56:25 -0800 Subject: [PATCH 122/239] enable travis --- pkgs/gcloud/.travis.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 pkgs/gcloud/.travis.yml diff --git a/pkgs/gcloud/.travis.yml b/pkgs/gcloud/.travis.yml new file mode 100644 index 00000000..5c0461e3 --- /dev/null +++ b/pkgs/gcloud/.travis.yml @@ -0,0 +1,22 @@ +language: dart + +dart: + - stable + - dev + +dart_task: + - dartanalyzer: --fatal-infos --fatal-warnings . + +matrix: + include: + # Only validate formatting using the dev release + - dart: dev + dart_task: dartfmt + +# Only building master means that we don't run two builds for each pull request. +branches: + only: [master] + +cache: + directories: + - $HOME/.pub-cache From 24eda97a3299b5edc4e97c2076ee3223c064864f Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 16 Jan 2019 08:58:55 -0800 Subject: [PATCH 123/239] Fix analyzer warnings with latest SDK, reenable non-pedantic lints --- pkgs/gcloud/analysis_options.yaml | 24 +++++++++++++++++++++++ pkgs/gcloud/lib/src/db/annotations.dart | 2 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 2 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 12 ++++++------ pkgs/gcloud/lib/src/storage_impl.dart | 12 ++++++------ pkgs/gcloud/test/db/e2e/db_test_impl.dart | 2 +- pkgs/gcloud/test/db_all_e2e_test.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 8 ++++---- 8 files changed, 44 insertions(+), 20 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index a4f33350..50494a19 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -2,3 +2,27 @@ include: package:pedantic/analysis_options.yaml analyzer: strong-mode: implicit-casts: false +linter: + rules: + - avoid_null_checks_in_equality_operators + - await_only_futures + - camel_case_types + - cancel_subscriptions + - control_flow_in_finally + - directives_ordering + - empty_catches + - empty_constructor_bodies + - empty_statements + - iterable_contains_unrelated_type + - library_names + - library_prefixes + - list_remove_unrelated_type + - package_api_docs + - package_names + - package_prefixed_library_names + - prefer_final_fields + - super_goes_last + - test_types_in_equals + - throw_in_finally + - type_init_formals + - unnecessary_brace_in_string_interps diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 5f027694..5868a33b 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -278,7 +278,7 @@ class ListProperty extends Property { } if (value == null) return null; - List list = value; + var list = value as List; if (list.isEmpty) return null; if (list.length == 1) return subProperty.encodeValue(db, list[0]); return list.map((value) => subProperty.encodeValue(db, value)).toList(); diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index efccc235..32185616 100644 --- 
a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -97,7 +97,7 @@ class ModelDBImpl implements ModelDB { elements.add(ds.KeyElement(kind, id)); currentKey = currentKey.parent; } - Partition partition = currentKey._parent; + Partition partition = currentKey._parent as Partition; return ds.Key(elements.reversed.toList(), partition: ds.Partition(partition.namespace)); } diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index d103ce7c..aba0de4f 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -21,11 +21,11 @@ class _PubSubImpl implements PubSub { _subscriptionPrefix = 'projects/$project/subscriptions/'; String _fullTopicName(String name) { - return name.startsWith('projects/') ? name : '${_topicPrefix}$name'; + return name.startsWith('projects/') ? name : '$_topicPrefix$name'; } String _fullSubscriptionName(String name) { - return name.startsWith('projects/') ? name : '${_subscriptionPrefix}$name'; + return name.startsWith('projects/') ? name : '$_subscriptionPrefix$name'; } Future _createTopic(String name) { @@ -310,16 +310,16 @@ class _PushEventImpl implements PushEvent { _PushEventImpl(this._message, this._subscriptionName); factory _PushEventImpl.fromJson(String json) { - Map body = jsonDecode(json); - String data = body['message']['data']; + Map body = jsonDecode(json) as Map; + String data = body['message']['data'] as String; Map labels = HashMap(); body['message']['labels'].forEach((label) { - String key = label['key']; + String key = label['key'] as String; var value = label['strValue']; if (value == null) value = label['numValue']; labels[key] = value.toString(); }); - String subscription = body['subscription']; + String subscription = body['subscription'] as String; // TODO(#1): Remove this when the push event subscription name is prefixed // with '/subscriptions/'. if (!subscription.startsWith(PREFIX)) { diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 8daef73d..d7f340e6 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -137,7 +137,7 @@ class _BucketImpl implements Bucket { : this._api = storage._api; String absoluteObjectName(String objectName) { - return '${_ABSOLUTE_PREFIX}$bucketName/$objectName'; + return '$_ABSOLUTE_PREFIX$bucketName/$objectName'; } StreamSink> write(String objectName, @@ -157,7 +157,7 @@ class _BucketImpl implements Bucket { metadata = metadata.replace(contentType: contentType); } } - _ObjectMetadata objectMetadata = metadata; + _ObjectMetadata objectMetadata = metadata as _ObjectMetadata; object = objectMetadata._object; // If no predefined ACL is passed use the default (if any). 
@@ -191,7 +191,7 @@ class _BucketImpl implements Bucket { metadata: metadata, acl: acl, predefinedAcl: predefinedAcl, - contentType: contentType); + contentType: contentType) as _MediaUploadStreamSink; sink.add(bytes); return sink.close(); } @@ -219,8 +219,8 @@ class _BucketImpl implements Bucket { options = storage_api.PartialDownloadOptions(range); } - commons.Media media = await _api.objects - .get(bucketName, objectName, downloadOptions: options); + commons.Media media = (await _api.objects.get(bucketName, objectName, + downloadOptions: options)) as commons.Media; yield* media.stream; } @@ -255,7 +255,7 @@ class _BucketImpl implements Bucket { Future updateMetadata(String objectName, ObjectMetadata metadata) { // TODO: support other ObjectMetadata implementations? - _ObjectMetadata md = metadata; + _ObjectMetadata md = metadata as _ObjectMetadata; var object = md._object; if (md._object.acl == null && _defaultObjectAcl == null) { throw ArgumentError('ACL is required for update'); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index a4daa94c..02b4c5f8 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -91,7 +91,7 @@ class User extends Person { if (!(super.sameAs(other) && other is User && nickname == other.nickname)) return false; - User user = other; + User user = other as User; if (languages == null) { if (user.languages == null) return true; return false; diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 62fe793a..8d5ffac6 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -20,7 +20,7 @@ import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; var now = DateTime.now().millisecondsSinceEpoch; - String namespace = '${Platform.operatingSystem}${now}'; + String namespace = '${Platform.operatingSystem}$now'; datastore_impl.DatastoreImpl datastore; db.DatastoreDB datastoreDB; diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 4b46dd29..776ce387 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -27,8 +27,8 @@ main() { 'projects/topics', 'projects/$PROJECT', 'projects/$PROJECT/', - 'projects/${PROJECT}/topics', - 'projects/${PROJECT}/topics/' + 'projects/$PROJECT/topics', + 'projects/$PROJECT/topics/' ]; var badSubscriptionNames = [ @@ -36,8 +36,8 @@ main() { 'projects/subscriptions', 'projects/$PROJECT', 'projects/$PROJECT/', - 'projects/${PROJECT}/subscriptions', - 'projects/${PROJECT}/subscriptions/' + 'projects/$PROJECT/subscriptions', + 'projects/$PROJECT/subscriptions/' ]; group('topic', () { From 58b69bb522d57dfc84a000548416807eb47ea70d Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 16 Jan 2019 08:59:05 -0800 Subject: [PATCH 124/239] remove codereview settings --- pkgs/gcloud/codereview.settings | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 pkgs/gcloud/codereview.settings diff --git a/pkgs/gcloud/codereview.settings b/pkgs/gcloud/codereview.settings deleted file mode 100644 index d25f0372..00000000 --- a/pkgs/gcloud/codereview.settings +++ /dev/null @@ -1,3 +0,0 @@ -CODE_REVIEW_SERVER: http://codereview.chromium.org/ -VIEW_VC: https://github.com/dart-lang/gcloud/commit/ -CC_LIST: reviews@dartlang.org From 933fcc073e228741b87402e3d377738cea3c2ca6 Mon Sep 17 00:00:00 2001 From: Kevin Moore 
Date: Wed, 16 Jan 2019 09:03:10 -0800 Subject: [PATCH 125/239] Enable non-e2e tests on Travis --- pkgs/gcloud/.travis.yml | 1 + pkgs/gcloud/dart_test.yaml | 5 +++++ pkgs/gcloud/test/db_all_e2e_test.dart | 2 ++ pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 2 ++ pkgs/gcloud/test/storage/e2e_test.dart | 2 ++ 5 files changed, 12 insertions(+) create mode 100644 pkgs/gcloud/dart_test.yaml diff --git a/pkgs/gcloud/.travis.yml b/pkgs/gcloud/.travis.yml index 5c0461e3..69a17db3 100644 --- a/pkgs/gcloud/.travis.yml +++ b/pkgs/gcloud/.travis.yml @@ -6,6 +6,7 @@ dart: dart_task: - dartanalyzer: --fatal-infos --fatal-warnings . + - test: -P travis matrix: include: diff --git a/pkgs/gcloud/dart_test.yaml b/pkgs/gcloud/dart_test.yaml new file mode 100644 index 00000000..a465e8e7 --- /dev/null +++ b/pkgs/gcloud/dart_test.yaml @@ -0,0 +1,5 @@ +presets: + travis: + tags: + e2e: + skip: "e2e tests don't run on Travis" diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 8d5ffac6..cbbe6f7d 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +@Tags(const ["e2e"]) + library gcloud.test.db_all_test; import 'dart:async'; diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 84b7a076..723cc7bc 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +@Tags(const ["e2e"]) + import 'package:gcloud/pubsub.dart'; import 'package:http/http.dart'; import 'package:test/test.dart'; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 01f85da0..851cb496 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +@Tags(const ["e2e"]) + library gcloud.storage; import 'dart:async'; From 98d029c0d6117da5647e7be61be6155af5c5226e Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Mon, 1 Apr 2019 12:43:27 +0200 Subject: [PATCH 126/239] Bump to 0.6.0+3 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 7bb9e0f2..aff27130 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.0+3 + + * Fixed code formatting and lints. + ## 0.6.0+2 * Support the latest `pkg:http`. 
diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 3fa88476..b364d649 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.0+2 +version: 0.6.0+3 author: Dart Team description: Dart gcloud APIs homepage: https://github.com/dart-lang/gcloud From 040bf1b408384e6f0d374ed2fe2a0ed7a6ff6d57 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Mon, 1 Apr 2019 13:02:49 +0200 Subject: [PATCH 127/239] Added an example --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/example/main.dart | 24 ++++++++++++++++++++++++ pkgs/gcloud/pubspec.yaml | 5 +++-- 3 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 pkgs/gcloud/example/main.dart diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index aff27130..55acc4ee 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.6.0+4 + + * Updated package description. + * Added an example showing how to use Google Cloud Storage. + ## 0.6.0+3 * Fixed code formatting and lints. diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart new file mode 100644 index 00000000..77dc6954 --- /dev/null +++ b/pkgs/gcloud/example/main.dart @@ -0,0 +1,24 @@ +// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:async' show Future; +import 'dart:convert' show utf8; +import 'package:googleapis_auth/auth_io.dart' as auth; +import 'package:gcloud/storage.dart'; + +// Note: The README.md contains more details on how to use this package. + +Future main() async { + // When running on Google Computer Engine, AppEngine or GKE credentials can + // be obtained from a meta-data server as follows. + final client = await auth.clientViaMetadataServer(); + try { + final storage = Storage(client, 'my_gcp_project'); + final b = storage.bucket('test-bucket'); + await b.writeBytes('my-file.txt', utf8.encode('hello world')); + print('Wrote "hello world" to "my-file.txt" in "test-bucket"'); + } finally { + client.close(); + } +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b364d649..a2493196 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,7 +1,8 @@ name: gcloud -version: 0.6.0+3 +version: 0.6.0+4 author: Dart Team -description: Dart gcloud APIs +description: | + High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud environment: From 16dabdd775aeeb313b1b13a070a2e0e9b8ff7b9a Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 10 May 2019 10:21:16 -0700 Subject: [PATCH 128/239] Fix latest pedantic lints --- pkgs/gcloud/analysis_options.yaml | 1 - pkgs/gcloud/lib/src/datastore_impl.dart | 3 ++- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 50494a19..8b727fcb 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -21,7 +21,6 @@ linter: - package_names - package_prefixed_library_names - prefer_final_fields - - super_goes_last - test_types_in_equals - throw_in_finally - type_init_formals diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 840409ed..983132a0 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -157,8 +157,9 @@ class DatastoreImpl implements datastore.Datastore { .toList(); else if (value.entityValue != null) throw UnsupportedError('Entity values are not supported.'); - else if (value.geoPointValue != null) + else if (value.geoPointValue != null) { throw UnsupportedError('GeoPoint values are not supported.'); + } return null; } diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 02b4c5f8..95bcccd2 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -88,8 +88,9 @@ class User extends Person { List languages = const []; sameAs(Object other) { - if (!(super.sameAs(other) && other is User && nickname == other.nickname)) + if (!(super.sameAs(other) && other is User && nickname == other.nickname)) { return false; + } User user = other as User; if (languages == null) { From 302c7a3ce52df26e4d404a9af34052ce636b4180 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 10 May 2019 10:21:51 -0700 Subject: [PATCH 129/239] Test on the oldest supported SDK --- pkgs/gcloud/.travis.yml | 2 +- pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.travis.yml b/pkgs/gcloud/.travis.yml index 69a17db3..53ebe7ec 100644 --- a/pkgs/gcloud/.travis.yml +++ b/pkgs/gcloud/.travis.yml @@ -1,7 +1,7 @@ language: dart dart: - - stable + - 2.0.0 - dev dart_task: diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a2493196..f26bb06b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.0+4 +version: 0.6.1-dev author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
From 553806e679a1849fa6a2e6c4fdea2f230f71f51f Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 10 May 2019 10:22:57 -0700 Subject: [PATCH 130/239] Enable lints fixed by dartfmt --fix --- pkgs/gcloud/analysis_options.yaml | 3 +++ pkgs/gcloud/test/db_all_e2e_test.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 2 +- pkgs/gcloud/test/storage/e2e_test.dart | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 8b727fcb..8406cc11 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -21,7 +21,10 @@ linter: - package_names - package_prefixed_library_names - prefer_final_fields + - prefer_generic_function_type_aliases - test_types_in_equals - throw_in_finally - type_init_formals - unnecessary_brace_in_string_interps + - unnecessary_const + - unnecessary_new diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index cbbe6f7d..07bdbe94 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -@Tags(const ["e2e"]) +@Tags(["e2e"]) library gcloud.test.db_all_test; diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 723cc7bc..f99c0af2 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -@Tags(const ["e2e"]) +@Tags(["e2e"]) import 'package:gcloud/pubsub.dart'; import 'package:http/http.dart'; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 851cb496..f0645d68 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-@Tags(const ["e2e"]) +@Tags(["e2e"]) library gcloud.storage; From cf89139425aee16a84b7821bc44fc929055b59f5 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 24 May 2019 11:10:33 -0700 Subject: [PATCH 131/239] fix formatting (dart-lang/gcloud#74) --- pkgs/gcloud/lib/src/datastore_impl.dart | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 983132a0..5865330c 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -137,27 +137,27 @@ class DatastoreImpl implements datastore.Datastore { } static dynamic _convertApi2DatastoreProperty(api.Value value) { - if (value.booleanValue != null) + if (value.booleanValue != null) { return value.booleanValue; - else if (value.integerValue != null) + } else if (value.integerValue != null) { return int.parse(value.integerValue); - else if (value.doubleValue != null) + } else if (value.doubleValue != null) { return value.doubleValue; - else if (value.stringValue != null) + } else if (value.stringValue != null) { return value.stringValue; - else if (value.timestampValue != null) + } else if (value.timestampValue != null) { return DateTime.parse(value.timestampValue); - else if (value.blobValue != null) + } else if (value.blobValue != null) { return datastore.BlobValue(value.blobValueAsBytes); - else if (value.keyValue != null) + } else if (value.keyValue != null) { return _convertApi2DatastoreKey(value.keyValue); - else if (value.arrayValue != null && value.arrayValue.values != null) + } else if (value.arrayValue != null && value.arrayValue.values != null) { return value.arrayValue.values .map(_convertApi2DatastoreProperty) .toList(); - else if (value.entityValue != null) + } else if (value.entityValue != null) { throw UnsupportedError('Entity values are not supported.'); - else if (value.geoPointValue != null) { + } else if (value.geoPointValue != null) { throw UnsupportedError('GeoPoint values are not supported.'); } return null; From 41c7957d28dcbaf03d364f6bbcca542d0f1b25a1 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 7 Jun 2019 10:55:34 -0700 Subject: [PATCH 132/239] fix return values in anon functions (dart-lang/gcloud#75) --- pkgs/gcloud/lib/http.dart | 1 + .../datastore/e2e/datastore_test_impl.dart | 2 ++ pkgs/gcloud/test/db_all_e2e_test.dart | 1 + pkgs/gcloud/test/service_scope_test.dart | 20 +++++++++++++++---- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index 5051807e..0ba6cdc8 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -36,6 +36,7 @@ void registerAuthClientService(http.Client client, {bool close = true}) { if (close) { ss.registerScopeExitCallback(() { client.close(); + return null; }); } } diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 93b098e6..6646af4d 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -334,6 +334,7 @@ void runTests(Datastore datastore, String namespace) { .commit(transaction: transaction) .then((_) => null); } + return null; }); } @@ -398,6 +399,7 @@ void runTests(Datastore datastore, String namespace) { if (transaction != null) { return datastore.commit(transaction: transaction); } + return null; }); } diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart 
b/pkgs/gcloud/test/db_all_e2e_test.dart index 07bdbe94..afc7f8fd 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -32,6 +32,7 @@ Future main() async { datastore = datastore_impl.DatastoreImpl(httpClient, project); datastoreDB = db.DatastoreDB(datastore); client = httpClient; + return null; }); tearDownAll(() async { diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 2bf287ce..c4da569d 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -12,7 +12,8 @@ import 'package:test/test.dart'; main() { test('no-service-scope', () { expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); - expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); + expect( + () => ss.registerScopeExitCallback(() => null), throwsA(isStateError)); expect(() => ss.lookup(1), throwsA(isStateError)); var c = Completer.sync(); @@ -25,7 +26,8 @@ main() { // of the zone created by the fork()ing. c.future.then(expectAsync1((_) { expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); - expect(() => ss.registerScopeExitCallback(() {}), throwsA(isStateError)); + expect(() => ss.registerScopeExitCallback(() => null), + throwsA(isStateError)); expect(() => ss.lookup(1), throwsA(isStateError)); })); }); @@ -52,7 +54,7 @@ main() { test('only-cleanup', () { return ss.fork(expectAsync0(() => Future.sync(() { - ss.registerScopeExitCallback(expectAsync0(() {})); + ss.registerScopeExitCallback(expectAsync0(() => null)); }))); }); @@ -71,6 +73,7 @@ main() { ss.registerScopeExitCallback(expectAsync0(() { expect(insertions, equals(i + 1)); insertions--; + return null; })); for (int j = 0; j <= NUM; j++) { @@ -90,19 +93,23 @@ main() { ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), isNull); expect(ss.lookup(2), isNull); + return null; })); ss.register(1, 'value1'); ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), equals('value1')); expect(ss.lookup(2), isNull); + return null; })); ss.register(2, 'value2', onScopeExit: expectAsync0(() { expect(ss.lookup(1), equals('value1')); expect(ss.lookup(2), isNull); + return null; })); ss.registerScopeExitCallback(expectAsync0(() { expect(ss.lookup(1), 'value1'); expect(ss.lookup(2), 'value2'); + return null; })); return Future.value(); })); @@ -123,6 +130,7 @@ main() { expect(insertions, equals(i + 1)); insertions--; if (i.isEven) throw 'xx${i}yy'; + return null; }); } })) @@ -145,9 +153,10 @@ main() { Timer.run(expectAsync0(() { expect(() => ss.lookup(key), throwsA(isStateError)); expect(() => ss.register(2, 'value'), throwsA(isStateError)); - expect(() => ss.registerScopeExitCallback(() {}), + expect(() => ss.registerScopeExitCallback(() => null), throwsA(isStateError)); })); + return null; })); expect(ss.lookup(key), equals('firstValue')); }))); @@ -196,6 +205,7 @@ main() { ss.registerScopeExitCallback(expectAsync0(() { expect(cleanupFork1, equals(2)); expect(cleanupFork2, equals(2)); + return null; })); expect(ss.lookup(rootKey), equals('root')); @@ -217,9 +227,11 @@ main() { return Future.wait([ spawnChild(subKey1, subKey2, 1, () { cleanupFork1++; + return null; }), spawnChild(subKey2, subKey1, 2, () { cleanupFork2++; + return null; }), ]); })); From 1915009aa840d37d51dfb63e7a882a44e4adcb06 Mon Sep 17 00:00:00 2001 From: Dan Field Date: Wed, 3 Jul 2019 15:57:25 -0700 Subject: [PATCH 133/239] Update README.md Clarify example. 
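For context, a minimal sketch of the lookup pattern the README change below is getting at; this is illustrative only and not part of the commit. The `Person` kind, its properties, and the integer id are hypothetical, and building the key via `append` is just one common way to obtain a key for `db.lookup`.

```dart
import 'package:gcloud/db.dart';

@Kind()
class Person extends Model {
  @StringProperty()
  String name;

  @IntProperty()
  int age;
}

Future<Person> fetchPerson(DatastoreDB db, int id) async {
  // Keys are built by appending a (type, id) pair to a parent key;
  // db.emptyKey is the root key of the default partition.
  final key = db.emptyKey.append(Person, id: id);
  // lookup() yields one entry per requested key (null if not found).
  return (await db.lookup([key])).single as Person;
}
```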
--- pkgs/gcloud/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index b18070b2..e2836e86 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -146,6 +146,8 @@ var persons = (await db.query().run()).toList(); To fetch one or multiple existing entities, use `lookup`. ```dart +var key = new Person() + ..parentKey = db.emptyKey; var person = (await db.lookup([key])).single; var people = await db.lookup([key1, key2]); ``` From 0a9d0eb16dd9562d25b5587d42a763d4986ba8e6 Mon Sep 17 00:00:00 2001 From: Dan Field Date: Thu, 4 Jul 2019 08:18:22 -0700 Subject: [PATCH 134/239] Update README.md --- pkgs/gcloud/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index e2836e86..6134f355 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -147,6 +147,8 @@ To fetch one or multiple existing entities, use `lookup`. ```dart var key = new Person() + ..name = 'UniqueName' + ..age = 42 ..parentKey = db.emptyKey; var person = (await db.lookup([key])).single; var people = await db.lookup([key1, key2]); From 61ae3c2c6a4474ecbef2de995839efc595e299ba Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Thu, 18 Jul 2019 09:16:22 -0700 Subject: [PATCH 135/239] make private field final --- pkgs/gcloud/lib/src/storage_impl.dart | 4 ++-- pkgs/gcloud/test/db/properties_test.dart | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index d7f340e6..c426f5af 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -128,8 +128,8 @@ class _BucketInfoImpl implements BucketInfo { /// Bucket API implementation providing access to objects. class _BucketImpl implements Bucket { final storage_api.StorageApi _api; - PredefinedAcl _defaultPredefinedObjectAcl; - Acl _defaultObjectAcl; + final PredefinedAcl _defaultPredefinedObjectAcl; + final Acl _defaultObjectAcl; final String bucketName; _BucketImpl(_StorageImpl storage, this.bucketName, diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index db3f132b..dbc7f028 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -208,7 +208,7 @@ class CustomProperty extends StringProperty { } class KeyMock implements Key { - datastore.Key _datastoreKey; + final datastore.Key _datastoreKey; KeyMock(this._datastoreKey); From a10bb4e4db879eab782fdb44f5a845fae8f37b23 Mon Sep 17 00:00:00 2001 From: Todd Volkert Date: Tue, 23 Jul 2019 08:21:30 -0700 Subject: [PATCH 136/239] Various enhancements (dart-lang/gcloud#78) * Provide a more helpful (contextual) exception message when we fail to set a field while decoding a property. * Relax the constraints that dictate whether we found a "default constructor" when loading a model class. The existing constraints didn't allow a class to be used both as a model and as a json_serializable target, since the latter requires the default constructor to provide named arguments for all the properties. * Add generics support to `withTransaction()`, making the signature: `Future withTransaction(TransactionHandler)`. This enables type safety in the returned future vis-a-vis the return type of the transaction callback. 
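A rough usage sketch of what the generic signature buys (illustrative only, not part of this change; the `Counter` kind and its `count` field are made up for the example):

```dart
import 'package:gcloud/db.dart';

@Kind()
class Counter extends Model {
  @IntProperty()
  int count;
}

Future<int> incrementCounter(DatastoreDB db, Key key) {
  // The handler's return type is carried through, so the result is a
  // Future<int> rather than an untyped Future.
  return db.withTransaction<int>((Transaction tx) async {
    final counter = (await tx.lookup([key])).single as Counter;
    counter.count++;
    tx.queueMutations(inserts: [counter]);
    await tx.commit();
    return counter.count;
  });
}
```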
--- pkgs/gcloud/CHANGELOG.md | 8 ++++ pkgs/gcloud/lib/db.dart | 2 + pkgs/gcloud/lib/src/db/db.dart | 4 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 54 +++++++++++++++++------ pkgs/gcloud/pubspec.yaml | 3 +- pkgs/gcloud/test/db/db_test.dart | 50 +++++++++++++++++++++ 6 files changed, 105 insertions(+), 16 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 55acc4ee..f86f75d0 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,11 @@ +## 0.6.1 + + * Added examples. + * Fixed formatting and lints. + * Allow `Model` classes to contain constructors with optional or named + arguments (as long as they're annotated with `@required`). + * Add generics support to `withTransaction()`. + ## 0.6.0+4 * Updated package description. diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 9fba24d2..7878d03c 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -14,6 +14,8 @@ import 'dart:core'; import 'dart:core' as core; import 'dart:mirrors' as mirrors; +import 'package:meta/meta.dart'; + import 'common.dart' show StreamFromPages; import 'datastore.dart' as ds; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 6f98ef06..4df6724e 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -8,7 +8,7 @@ part of gcloud.db; /// /// The function will be given a [Transaction] object which can be used to make /// lookups/queries and queue modifications (inserts/updates/deletes). -typedef TransactionHandler = Future Function(Transaction transaction); +typedef TransactionHandler = Future Function(Transaction transaction); /// A datastore transaction. /// @@ -263,7 +263,7 @@ class DatastoreDB { /// A transaction can touch only a limited number of entity groups. This limit /// is currently 5. // TODO: Add retries and/or auto commit/rollback. - Future withTransaction(TransactionHandler transactionHandler) { + Future withTransaction(TransactionHandler transactionHandler) { return datastore .beginTransaction(crossEntityGroup: true) .then((datastoreTransaction) { diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 32185616..33b16802 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -238,6 +238,40 @@ class ModelDBImpl implements ModelDB { } } + static bool _isRequiredAnnotation(mirrors.InstanceMirror annotation) { + return annotation.type.simpleName == #Required; + } + + /// Returns true if a constructor invocation is valid even if the specified + /// [parameter] is omitted. + /// + /// This is true for named parameters, optional parameters, and parameters + /// with a default value. + static bool _canBeOmitted(mirrors.ParameterMirror parameter) { + if (parameter.metadata.any(_isRequiredAnnotation)) { + return false; + } + return parameter.isOptional || + parameter.isNamed || + parameter.hasDefaultValue; + } + + /// Returns true if the specified [classMirror] has a default (unnamed) + /// constructor that accepts an empty arguments list. 
+ @visibleForTesting + static bool hasDefaultConstructor(mirrors.ClassMirror classMirror) { + for (var declaration in classMirror.declarations.values) { + if (declaration is mirrors.MethodMirror) { + if (declaration.isConstructor && + declaration.constructorName == const Symbol('') && + declaration.parameters.every(_canBeOmitted)) { + return true; + } + } + } + return false; + } + void _tryLoadNewModelClassFull( mirrors.ClassMirror modelClass, String name, bool useIntegerId) { assert(!_modelDesc2Type.containsKey(modelClass.reflectedType)); @@ -256,18 +290,7 @@ class ModelDBImpl implements ModelDB { _propertiesFromModelDescription(modelClass); // Ensure we have an empty constructor. - bool defaultConstructorFound = false; - for (var declaration in modelClass.declarations.values) { - if (declaration is mirrors.MethodMirror) { - if (declaration.isConstructor && - declaration.constructorName == const Symbol('') && - declaration.parameters.isEmpty) { - defaultConstructorFound = true; - break; - } - } - } - if (!defaultConstructorFound) { + if (!hasDefaultConstructor(modelClass)) { throw StateError('Class ${modelClass.simpleName} does not have a default ' 'constructor.'); } @@ -439,7 +462,12 @@ class _ModelDescription { '${entity.key.elements.last.kind} (property name: $propertyName)'); } - mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + try { + mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + } on TypeError catch (error) { + throw StateError('Error trying to set property "${prop.propertyName}" ' + 'to $value for field "$fieldName": $error'); + } } String fieldNameToPropertyName(String fieldName) { diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index f26bb06b..3efd505d 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.1-dev +version: 0.6.1 author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
@@ -12,6 +12,7 @@ dependencies: _discoveryapis_commons: ^0.1.6+1 googleapis: '>=0.50.2 <1.0.0' http: '>=0.11.0 <0.13.0' + meta: ^1.0.2 dev_dependencies: googleapis_auth: '>=0.2.3 <0.3.0' diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index 92b2307a..b9fd736e 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -4,7 +4,10 @@ library gcloud.db_test; +import 'dart:mirrors' show reflectClass; + import 'package:gcloud/db.dart'; +import 'package:meta/meta.dart'; import 'package:test/test.dart'; @Kind() @@ -46,5 +49,52 @@ main() { expect(key.id, 42); expect(key.type, equals(Foobar)); }); + + test('hasDefaultConstructor', () { + expect(hasDefaultConstructor(Empty), isTrue); + expect(hasDefaultConstructor(OnlyNamedConstructor), isFalse); + expect(hasDefaultConstructor(DefaultAndNamedConstructor), isTrue); + expect(hasDefaultConstructor(RequiredArguments), isFalse); + expect(hasDefaultConstructor(OnlyPositionalArguments), isTrue); + expect(hasDefaultConstructor(OnlyNamedArguments), isTrue); + expect(hasDefaultConstructor(RequiredNamedArguments), isFalse); + expect(hasDefaultConstructor(DefaultArgumentValues), isTrue); + }); }); } + +bool hasDefaultConstructor(Type type) => + ModelDBImpl.hasDefaultConstructor(reflectClass(type)); + +class Empty { + const Empty(); +} + +class OnlyNamedConstructor { + const OnlyNamedConstructor.named(); +} + +class DefaultAndNamedConstructor { + const DefaultAndNamedConstructor(); + const DefaultAndNamedConstructor.named(); +} + +class RequiredArguments { + const RequiredArguments(int arg); +} + +class OnlyPositionalArguments { + const OnlyPositionalArguments([int arg, int arg2]); +} + +class OnlyNamedArguments { + const OnlyNamedArguments({int arg, int arg2}); +} + +class RequiredNamedArguments { + const RequiredNamedArguments({int arg1, @required int arg2}); +} + +class DefaultArgumentValues { + const DefaultArgumentValues([int arg1 = 1, int arg2 = 2]); +} From 3b8ad2b86cf5637f4b33bee6d48c2359ceddc458 Mon Sep 17 00:00:00 2001 From: Todd Volkert Date: Thu, 25 Jul 2019 08:16:51 -0700 Subject: [PATCH 137/239] Fix logic in `Transaction.rollback()` (dart-lang/gcloud#79) It's common to have code that looks like the following: ```dart try { ... transaction.commit(); } catch (error) { transaction.rollback(); } ``` This code is prone to errors - if the commit fails (and throws an exception), then the attempt to call `rollback()` will throw a secondary error (transaciton has already been committed), thus masking the real error. This PR fixes that logic. --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/db/db.dart | 28 ++++++++++++++++++---------- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index f86f75d0..a218c7da 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.2 + + * Fixed bug in `Transaction.rollback()`. + ## 0.6.1 * Added examples. 
diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 4df6724e..205b1d17 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -19,6 +19,7 @@ class Transaction { static const int _TRANSACTION_STARTED = 0; static const int _TRANSACTION_ROLLED_BACK = 1; static const int _TRANSACTION_COMMITTED = 2; + static const int _TRANSACTION_COMMIT_FAILED = 3; final DatastoreDB db; final ds.Transaction _datastoreTransaction; @@ -26,7 +27,7 @@ class Transaction { final List _inserts = []; final List _deletes = []; - int _transactionState = _TRANSACTION_STARTED; + int _state = _TRANSACTION_STARTED; Transaction(this.db, this._datastoreTransaction); @@ -69,27 +70,34 @@ class Transaction { /// Rolls this transaction back. Future rollback() { - _checkSealed(changeState: _TRANSACTION_ROLLED_BACK); + _checkSealed(changeState: _TRANSACTION_ROLLED_BACK, allowFailed: true); return db.datastore.rollback(_datastoreTransaction); } /// Commits this transaction including all of the queued mutations. Future commit() { _checkSealed(changeState: _TRANSACTION_COMMITTED); - return _commitHelper(db, - inserts: _inserts, - deletes: _deletes, - datastoreTransaction: _datastoreTransaction); + try { + return _commitHelper(db, + inserts: _inserts, + deletes: _deletes, + datastoreTransaction: _datastoreTransaction); + } catch (error) { + _state = _TRANSACTION_COMMIT_FAILED; + rethrow; + } } - _checkSealed({int changeState}) { - if (_transactionState == _TRANSACTION_COMMITTED) { + _checkSealed({int changeState, bool allowFailed = false}) { + if (_state == _TRANSACTION_COMMITTED) { throw StateError('The transaction has already been committed.'); - } else if (_transactionState == _TRANSACTION_ROLLED_BACK) { + } else if (_state == _TRANSACTION_ROLLED_BACK) { throw StateError('The transaction has already been rolled back.'); + } else if (_state == _TRANSACTION_COMMIT_FAILED && !allowFailed) { + throw StateError('The transaction has attempted commit and failed.'); } if (changeState != null) { - _transactionState = changeState; + _state = changeState; } } } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 3efd505d..0f2705f7 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.1 +version: 0.6.2 author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. From 8280aa63227c847211d0c4cf778e637eb12537a2 Mon Sep 17 00:00:00 2001 From: Todd Volkert Date: Mon, 29 Jul 2019 09:21:01 -0700 Subject: [PATCH 138/239] Add `DatastoreDB.lookupValue()` (dart-lang/gcloud#80) `DatastoreDB.lookup()` returns null rows in its results, which is confusing - the caller calls `lookup(keys)`, none of which are successfully looked up, and instead of getting an empty list back, they get a list of nulls. Since changing that behavior would be an API breaking change, and since a very common use-case is calling `lookup([key])`, this change adds a new API method for looking up a single key. This new method will throw if the lookup was unsuccessful. 
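A hypothetical usage sketch of the new method (the `Person` kind, its property, and the fallback value are illustrative, not part of this change):

```dart
import 'package:gcloud/db.dart';

@Kind()
class Person extends Model {
  @StringProperty()
  String name;
}

// Throws KeyNotFoundException if no entity exists for `key`.
Future<Person> loadPerson(DatastoreDB db, Key key) =>
    db.lookupValue<Person>(key);

// Alternatively, supply a default via orElse instead of throwing.
Future<Person> loadPersonOrDefault(DatastoreDB db, Key key) =>
    db.lookupValue<Person>(key,
        orElse: () => Person()
          ..parentKey = db.emptyKey
          ..name = 'unknown');
```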
--- pkgs/gcloud/CHANGELOG.md | 4 ++ pkgs/gcloud/lib/db.dart | 1 + pkgs/gcloud/lib/src/db/db.dart | 54 ++++++++++++++++++++++++++ pkgs/gcloud/lib/src/db/exceptions.dart | 18 +++++++++ pkgs/gcloud/pubspec.yaml | 2 +- 5 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 pkgs/gcloud/lib/src/db/exceptions.dart diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index a218c7da..9d3878fd 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.3 + + * Added `DatastoreDB.lookupValue()` + ## 0.6.2 * Fixed bug in `Transaction.rollback()`. diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 7878d03c..42e15514 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -22,6 +22,7 @@ import 'service_scope.dart' as ss; part 'src/db/annotations.dart'; part 'src/db/db.dart'; +part 'src/db/exceptions.dart'; part 'src/db/models.dart'; part 'src/db/model_db.dart'; part 'src/db/model_db_impl.dart'; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 205b1d17..68e5b2c1 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -37,6 +37,29 @@ class Transaction { datastoreTransaction: _datastoreTransaction); } + /// Looks up a single [key] within this transaction, and returns the + /// associated [Model] object. + /// + /// If [orElse] is specified, then it will be consulted to provide a default + /// value for the model object in the event that [key] was not found within + /// the transaction. + /// + /// If the [key] is not found within the transaction and [orElse] was not + /// specified, then a [KeyNotFoundException] will be thrown. + Future lookupValue(Key key, {T orElse()}) async { + final List values = await lookup([key]); + assert(values.length == 1); + T value = values.single; + if (value == null) { + if (orElse != null) { + value = orElse(); + } else { + throw KeyNotFoundException(key); + } + } + return value; + } + /// Enqueues [inserts] and [deletes] which should be committed at commit time. void queueMutations({List inserts, List deletes}) { _checkSealed(); @@ -302,12 +325,43 @@ class DatastoreDB { /// Looks up [keys] in the datastore and returns a list of [Model] objects. /// + /// Any key that is not found in the datastore will have a corresponding + /// value of null in the list of model objects that is returned. + /// /// For transactions, please use [beginTransaction] and call the [lookup] /// method on it's returned [Transaction] object. + /// + /// See also: + /// + /// * [lookupValue], which looks a single value up by its key, requiring a + /// successful lookup. Future> lookup(List keys) { return _lookupHelper(this, keys); } + /// Looks up a single [key] in the datastore, and returns the associated + /// [Model] object. + /// + /// If [orElse] is specified, then it will be consulted to provide a default + /// value for the model object in the event that [key] was not found in the + /// datastore. + /// + /// If the [key] is not found in the datastore and [orElse] was not + /// specified, then a [KeyNotFoundException] will be thrown. + Future lookupValue(Key key, {T orElse()}) async { + final List values = await lookup([key]); + assert(values.length == 1); + T value = values.single; + if (value == null) { + if (orElse != null) { + value = orElse(); + } else { + throw KeyNotFoundException(key); + } + } + return value; + } + /// Add [inserts] to the datastore and remove [deletes] from it. /// /// The order of inserts and deletes is not specified. 
When the commit is done diff --git a/pkgs/gcloud/lib/src/db/exceptions.dart b/pkgs/gcloud/lib/src/db/exceptions.dart new file mode 100644 index 00000000..11c48b1c --- /dev/null +++ b/pkgs/gcloud/lib/src/db/exceptions.dart @@ -0,0 +1,18 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +part of gcloud.db; + +/// Exception that gets thrown when a caller attempts to look up a value by +/// its key, and the key cannot be found in the datastore. +class KeyNotFoundException implements Exception { + /// Creates a new [KeyNotFoundException] for the specified [key]. + const KeyNotFoundException(this.key); + + /// The [Key] that was not found in the datastore. + final Key key; + + @override + String toString() => 'Key not found: ${key.type}:${key.id}'; +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 0f2705f7..ace53a65 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.2 +version: 0.6.3 author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. From 67e5597778281bda0ab5104c779be6d2d6ca651e Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Wed, 21 Aug 2019 11:48:08 +0200 Subject: [PATCH 139/239] Fixed datastore test cases --- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 73 +++++++++++++---------- 1 file changed, 40 insertions(+), 33 deletions(-) diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 95bcccd2..66c7b1c4 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -452,7 +452,12 @@ void runTests(db.DatastoreDB store, String namespace) { var allInserts = []..addAll(users)..addAll(expandoPersons); var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { - return waitUntilEntitiesReady(store, allKeys, partition).then((_) { + return Future.wait([ + waitUntilEntitiesReady( + store, users.map((u) => u.key).toList(), partition), + waitUntilEntitiesReady( + store, expandoPersons.map((u) => u.key).toList(), partition), + ]).then((_) { var tests = [ // Queries for [Person] return no results, we only have [User] // objects. @@ -600,7 +605,12 @@ void runTests(db.DatastoreDB store, String namespace) { () => store.commit(deletes: allKeys), // Wait until the entity deletes are reflected in the indices. 
- () => waitUntilEntitiesGone(store, allKeys, partition), + () => Future.wait([ + waitUntilEntitiesGone( + store, users.map((u) => u.key).toList(), partition), + waitUntilEntitiesGone(store, + expandoPersons.map((u) => u.key).toList(), partition), + ]), // Make sure queries don't return results () => store.lookup(allKeys).then((List models) { @@ -638,47 +648,44 @@ Future> runQueryWithExponentialBackoff( "Tried running a query with exponential backoff, giving up now."); } -Future waitUntilEntitiesReady( +Future waitUntilEntitiesReady( db.DatastoreDB mdb, List keys, db.Partition partition) { - return waitUntilEntitiesHelper(mdb, keys, true, partition); + return waitUntilEntitiesHelper(mdb, keys, true, partition); } -Future waitUntilEntitiesGone( +Future waitUntilEntitiesGone( db.DatastoreDB mdb, List keys, db.Partition partition) { - return waitUntilEntitiesHelper(mdb, keys, false, partition); + return waitUntilEntitiesHelper(mdb, keys, false, partition); } -Future waitUntilEntitiesHelper(db.DatastoreDB mdb, List keys, - bool positive, db.Partition partition) { - var keysByKind = >{}; - for (var key in keys) { - keysByKind.putIfAbsent(key.type, () => []).add(key); - } - - Future waitForKeys(List keys) { - return mdb - .query(partition: partition) - .run() - .toList() - .then((List models) { - for (var key in keys) { - bool found = false; - for (var model in models) { - if (key == model.key) found = true; +Future waitUntilEntitiesHelper( + db.DatastoreDB mdb, + List keys, + bool positive, + db.Partition partition, +) async { + bool done = false; + while (!done) { + final models = await mdb.query(partition: partition).run().toList(); + + done = true; + for (var key in keys) { + bool found = false; + for (var model in models) { + if (key == model.key) found = true; + } + if (positive) { + if (!found) { + done = false; } - if (positive) { - if (!found) return waitForKeys(keys); - } else { - if (found) return waitForKeys(keys); + } else { + if (found) { + done = false; } } - return null; - }); + } + return null; } - - return Future.forEach(keysByKind.keys.toList(), (Type kind) { - return waitForKeys(keysByKind[kind]); - }); } Future main() async { From abe82c875a28063dd72ce7ee901ab2da9f810c72 Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Tue, 31 Jul 2018 13:17:37 +0200 Subject: [PATCH 140/239] Add possibility to define generic keys --- pkgs/gcloud/lib/src/db/models.dart | 16 ++++++++-------- pkgs/gcloud/test/db/properties_test.dart | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 95f645a9..12b77269 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -8,12 +8,12 @@ part of gcloud.db; /// /// The [Key] can be incomplete if it's id is `null`. In this case the id will /// be automatically allocated and set at commit time. -class Key { +class Key { // Either KeyImpl or PartitionImpl final Object _parent; final Type type; - final Object id; + final T id; Key(Key parent, this.type, this.id) : _parent = parent { if (type == null) { @@ -46,8 +46,8 @@ class Key { return obj as Partition; } - Key append(Type modelType, {Object id}) { - return Key(this, modelType, id); + Key append(Type modelType, {U id}) { + return Key(this, modelType, id); } bool get isEmpty => _parent is Partition; @@ -90,13 +90,13 @@ class Partition { /// Superclass for all model classes. 
/// -/// Every model class has a [id] -- which must be an integer or a string, and +/// Every model class has a [id] of type [T] which must be `int` or `String`, and /// a [parentKey]. The [key] getter is returning the key for the model object. -abstract class Model { - Object id; +abstract class Model { + T id; Key parentKey; - Key get key => parentKey.append(this.runtimeType, id: id); + Key get key => parentKey.append(this.runtimeType, id: id); } /// Superclass for all expanded model classes. diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index dbc7f028..a4083ecf 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -218,7 +218,7 @@ class KeyMock implements Key { bool get isEmpty => false; Partition get partition => null; datastore.Key get datastoreKey => _datastoreKey; - Key append(Type modelType, {Object id}) => null; + Key append(Type modelType, {T id}) => null; int get hashCode => 1; } From f2ec36982d3d4c9c180c71bc9b56b147cd29c81a Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Wed, 25 Sep 2019 15:26:21 +0200 Subject: [PATCH 141/239] Refactor _isExpandoClass to use ClassMirror.isSubtypeOf --- pkgs/gcloud/lib/src/db/model_db_impl.dart | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 33b16802..8fda941e 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -352,17 +352,8 @@ class ModelDBImpl implements ModelDB { return properties; } - bool _isExpandoClass(mirrors.ClassMirror modelClass) { - while (modelClass.superclass != modelClass) { - if (modelClass.reflectedType == ExpandoModel) { - return true; - } else if (modelClass.reflectedType == Model) { - return false; - } - modelClass = modelClass.superclass; - } - throw StateError('This should be unreachable.'); - } + bool _isExpandoClass(mirrors.ClassMirror modelClass) => + modelClass.isSubtypeOf(mirrors.reflectClass(ExpandoModel)); } class _ModelDescription { From 9ec71df4ab89a8b7a7854b12fc3e48185af068fe Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Wed, 25 Sep 2019 15:46:30 +0200 Subject: [PATCH 142/239] Convert to the correct Key generic type from datastore --- pkgs/gcloud/lib/src/db/model_db_impl.dart | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 8fda941e..2e6800e0 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -56,7 +56,8 @@ class ModelDBImpl implements ModelDB { _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]); } - /// Converts a [ds.Key] to a [Key]. + /// Converts a [ds.Key] to a [Key]. The key returned will have the correct + /// id type which is either `Key` or `Key`. 
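To make the generics change concrete: after this patch a model declares the type of its id, and keys carry that type (the diff hunks here show the generic forms without their angle brackets, i.e. `Model<T>` and `Key<T>` appear as `Model` and `Key`). The `Author` and `Book` classes below are illustrative assumptions, not part of the patch.

    import 'package:gcloud/db.dart' as db;

    // Hypothetical models: one with a String id, one with an int id.
    @db.Kind(idType: db.IdType.String)
    class Author extends db.Model<String> {
      @db.StringProperty()
      String name;
    }

    @db.Kind()
    class Book extends db.Model<int> {
      @db.StringProperty()
      String title;
    }

    void keyExample(db.DatastoreDB dbService) {
      // Ids and keys are now statically typed.
      final authorKey = dbService.emptyKey.append<String>(Author, id: 'tolkien');
      final bookKey = authorKey.append<int>(Book, id: 1);

      String authorId = authorKey.id; // previously only available as Object
      int bookId = bookKey.id;

      // Key.cast(), added in a later commit in this series, converts an
      // untyped Key into e.g. a Key<int>.
      db.Key untypedKey = bookKey;
      final db.Key<int> typedKey = untypedKey.cast<int>();

      print([authorId, bookId, typedKey]);
    }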
Key fromDatastoreKey(ds.Key datastoreKey) { var namespace = Partition(datastoreKey.partition.namespace); Key key = namespace.emptyKey; @@ -68,7 +69,15 @@ class ModelDBImpl implements ModelDB { 'Please ensure a model class was annotated with ' '`@Kind(name: "${element.kind}")`.'); } - key = key.append(type, id: element.id); + final elementId = element.id; + if (elementId is String) { + key = key.append(type, id: elementId); + } else if (elementId is int) { + key = key.append(type, id: elementId); + } else { + throw StateError('Key must be either String or int, but ' + 'was ${elementId.runtimeType} for key ${element.kind}'); + } } return key; } From 63bc08fc52f3f0736ee632a5e6bea18418bd184d Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Tue, 1 Oct 2019 15:58:00 +0200 Subject: [PATCH 143/239] Add Key.cast() --- pkgs/gcloud/lib/src/db/models.dart | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 12b77269..69aa44af 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -60,6 +60,9 @@ class Key { } int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode; + + /// Converts `Key` to `Key`. + Key cast() => Key(parent, type, id as U); } /// Represents a datastore partition. From 562870e1762e6ef376675fbcd4c22d8efa0f23ab Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Tue, 1 Oct 2019 16:10:41 +0200 Subject: [PATCH 144/239] Bump version and update CHANGELOG --- pkgs/gcloud/CHANGELOG.md | 8 ++++++++ pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9d3878fd..78151e40 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,11 @@ +## 0.7.0 + + * **BREAKING CHANGE:** Add generics support for `Model.id`. + It is now possible to define the type of the id a model has (either `String` + or `int`). A model can now be defined as + `class MyModel extends Model {}` and `myModel.id` will then + be of type `String` and `myModel.key` of type `Key`. + ## 0.6.3 * Added `DatastoreDB.lookupValue()` diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ace53a65..57d01ee8 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.6.3 +version: 0.7.0 author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. From 9136b5227687a86e2910ae0efd3d82223b108fe8 Mon Sep 17 00:00:00 2001 From: Sigurd Meldgaard Date: Fri, 6 Dec 2019 09:50:46 +0100 Subject: [PATCH 145/239] Mention that package doesn't work with Flutter --- pkgs/gcloud/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 6134f355..84ab74fb 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -16,7 +16,7 @@ uses the [googleapis_auth][googleapisauth] package. Note that this package is only intended for being used with the standalone VM in a server or command line application. Don't expect this package to work on -the browser. +the browser or in Flutter. 
The code snippets below demonstrating the use of this package all assume that the following imports are present: From 5e279cb63dea10ec4f4ef2bef85a43abc2543135 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 28 Feb 2020 07:28:34 -0800 Subject: [PATCH 146/239] Fix latest pedantic lints (dart-lang/gcloud#89) Remove explicit lints duplicated by pkg:pedantic --- pkgs/gcloud/.travis.yml | 9 +- pkgs/gcloud/CHANGELOG.md | 4 + pkgs/gcloud/analysis_options.yaml | 12 +- pkgs/gcloud/lib/common.dart | 10 +- pkgs/gcloud/lib/datastore.dart | 39 +++-- pkgs/gcloud/lib/pubsub.dart | 2 +- pkgs/gcloud/lib/service_scope.dart | 14 +- pkgs/gcloud/lib/src/datastore_impl.dart | 47 +++--- pkgs/gcloud/lib/src/db/annotations.dart | 20 ++- pkgs/gcloud/lib/src/db/db.dart | 25 ++- pkgs/gcloud/lib/src/db/model_db_impl.dart | 82 ++++++---- pkgs/gcloud/lib/src/db/models.dart | 11 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 79 +++++++-- pkgs/gcloud/lib/src/storage_impl.dart | 150 +++++++++++------- pkgs/gcloud/lib/storage.dart | 59 ++++--- pkgs/gcloud/pubspec.yaml | 4 +- pkgs/gcloud/test/common.dart | 12 +- pkgs/gcloud/test/common_e2e.dart | 9 +- .../datastore/e2e/datastore_test_impl.dart | 66 ++++---- pkgs/gcloud/test/datastore/e2e/utils.dart | 4 +- pkgs/gcloud/test/db/db_test.dart | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 35 ++-- .../test/db/e2e/metamodel_test_impl.dart | 4 +- pkgs/gcloud/test/db/model_db_test.dart | 4 +- .../db/model_dbs/duplicate_fieldname.dart | 1 + pkgs/gcloud/test/db/properties_test.dart | 25 ++- pkgs/gcloud/test/db_all_e2e_test.dart | 4 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 14 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 66 ++++---- pkgs/gcloud/test/service_scope_test.dart | 20 +-- pkgs/gcloud/test/storage/e2e_test.dart | 12 +- pkgs/gcloud/test/storage/storage_test.dart | 91 +++++------ 32 files changed, 550 insertions(+), 386 deletions(-) diff --git a/pkgs/gcloud/.travis.yml b/pkgs/gcloud/.travis.yml index 53ebe7ec..2850234f 100644 --- a/pkgs/gcloud/.travis.yml +++ b/pkgs/gcloud/.travis.yml @@ -1,11 +1,10 @@ language: dart dart: - - 2.0.0 + - 2.3.0 - dev dart_task: - - dartanalyzer: --fatal-infos --fatal-warnings . - test: -P travis matrix: @@ -13,6 +12,12 @@ matrix: # Only validate formatting using the dev release - dart: dev dart_task: dartfmt + - dart: dev + dart_task: + dartanalyzer: --fatal-infos --fatal-warnings . + - dart: 2.3.0 + dart_task: + dartanalyzer: --fatal-warnings . # Only building master means that we don't run two builds for each pull request. branches: diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9d3878fd..a1346db4 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.4 + +* Require minimum Dart SDK `2.3.0`. 
+ ## 0.6.3 * Added `DatastoreDB.lookupValue()` diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 8406cc11..43cc915b 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -1,30 +1,22 @@ include: package:pedantic/analysis_options.yaml + analyzer: strong-mode: implicit-casts: false + linter: rules: - - avoid_null_checks_in_equality_operators - await_only_futures - camel_case_types - cancel_subscriptions - control_flow_in_finally - directives_ordering - - empty_catches - - empty_constructor_bodies - empty_statements - iterable_contains_unrelated_type - - library_names - - library_prefixes - list_remove_unrelated_type - package_api_docs - package_names - package_prefixed_library_names - - prefer_final_fields - - prefer_generic_function_type_aliases - test_types_in_equals - throw_in_finally - - type_init_formals - unnecessary_brace_in_string_interps - - unnecessary_const - - unnecessary_new diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index e516807b..ce675de8 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -66,24 +66,24 @@ class StreamFromPages { } } - _onListen() { - int pageSize = _PAGE_SIZE; + void _onListen() { + var pageSize = _PAGE_SIZE; _pendingRequest = true; _firstPageProvider(pageSize).then(_handlePage, onError: _handleError); } - _onPause() { + void _onPause() { _paused = true; } - _onResume() { + void _onResume() { _paused = false; if (_pendingRequest) return; _pendingRequest = true; _currentPage.next().then(_handlePage, onError: _handleError); } - _onCancel() { + void _onCancel() { _cancelled = true; } } diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 12670142..1146cf25 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -41,30 +41,30 @@ class ApplicationError implements Exception { final String message; ApplicationError(this.message); - String toString() => "ApplicationError: $message"; + @override + String toString() => 'ApplicationError: $message'; } class DatastoreError implements Exception { final String message; DatastoreError([String message]) - : message = (message != null - ? message - : 'DatastoreError: An unknown error occured'); + : message = (message ?? 'DatastoreError: An unknown error occured'); + @override String toString() => '$message'; } class UnknownDatastoreError extends DatastoreError { - UnknownDatastoreError(error) : super("An unknown error occured ($error)."); + UnknownDatastoreError(error) : super('An unknown error occured ($error).'); } class TransactionAbortedError extends DatastoreError { - TransactionAbortedError() : super("The transaction was aborted."); + TransactionAbortedError() : super('The transaction was aborted.'); } class TimeoutError extends DatastoreError { - TimeoutError() : super("The operation timed out."); + TimeoutError() : super('The operation timed out.'); } /// Thrown when a query would require an index which was not set. @@ -72,19 +72,19 @@ class TimeoutError extends DatastoreError { /// An application needs to specify indices in a `index.yaml` file and needs to /// create indices using the `gcloud preview datastore create-indexes` command. 
class NeedIndexError extends DatastoreError { - NeedIndexError() : super("An index is needed for the query to succeed."); + NeedIndexError() : super('An index is needed for the query to succeed.'); } class PermissionDeniedError extends DatastoreError { - PermissionDeniedError() : super("Permission denied."); + PermissionDeniedError() : super('Permission denied.'); } class InternalError extends DatastoreError { - InternalError() : super("Internal service error."); + InternalError() : super('Internal service error.'); } class QuotaExceededError extends DatastoreError { - QuotaExceededError(error) : super("Quota was exceeded ($error)."); + QuotaExceededError(error) : super('Quota was exceeded ($error).'); } /// A datastore Entity @@ -137,7 +137,7 @@ class Key { final List elements; Key(this.elements, {Partition partition}) - : this.partition = (partition == null) ? Partition.DEFAULT : partition; + : partition = (partition == null) ? Partition.DEFAULT : partition; factory Key.fromParent(String kind, int id, {Key parent}) { Partition partition; @@ -150,16 +150,18 @@ class Key { return Key(elements, partition: partition); } + @override int get hashCode => elements.fold(partition.hashCode, (a, b) => a ^ b.hashCode); + @override bool operator ==(Object other) { if (identical(this, other)) return true; if (other is Key && partition == other.partition && elements.length == other.elements.length) { - for (int i = 0; i < elements.length; i++) { + for (var i = 0; i < elements.length; i++) { if (elements[i] != other.elements[i]) return false; } return true; @@ -167,6 +169,7 @@ class Key { return false; } + @override String toString() { var namespaceString = partition.namespace == null ? 'null' : "'${partition.namespace}'"; @@ -193,10 +196,12 @@ class Partition { } } - const Partition._default() : this.namespace = null; + const Partition._default() : namespace = null; + @override int get hashCode => namespace.hashCode; + @override bool operator ==(Object other) => other is Partition && namespace == other.namespace; } @@ -224,12 +229,15 @@ class KeyElement { } } + @override int get hashCode => kind.hashCode ^ id.hashCode; + @override bool operator ==(Object other) => other is KeyElement && kind == other.kind && id == other.id; - String toString() => "$kind.$id"; + @override + String toString() => '$kind.$id'; } /// A relation used in query filters. @@ -244,6 +252,7 @@ class FilterRelation { const FilterRelation._(this.name); + @override String toString() => name; } diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 207967d7..5f6645bf 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -127,7 +127,7 @@ abstract class PubSub { var emulator = Platform.environment['PUBSUB_EMULATOR_HOST']; return emulator == null ? _PubSubImpl(client, project) - : _PubSubImpl.rootUrl(client, project, "http://$emulator/"); + : _PubSubImpl.rootUrl(client, project, 'http://$emulator/'); } /// The name of the project. diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index ea67d9f8..3a14dd24 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -96,11 +96,9 @@ _ServiceScope get _serviceScope => /// /// If an uncaught error occurs and [onError] is given, it will be called. The /// `onError` parameter can take the same values as `Zone.current.fork`. 
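The service-scope doc comment above describes `fork` abstractly; a minimal usage sketch of the pattern it supports follows. The `#greeting` key and the registered value are arbitrary placeholders chosen for illustration.

    import 'package:gcloud/service_scope.dart' as ss;

    Future<void> main() async {
      await ss.fork(() async {
        // Values registered inside the fork are visible via lookup() for the
        // duration of this scope and are released when the scope exits.
        ss.register(#greeting, 'hello world');

        print(ss.lookup(#greeting)); // -> hello world
      });
    }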
-Future fork(Future func(), {Function onError}) { +Future fork(Future Function() func, {Function onError}) { var currentServiceScope = _serviceScope; - if (currentServiceScope == null) { - currentServiceScope = _emptyServiceScope; - } + currentServiceScope ??= _emptyServiceScope; return currentServiceScope._fork(func, onError: onError); } @@ -146,10 +144,10 @@ Object lookup(Object key) { class _ServiceScope { /// A mapping of keys to values stored inside the service scope. final Map _key2Values = - Map(); + {}; /// A set which indicates whether an object was copied from it's parent. - final Set _parentCopies = Set(); + final Set _parentCopies = {}; /// On-Scope-Exit functions which will be called in reverse insertion order. final List<_RegisteredEntry> _registeredEntries = []; @@ -173,7 +171,7 @@ class _ServiceScope { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); - bool isParentCopy = _parentCopies.contains(serviceScopeKey); + var isParentCopy = _parentCopies.contains(serviceScopeKey); if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) { throw ArgumentError( 'Servie scope already contains key $serviceScopeKey.'); @@ -199,7 +197,7 @@ class _ServiceScope { } /// Start a new zone with a forked service scope. - Future _fork(Future func(), {Function onError}) { + Future _fork(Future Function() func, {Function onError}) { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 5865330c..a57dc820 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -90,7 +90,7 @@ class DatastoreImpl implements datastore.Datastore { if (b.partitionId != null) return false; } - for (int i = 0; i < a.path.length; i++) { + for (var i = 0; i < a.path.length; i++) { if (a.path[i].id != b.path[i].id || a.path[i].name != b.path[i].name || a.path[i].kind != b.path[i].kind) return false; @@ -102,7 +102,7 @@ class DatastoreImpl implements datastore.Datastore { {bool lists = true}) { var apiValue = api.Value()..excludeFromIndexes = !indexed; if (value == null) { - return apiValue..nullValue = "NULL_VALUE"; + return apiValue..nullValue = 'NULL_VALUE'; } else if (value is bool) { return apiValue..booleanValue = value; } else if (value is int) { @@ -124,7 +124,7 @@ class DatastoreImpl implements datastore.Datastore { throw Exception('List values are not allowed.'); } - convertItem(i) => + api.Value convertItem(i) => _convertDatastore2ApiPropertyValue(i, indexed, lists: false); return api.Value() @@ -164,7 +164,7 @@ class DatastoreImpl implements datastore.Datastore { } static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { - var unindexedProperties = Set(); + var unindexedProperties = {}; var properties = {}; if (entity.properties != null) { @@ -188,7 +188,7 @@ class DatastoreImpl implements datastore.Datastore { if (entity.properties != null) { for (var key in entity.properties.keys) { var value = entity.properties[key]; - bool indexed = false; + var indexed = false; if (entity.unIndexedProperties != null) { indexed = !entity.unIndexedProperties.contains(key); } @@ -284,6 +284,7 @@ class DatastoreImpl implements datastore.Datastore { return Future.error(error, stack); } + @override Future> allocateIds(List keys) { var request = api.AllocateIdsRequest(); request @@ -295,6 +296,7 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } + @override Future beginTransaction( {bool crossEntityGroup = false}) { 
var request = api.BeginTransactionRequest(); @@ -303,6 +305,7 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } + @override Future commit( {List inserts, List autoIdInserts, @@ -319,22 +322,22 @@ class DatastoreImpl implements datastore.Datastore { var mutations = request.mutations = []; if (inserts != null) { - for (int i = 0; i < inserts.length; i++) { + for (var i = 0; i < inserts.length; i++) { mutations.add(api.Mutation() ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true)); } } - int autoIdStartIndex = -1; + var autoIdStartIndex = -1; if (autoIdInserts != null) { autoIdStartIndex = mutations.length; - for (int i = 0; i < autoIdInserts.length; i++) { + for (var i = 0; i < autoIdInserts.length; i++) { mutations.add(api.Mutation() ..insert = _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); } } if (deletes != null) { - for (int i = 0; i < deletes.length; i++) { + for (var i = 0; i < deletes.length; i++) { mutations.add(api.Mutation() ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true)); } @@ -342,7 +345,7 @@ class DatastoreImpl implements datastore.Datastore { return _api.projects.commit(request, _project).then((result) { List keys; if (autoIdInserts != null && autoIdInserts.isNotEmpty) { - List mutationResults = result.mutationResults; + var mutationResults = result.mutationResults; assert(autoIdStartIndex != -1); assert(mutationResults.length >= (autoIdStartIndex + autoIdInserts.length)); @@ -357,6 +360,7 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } + @override Future> lookup(List keys, {datastore.Transaction transaction}) { var apiKeys = keys.map((key) { @@ -392,10 +396,10 @@ class DatastoreImpl implements datastore.Datastore { // repeated Key deferred = 3; // } var entities = List(apiKeys.length); - for (int i = 0; i < apiKeys.length; i++) { + for (var i = 0; i < apiKeys.length; i++) { var apiKey = apiKeys[i]; - bool found = false; + var found = false; if (response.found != null) { for (var result in response.found) { @@ -429,6 +433,7 @@ class DatastoreImpl implements datastore.Datastore { }, onError: _handleError); } + @override Future> query(datastore.Query query, {datastore.Partition partition, datastore.Transaction transaction}) { // NOTE: We explicitly do not set 'limit' here, since this is handled by @@ -458,6 +463,7 @@ class DatastoreImpl implements datastore.Datastore { .catchError(_handleError); } + @override Future rollback(datastore.Transaction transaction) { // TODO: Handle [transaction] var request = api.RollbackRequest() @@ -484,10 +490,8 @@ class QueryPageImpl implements Page { static Future runQuery(api.DatastoreApi api, String project, api.RunQueryRequest request, int limit, {int batchSize}) { - int batchLimit = batchSize; - if (batchLimit == null) { - batchLimit = MAX_ENTITIES_PER_RESPONSE; - } + var batchLimit = batchSize; + batchLimit ??= MAX_ENTITIES_PER_RESPONSE; if (limit != null && limit < batchLimit) { batchLimit = limit; } @@ -544,12 +548,12 @@ class QueryPageImpl implements Page { // If the server signals there are more entities and we either have no // limit or our limit has not been reached, we set `moreBatches` to // `true`. 
- bool moreBatches = (remainingEntities == null || remainingEntities > 0) && + var moreBatches = (remainingEntities == null || remainingEntities > 0) && response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT'; - bool gotAll = limit != null && remainingEntities == 0; - bool noMore = response.batch.moreResults == 'NO_MORE_RESULTS'; - bool isLast = gotAll || noMore; + var gotAll = limit != null && remainingEntities == 0; + var noMore = response.batch.moreResults == 'NO_MORE_RESULTS'; + var isLast = gotAll || noMore; // As a sanity check, we assert that `moreBatches XOR isLast`. assert(isLast != moreBatches); @@ -589,10 +593,13 @@ class QueryPageImpl implements Page { }); } + @override bool get isLast => _isLast; + @override List get items => _entities; + @override Future> next({int pageSize}) { // NOTE: We do not respect [pageSize] here, the only mechanism we can // really use is `query.limit`, but this is user-specified when making diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 5868a33b..ef6ead3a 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -58,7 +58,8 @@ class IdType { const IdType(this._type); - core.String toString() => "IdType: $_type"; + @override + core.String toString() => 'IdType: $_type'; } /// Describes a property of an Entity. @@ -103,9 +104,11 @@ abstract class PrimitiveProperty extends Property { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) => value; + @override Object decodePrimitiveValue(ModelDB db, Object value) => value; } @@ -118,6 +121,7 @@ class BoolProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is bool); } @@ -131,6 +135,7 @@ class IntProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is int); } @@ -144,6 +149,7 @@ class DoubleProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is double); } @@ -157,6 +163,7 @@ class StringProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is String); } @@ -170,14 +177,17 @@ class ModelKeyProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is Key); + @override Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; return 
db.toDatastoreKey(value as Key); } + @override Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; return db.fromDatastoreKey(value as ds.Key); @@ -197,14 +207,17 @@ class BlobProperty extends PrimitiveProperty { // of the range 0..255! // If an untyped list was created the type check will always succeed. i.e. // "[1, true, 'bar'] is List" evaluates to `true` + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is List); + @override Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; return ds.BlobValue(value as List); } + @override Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; @@ -221,9 +234,11 @@ class DateTimeProperty extends PrimitiveProperty { {String propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); + @override bool validate(ModelDB db, Object value) => super.validate(db, value) && (value == null || value is DateTime); + @override Object decodePrimitiveValue(ModelDB db, Object value) { if (value is int) { return DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); @@ -246,6 +261,7 @@ class ListProperty extends Property { {String propertyName, bool indexed = true}) : super(propertyName: propertyName, required: true, indexed: indexed); + @override bool validate(ModelDB db, Object value) { if (!super.validate(db, value) || value is! List) return false; @@ -255,6 +271,7 @@ class ListProperty extends Property { return true; } + @override Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (forComparison) { // If we have comparison of list properties (i.e. repeated property names) @@ -284,6 +301,7 @@ class ListProperty extends Property { return list.map((value) => subProperty.encodeValue(db, value)).toList(); } + @override Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return []; if (value is! List) return [subProperty.decodePrimitiveValue(db, value)]; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 68e5b2c1..81b17d0e 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -46,10 +46,10 @@ class Transaction { /// /// If the [key] is not found within the transaction and [orElse] was not /// specified, then a [KeyNotFoundException] will be thrown. 
- Future lookupValue(Key key, {T orElse()}) async { - final List values = await lookup([key]); + Future lookupValue(Key key, {T Function() orElse}) async { + final values = await lookup([key]); assert(values.length == 1); - T value = values.single; + var value = values.single; if (value == null) { if (orElse != null) { value = orElse(); @@ -111,7 +111,7 @@ class Transaction { } } - _checkSealed({int changeState, bool allowFailed = false}) { + void _checkSealed({int changeState, bool allowFailed = false}) { if (_state == _TRANSACTION_COMMITTED) { throw StateError('The transaction has already been committed.'); } else if (_state == _TRANSACTION_ROLLED_BACK) { @@ -255,7 +255,7 @@ class Query { String _convertToDatastoreName(String name) { var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name); if (propertyName == null) { - throw ArgumentError("Field $name is not available for kind $_kind"); + throw ArgumentError('Field $name is not available for kind $_kind'); } return propertyName; } @@ -267,9 +267,8 @@ class DatastoreDB { Partition _defaultPartition; DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) - : _modelDB = modelDB != null ? modelDB : ModelDBImpl() { - _defaultPartition = - defaultPartition != null ? defaultPartition : Partition(null); + : _modelDB = modelDB ?? ModelDBImpl() { + _defaultPartition = defaultPartition ?? Partition(null); } /// The [ModelDB] used to serialize/deserialize objects. @@ -348,10 +347,10 @@ class DatastoreDB { /// /// If the [key] is not found in the datastore and [orElse] was not /// specified, then a [KeyNotFoundException] will be thrown. - Future lookupValue(Key key, {T orElse()}) async { - final List values = await lookup([key]); + Future lookupValue(Key key, {T Function() orElse}) async { + final values = await lookup([key]); assert(values.length == 1); - T value = values.single; + var value = values.single; if (value == null) { if (orElse != null) { value = orElse(); @@ -390,9 +389,7 @@ Future _commitHelper(DatastoreDB db, for (var model in inserts) { // If parent was not explicitly set, we assume this model will map to // it's own entity group. - if (model.parentKey == null) { - model.parentKey = db.defaultPartition.emptyKey; - } + model.parentKey ??= db.defaultPartition.emptyKey; if (model.id == null) { autoIdModelInserts.add(model); entityAutoIdInserts.add(db.modelDB.toDatastoreEntity(model)); diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 33b16802..49d371e9 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -57,9 +57,10 @@ class ModelDBImpl implements ModelDB { } /// Converts a [ds.Key] to a [Key]. + @override Key fromDatastoreKey(ds.Key datastoreKey) { var namespace = Partition(datastoreKey.partition.namespace); - Key key = namespace.emptyKey; + var key = namespace.emptyKey; for (var element in datastoreKey.elements) { var type = _type2ModelDesc[_kind2ModelDesc[element.kind]]; if (type == null) { @@ -74,8 +75,9 @@ class ModelDBImpl implements ModelDB { } /// Converts a [Key] to a [ds.Key]. 
+ @override ds.Key toDatastoreKey(Key dbKey) { - List elements = []; + var elements = []; var currentKey = dbKey; while (!currentKey.isEmpty) { var id = currentKey.id; @@ -83,7 +85,7 @@ class ModelDBImpl implements ModelDB { var modelDescription = _modelDescriptionForType(currentKey.type); var kind = modelDescription.kindName(this); - bool useIntegerId = modelDescription.useIntegerId; + var useIntegerId = modelDescription.useIntegerId; if (useIntegerId && id != null && id is! int) { throw ArgumentError('Expected an integer id property but ' @@ -97,12 +99,13 @@ class ModelDBImpl implements ModelDB { elements.add(ds.KeyElement(kind, id)); currentKey = currentKey.parent; } - Partition partition = currentKey._parent as Partition; + var partition = currentKey._parent as Partition; return ds.Key(elements.reversed.toList(), partition: ds.Partition(partition.namespace)); } /// Converts a [Model] instance to a [ds.Entity]. + @override ds.Entity toDatastoreEntity(Model model) { try { var modelDescription = _modelDescriptionForType(model.runtimeType); @@ -113,10 +116,11 @@ class ModelDBImpl implements ModelDB { } /// Converts a [ds.Entity] to a [Model] instance. + @override T fromDatastoreEntity(ds.Entity entity) { if (entity == null) return null; - Key key = fromDatastoreKey(entity.key); + var key = fromDatastoreKey(entity.key); var kind = entity.key.elements.last.kind; var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { @@ -134,6 +138,7 @@ class ModelDBImpl implements ModelDB { /// Returns the string representation of the kind of model class [type]. /// /// If the model class `type` is not found it will throw an `ArgumentError`. + @override String kindName(Type type) { var kind = _modelDesc2Type[type]?.kind; if (kind == null) { @@ -144,6 +149,7 @@ class ModelDBImpl implements ModelDB { /// Returns the name of the property corresponding to the kind [kind] and /// [fieldName]. + @override String fieldNameToPropertyName(String kind, String fieldName) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { @@ -153,6 +159,7 @@ class ModelDBImpl implements ModelDB { } /// Converts [value] according to the [Property] named [name] in [type]. + @override Object toDatastoreValue(String kind, String fieldName, Object value, {bool forComparison = false}) { var modelDescription = _kind2ModelDesc[kind]; @@ -210,7 +217,7 @@ class ModelDBImpl implements ModelDB { void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) { Kind kindAnnotation; - for (mirrors.InstanceMirror instance in classMirror.metadata) { + for (var instance in classMirror.metadata) { if (instance.reflectee.runtimeType == Kind) { if (kindAnnotation != null) { throw StateError( @@ -227,9 +234,7 @@ class ModelDBImpl implements ModelDB { var stringId = kindAnnotation.idType == IdType.String; // Fall back to the class name. - if (name == null) { - name = mirrors.MirrorSystem.getName(classMirror.simpleName); - } + name ??= mirrors.MirrorSystem.getName(classMirror.simpleName); // This constraint should be guaranteed by the Kind() const constructor. assert((integerId && !stringId) || (!integerId && stringId)); @@ -298,8 +303,8 @@ class ModelDBImpl implements ModelDB { Map _propertiesFromModelDescription( mirrors.ClassMirror modelClassMirror) { - var properties = Map(); - var propertyNames = Set(); + var properties = {}; + var propertyNames = {}; // Loop over all classes in the inheritance path up to the Object class. 
while (modelClassMirror.superclass != null) { @@ -328,7 +333,7 @@ class ModelDBImpl implements ModelDB { // Determine the name to use for the property in datastore. var propertyName = property.propertyName; - if (propertyName == null) propertyName = fieldName; + propertyName ??= fieldName; if (properties.containsKey(fieldName)) { throw StateError( @@ -368,8 +373,8 @@ class ModelDBImpl implements ModelDB { class _ModelDescription { final HashMap _property2FieldName = HashMap(); final HashMap _field2PropertyName = HashMap(); - final Set _indexedProperties = Set(); - final Set _unIndexedProperties = Set(); + final Set _indexedProperties = {}; + final Set _unIndexedProperties = {}; final String kind; final bool useIntegerId; @@ -381,8 +386,8 @@ class _ModelDescription { db._propertiesForModel(this).forEach((String fieldName, Property prop) { // The default of a datastore property name is the fieldName. // It can be overridden with [Property.propertyName]. - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; + var propertyName = prop.propertyName; + propertyName ??= fieldName; _property2FieldName[propertyName] = fieldName; _field2PropertyName[fieldName] = propertyName; @@ -390,8 +395,8 @@ class _ModelDescription { // Compute properties & unindexed properties db._propertiesForModel(this).forEach((String fieldName, Property prop) { - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; + var propertyName = prop.propertyName; + propertyName ??= fieldName; if (prop.indexed) { _indexedProperties.add(propertyName); @@ -417,10 +422,15 @@ class _ModelDescription { unIndexedProperties: _unIndexedProperties); } - _encodeProperty(ModelDBImpl db, Model model, mirrors.InstanceMirror mirror, - Map properties, String fieldName, Property prop) { - String propertyName = prop.propertyName; - if (propertyName == null) propertyName = fieldName; + void _encodeProperty( + ModelDBImpl db, + Model model, + mirrors.InstanceMirror mirror, + Map properties, + String fieldName, + Property prop) { + var propertyName = prop.propertyName; + propertyName ??= fieldName; var value = mirror.getField(mirrors.MirrorSystem.getSymbol(fieldName)).reflectee; @@ -449,9 +459,9 @@ class _ModelDescription { return mirror.reflectee as H; } - _decodeProperty(ModelDBImpl db, ds.Entity entity, + void _decodeProperty(ModelDBImpl db, ds.Entity entity, mirrors.InstanceMirror mirror, String fieldName, Property prop) { - String propertyName = fieldNameToPropertyName(fieldName); + var propertyName = fieldNameToPropertyName(fieldName); var rawValue = entity.properties[propertyName]; var value = prop.decodePrimitiveValue(db, rawValue); @@ -480,7 +490,7 @@ class _ModelDescription { Object encodeField(ModelDBImpl db, String fieldName, Object value, {bool enforceFieldExists = true, bool forComparison = false}) { - Property property = db._propertiesForModel(this)[fieldName]; + var property = db._propertiesForModel(this)[fieldName]; if (property != null) { return property.encodeValue(db, value, forComparison: forComparison); } @@ -508,14 +518,16 @@ class _ExpandoModelDescription extends _ModelDescription { _ExpandoModelDescription(String kind, bool useIntegerId) : super(kind, useIntegerId); + @override void initialize(ModelDBImpl db) { super.initialize(db); realFieldNames = Set.from(_field2PropertyName.keys); realPropertyNames = Set.from(_property2FieldName.keys); - usedNames = Set()..addAll(realFieldNames)..addAll(realPropertyNames); + usedNames = 
{}..addAll(realFieldNames)..addAll(realPropertyNames); } + @override ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { var entity = super.encodeModel(db, model); var properties = entity.properties; @@ -528,10 +540,11 @@ class _ExpandoModelDescription extends _ModelDescription { return entity; } + @override T decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { if (entity == null) return null; - ExpandoModel model = super.decodeEntity(db, key, entity); + var model = super.decodeEntity(db, key, entity) as ExpandoModel; var properties = entity.properties; properties.forEach((String key, Object value) { if (!usedNames.contains(key)) { @@ -542,34 +555,37 @@ class _ExpandoModelDescription extends _ModelDescription { return model as T; } + @override String fieldNameToPropertyName(String fieldName) { - String propertyName = super.fieldNameToPropertyName(fieldName); + var propertyName = super.fieldNameToPropertyName(fieldName); // If the ModelDescription doesn't know about [fieldName], it's an // expanded property, where propertyName == fieldName. - if (propertyName == null) propertyName = fieldName; + propertyName ??= fieldName; return propertyName; } + @override String propertyNameToFieldName(ModelDBImpl db, String propertyName) { - String fieldName = super.propertyNameToFieldName(db, propertyName); + var fieldName = super.propertyNameToFieldName(db, propertyName); // If the ModelDescription doesn't know about [propertyName], it's an // expanded property, where propertyName == fieldName. - if (fieldName == null) fieldName = propertyName; + fieldName ??= propertyName; return fieldName; } + @override Object encodeField(ModelDBImpl db, String fieldName, Object value, {bool enforceFieldExists = true, bool forComparison = false}) { // The [enforceFieldExists] argument is intentionally ignored. - Object primitiveValue = super.encodeField(db, fieldName, value, + var primitiveValue = super.encodeField(db, fieldName, value, enforceFieldExists: false, forComparison: forComparison); // If superclass can't encode field, we return value here (and assume // it's primitive) // NOTE: Implicit assumption: // If value != null then superclass will return != null. // TODO: Ensure [value] is primitive in this case. - if (primitiveValue == null) primitiveValue = value; + primitiveValue ??= value; return primitiveValue; } } diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 95f645a9..9b734620 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -52,13 +52,15 @@ class Key { bool get isEmpty => _parent is Partition; - operator ==(Object other) { + @override + bool operator ==(Object other) { return other is Key && _parent == other._parent && type == other.type && id == other.id; } + @override int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode; } @@ -81,10 +83,12 @@ class Partition { /// group. Key get emptyKey => Key.emptyKey(this); - operator ==(Object other) { + @override + bool operator ==(Object other) { return other is Partition && namespace == other.namespace; } + @override int get hashCode => namespace.hashCode; } @@ -96,7 +100,7 @@ abstract class Model { Object id; Key parentKey; - Key get key => parentKey.append(this.runtimeType, id: id); + Key get key => parentKey.append(runtimeType, id: id); } /// Superclass for all expanded model classes. 
@@ -107,6 +111,7 @@ abstract class Model { abstract class ExpandoModel extends Model { final Map additionalProperties = {}; + @override Object noSuchMethod(Invocation invocation) { var name = mirrors.MirrorSystem.getName(invocation.memberName); if (name.endsWith('=')) name = name.substring(0, name.length - 1); diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index aba0de4f..f6ca96ed 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -5,6 +5,7 @@ part of gcloud.pubsub; class _PubSubImpl implements PubSub { + @override final String project; final pubsub.PubsubApi _api; final String _topicPrefix; @@ -112,7 +113,7 @@ class _PubSubImpl implements PubSub { void _checkTopicName(String name) { if (name.startsWith('projects/') && !name.contains('/topics/')) { throw ArgumentError( - "Illegal topic name. Absolute topic names must have the form " + 'Illegal topic name. Absolute topic names must have the form ' "'projects/[project-id]/topics/[topic-name]"); } if (name.endsWith('/topics/')) { @@ -124,7 +125,7 @@ class _PubSubImpl implements PubSub { void _checkSubscriptionName(String name) { if (name.startsWith('projects/') && !name.contains('/subscriptions/')) { throw ArgumentError( - "Illegal subscription name. Absolute subscription names must have " + 'Illegal subscription name. Absolute subscription names must have ' "the form 'projects/[project-id]/subscriptions/[subscription-name]"); } if (name.endsWith('/subscriptions/')) { @@ -134,22 +135,26 @@ class _PubSubImpl implements PubSub { } } + @override Future createTopic(String name) { _checkTopicName(name); return _createTopic(_fullTopicName(name)) .then((top) => _TopicImpl(this, top)); } + @override Future deleteTopic(String name) { _checkTopicName(name); return _deleteTopic(_fullTopicName(name)); } + @override Future lookupTopic(String name) { _checkTopicName(name); return _getTopic(_fullTopicName(name)).then((top) => _TopicImpl(this, top)); } + @override Stream listTopics() { Future> firstPage(int pageSize) { return _listTopics(pageSize, null) @@ -159,12 +164,14 @@ class _PubSubImpl implements PubSub { return StreamFromPages(firstPage).stream; } + @override Future> pageTopics({int pageSize = 50}) { return _listTopics(pageSize, null).then((response) { return _TopicPageImpl(this, pageSize, response); }); } + @override Future createSubscription(String name, String topic, {Uri endpoint}) { _checkSubscriptionName(name); @@ -174,17 +181,20 @@ class _PubSubImpl implements PubSub { .then((sub) => _SubscriptionImpl(this, sub)); } + @override Future deleteSubscription(String name) { _checkSubscriptionName(name); return _deleteSubscription(_fullSubscriptionName(name)); } + @override Future lookupSubscription(String name) { _checkSubscriptionName(name); return _getSubscription(_fullSubscriptionName(name)) .then((sub) => _SubscriptionImpl(this, sub)); } + @override Stream listSubscriptions([String query]) { Future> firstPage(int pageSize) { return _listSubscriptions(query, pageSize, null).then( @@ -194,6 +204,7 @@ class _PubSubImpl implements PubSub { return StreamFromPages(firstPage).stream; } + @override Future> pageSubscriptions( {String topic, int pageSize = 50}) { return _listSubscriptions(topic, pageSize, null).then((response) { @@ -213,6 +224,7 @@ class _MessageImpl implements Message { // null. 
final List _bytesMessage; + @override final Map attributes; _MessageImpl.withString(this._stringMessage, {this.attributes}) @@ -221,11 +233,11 @@ class _MessageImpl implements Message { _MessageImpl.withBytes(this._bytesMessage, {this.attributes}) : _stringMessage = null; - List get asBytes => - _bytesMessage != null ? _bytesMessage : utf8.encode(_stringMessage); + @override + List get asBytes => _bytesMessage ?? utf8.encode(_stringMessage); - String get asString => - _stringMessage != null ? _stringMessage : utf8.decode(_bytesMessage); + @override + String get asString => _stringMessage ?? utf8.decode(_bytesMessage); } /// Message received using [Subscription.pull]. @@ -241,16 +253,19 @@ class _PullMessage implements Message { _PullMessage(this._message); + @override List get asBytes { - if (_bytes == null) _bytes = _message.dataAsBytes; + _bytes ??= _message.dataAsBytes; return _bytes; } + @override String get asString { - if (_string == null) _string = utf8.decode(_message.dataAsBytes); + _string ??= utf8.decode(_message.dataAsBytes); return _string; } + @override Map get attributes => _message.attributes; } @@ -262,12 +277,15 @@ class _PullMessage implements Message { /// The labels have been decoded into a Map. class _PushMessage implements Message { final String _base64Message; + @override final Map attributes; _PushMessage(this._base64Message, this.attributes); + @override List get asBytes => base64.decode(_base64Message); + @override String get asString => utf8.decode(asBytes); } @@ -283,13 +301,15 @@ class _PullEventImpl implements PullEvent { /// Low level response received from Pub/Sub. final pubsub.PullResponse _response; + @override final Message message; _PullEventImpl( this._api, this._subscriptionName, pubsub.PullResponse response) - : this._response = response, + : _response = response, message = _PullMessage(response.receivedMessages[0].message); + @override Future acknowledge() { return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); } @@ -303,23 +323,25 @@ class _PushEventImpl implements PushEvent { final Message _message; final String _subscriptionName; + @override Message get message => _message; + @override String get subscriptionName => _subscriptionName; _PushEventImpl(this._message, this._subscriptionName); factory _PushEventImpl.fromJson(String json) { Map body = jsonDecode(json) as Map; - String data = body['message']['data'] as String; + var data = body['message']['data'] as String; Map labels = HashMap(); body['message']['labels'].forEach((label) { - String key = label['key'] as String; + var key = label['key'] as String; var value = label['strValue']; - if (value == null) value = label['numValue']; + value ??= label['numValue']; labels[key] = value.toString(); }); - String subscription = body['subscription'] as String; + var subscription = body['subscription'] as String; // TODO(#1): Remove this when the push event subscription name is prefixed // with '/subscriptions/'. 
if (!subscription.startsWith(PREFIX)) { @@ -335,28 +357,35 @@ class _TopicImpl implements Topic { _TopicImpl(this._api, this._topic); + @override String get name { assert(_topic.name.startsWith(_api._topicPrefix)); return _topic.name.substring(_api._topicPrefix.length); } + @override String get project { assert(_topic.name.startsWith(_api._topicPrefix)); return _api.project; } + @override String get absoluteName => _topic.name; + @override Future publish(Message message) { return _api._publish(_topic.name, message.asBytes, message.attributes); } + @override Future delete() => _api._deleteTopic(_topic.name); + @override Future publishString(String message, {Map attributes}) { return _api._publish(_topic.name, utf8.encode(message), attributes); } + @override Future publishBytes(List message, {Map attributes}) { return _api._publish(_topic.name, message, attributes); } @@ -368,25 +397,31 @@ class _SubscriptionImpl implements Subscription { _SubscriptionImpl(this._api, this._subscription); + @override String get name { assert(_subscription.name.startsWith(_api._subscriptionPrefix)); return _subscription.name.substring(_api._subscriptionPrefix.length); } + @override String get project { assert(_subscription.name.startsWith(_api._subscriptionPrefix)); return _api.project; } + @override String get absoluteName => _subscription.name; + @override Topic get topic { var topic = pubsub.Topic()..name = _subscription.topic; return _TopicImpl(_api, topic); } + @override Future delete() => _api._deleteSubscription(_subscription.name); + @override Future pull({bool wait = true}) { return _api._pull(_subscription.name, !wait).then((response) { // The documentation says 'Returns an empty list if there are no @@ -401,12 +436,16 @@ class _SubscriptionImpl implements Subscription { test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); } + @override Uri get endpoint => null; + @override bool get isPull => endpoint == null; + @override bool get isPush => endpoint != null; + @override Future updatePushConfiguration(Uri endpoint) { return _api._modifyPushConfig(_subscription.name, endpoint); } @@ -416,23 +455,26 @@ class _TopicPageImpl implements Page { final _PubSubImpl _api; final int _pageSize; final String _nextPageToken; + @override final List items; _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) : items = List(response.topics != null ? response.topics.length : 0), _nextPageToken = response.nextPageToken { if (response.topics != null) { - for (int i = 0; i < response.topics.length; i++) { + for (var i = 0; i < response.topics.length; i++) { items[i] = _TopicImpl(_api, response.topics[i]); } } } + @override bool get isLast => _nextPageToken == null; + @override Future> next({int pageSize}) { if (isLast) return Future.value(null); - if (pageSize == null) pageSize = this._pageSize; + pageSize ??= _pageSize; return _api._listTopics(pageSize, _nextPageToken).then((response) { return _TopicPageImpl(_api, pageSize, response); @@ -445,6 +487,7 @@ class _SubscriptionPageImpl implements Page { final String _topic; final int _pageSize; final String _nextPageToken; + @override final List items; _SubscriptionPageImpl(this._api, this._topic, this._pageSize, @@ -453,17 +496,19 @@ class _SubscriptionPageImpl implements Page { response.subscriptions != null ? 
response.subscriptions.length : 0), _nextPageToken = response.nextPageToken { if (response.subscriptions != null) { - for (int i = 0; i < response.subscriptions.length; i++) { + for (var i = 0; i < response.subscriptions.length; i++) { items[i] = _SubscriptionImpl(_api, response.subscriptions[i]); } } } + @override bool get isLast => _nextPageToken == null; + @override Future> next({int pageSize}) { if (_nextPageToken == null) return Future.value(null); - if (pageSize == null) pageSize = this._pageSize; + pageSize ??= _pageSize; return _api ._listSubscriptions(_topic, pageSize, _nextPageToken) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index c426f5af..2064e128 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -18,14 +18,14 @@ class _AbsoluteName { throw FormatException("Absolute name '$absoluteName' does not start " "with '$_ABSOLUTE_PREFIX'"); } - int index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); + var index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); if (index == -1 || index == _ABSOLUTE_PREFIX.length) { throw FormatException("Absolute name '$absoluteName' does not have " - "a bucket name"); + 'a bucket name'); } if (index == absoluteName.length - 1) { throw FormatException("Absolute name '$absoluteName' does not have " - "an object name"); + 'an object name'); } bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); objectName = absoluteName.substring(index + 1); @@ -40,6 +40,7 @@ class _StorageImpl implements Storage { _StorageImpl(http.Client client, this.project) : _api = storage_api.StorageApi(client); + @override Future createBucket(String bucketName, {PredefinedAcl predefinedAcl, Acl acl}) { var bucket = storage_api.Bucket()..name = bucketName; @@ -52,18 +53,21 @@ class _StorageImpl implements Storage { .then((bucket) => null); } + @override Future deleteBucket(String bucketName) { return _api.buckets.delete(bucketName); } + @override Bucket bucket(String bucketName, {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}) { return _BucketImpl( this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); } + @override Future bucketExists(String bucketName) { - notFoundError(e) { + bool notFoundError(e) { return e is storage_api.DetailedApiRequestError && e.status == 404; } @@ -73,12 +77,14 @@ class _StorageImpl implements Storage { .catchError((e) => false, test: notFoundError); } + @override Future bucketInfo(String bucketName) { return _api.buckets .get(bucketName, projection: 'full') .then((bucket) => _BucketInfoImpl(bucket)); } + @override Stream listBucketNames() { Future<_BucketPageImpl> firstPage(int pageSize) { return _listBuckets(pageSize, null) @@ -88,12 +94,14 @@ class _StorageImpl implements Storage { return StreamFromPages(firstPage).stream; } + @override Future> pageBucketNames({int pageSize = 50}) { return _listBuckets(pageSize, null).then((response) { return _BucketPageImpl(this, pageSize, response); }); } + @override Future copyObject(String src, String dest) { var srcName = _AbsoluteName.parse(src); var destName = _AbsoluteName.parse(dest); @@ -114,14 +122,19 @@ class _BucketInfoImpl implements BucketInfo { _BucketInfoImpl(this._bucket); + @override String get bucketName => _bucket.name; + @override String get etag => _bucket.etag; + @override DateTime get created => _bucket.timeCreated; + @override String get id => _bucket.id; + @override Acl get acl => Acl._fromBucketAcl(_bucket); } @@ -130,16 +143,19 @@ class 
_BucketImpl implements Bucket { final storage_api.StorageApi _api; final PredefinedAcl _defaultPredefinedObjectAcl; final Acl _defaultObjectAcl; + @override final String bucketName; _BucketImpl(_StorageImpl storage, this.bucketName, this._defaultPredefinedObjectAcl, this._defaultObjectAcl) - : this._api = storage._api; + : _api = storage._api; + @override String absoluteObjectName(String objectName) { return '$_ABSOLUTE_PREFIX$bucketName/$objectName'; } + @override StreamSink> write(String objectName, {int length, ObjectMetadata metadata, @@ -157,14 +173,13 @@ class _BucketImpl implements Bucket { metadata = metadata.replace(contentType: contentType); } } - _ObjectMetadata objectMetadata = metadata as _ObjectMetadata; + var objectMetadata = metadata as _ObjectMetadata; object = objectMetadata._object; // If no predefined ACL is passed use the default (if any). String predefinedName; if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) { - var predefined = - predefinedAcl != null ? predefinedAcl : _defaultPredefinedObjectAcl; + var predefined = predefinedAcl ?? _defaultPredefinedObjectAcl; predefinedName = predefined._name; } @@ -181,12 +196,13 @@ class _BucketImpl implements Bucket { return sink; } + @override Future writeBytes(String objectName, List bytes, {ObjectMetadata metadata, Acl acl, PredefinedAcl predefinedAcl, String contentType}) { - _MediaUploadStreamSink sink = write(objectName, + var sink = write(objectName, length: bytes.length, metadata: metadata, acl: acl, @@ -196,10 +212,9 @@ class _BucketImpl implements Bucket { return sink.close(); } + @override Stream> read(String objectName, {int offset, int length}) async* { - if (offset == null) { - offset = 0; - } + offset ??= 0; if (offset != 0 && length == null) { throw ArgumentError('length must have a value if offset is non-zero.'); @@ -219,22 +234,25 @@ class _BucketImpl implements Bucket { options = storage_api.PartialDownloadOptions(range); } - commons.Media media = (await _api.objects.get(bucketName, objectName, + var media = (await _api.objects.get(bucketName, objectName, downloadOptions: options)) as commons.Media; yield* media.stream; } + @override Future info(String objectName) { return _api.objects .get(bucketName, objectName, projection: 'full') .then((object) => _ObjectInfoImpl(object as storage_api.Object)); } + @override Future delete(String objectName) { return _api.objects.delete(bucketName, objectName); } + @override Stream list({String prefix}) { Future<_ObjectPageImpl> firstPage(int pageSize) { return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) @@ -245,6 +263,7 @@ class _BucketImpl implements Bucket { return StreamFromPages(firstPage).stream; } + @override Future> page({String prefix, int pageSize = 50}) { return _listObjects( bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) @@ -253,9 +272,10 @@ class _BucketImpl implements Bucket { }); } + @override Future updateMetadata(String objectName, ObjectMetadata metadata) { // TODO: support other ObjectMetadata implementations? 
- _ObjectMetadata md = metadata as _ObjectMetadata; + var md = metadata as _ObjectMetadata; var object = md._object; if (md._object.acl == null && _defaultObjectAcl == null) { throw ArgumentError('ACL is required for update'); @@ -263,9 +283,7 @@ class _BucketImpl implements Bucket { if (md.contentType == null) { throw ArgumentError('Content-Type is required for update'); } - if (md._object.acl == null) { - md._object.acl = _defaultObjectAcl._toObjectAccessControlList(); - } + md._object.acl ??= _defaultObjectAcl._toObjectAccessControlList(); return _api.objects.update(object, bucketName, objectName); } @@ -283,21 +301,24 @@ class _BucketPageImpl implements Page { final _StorageImpl _storage; final int _pageSize; final String _nextPageToken; + @override final List items; _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) : items = List(response.items != null ? response.items.length : 0), _nextPageToken = response.nextPageToken { - for (int i = 0; i < items.length; i++) { + for (var i = 0; i < items.length; i++) { items[i] = response.items[i].name; } } + @override bool get isLast => _nextPageToken == null; + @override Future> next({int pageSize}) { if (isLast) return Future.value(null); - if (pageSize == null) pageSize = this._pageSize; + pageSize ??= _pageSize; return _storage._listBuckets(pageSize, _nextPageToken).then((response) { return _BucketPageImpl(_storage, pageSize, response); @@ -310,6 +331,7 @@ class _ObjectPageImpl implements Page { final String _prefix; final int _pageSize; final String _nextPageToken; + @override final List items; _ObjectPageImpl( @@ -319,23 +341,25 @@ class _ObjectPageImpl implements Page { _nextPageToken = response.nextPageToken { var prefixes = 0; if (response.prefixes != null) { - for (int i = 0; i < response.prefixes.length; i++) { + for (var i = 0; i < response.prefixes.length; i++) { items[i] = BucketEntry._directory(response.prefixes[i]); } prefixes = response.prefixes.length; } if (response.items != null) { - for (int i = 0; i < response.items.length; i++) { + for (var i = 0; i < response.items.length; i++) { items[prefixes + i] = BucketEntry._object(response.items[i].name); } } } + @override bool get isLast => _nextPageToken == null; + @override Future> next({int pageSize}) { if (isLast) return Future.value(null); - if (pageSize == null) pageSize = this._pageSize; + pageSize ??= _pageSize; return _bucket ._listObjects(_bucket.bucketName, _prefix, _DIRECTORY_DELIMITER, @@ -347,7 +371,9 @@ class _ObjectPageImpl implements Page { } class _ObjectGenerationImpl implements ObjectGeneration { + @override final String objectGeneration; + @override final int metaGeneration; _ObjectGenerationImpl(this.objectGeneration, this.metaGeneration); @@ -363,37 +389,42 @@ class _ObjectInfoImpl implements ObjectInfo { : _object = object, _metadata = _ObjectMetadata._(object); + @override String get name => _object.name; + @override int get length => int.parse(_object.size); + @override DateTime get updated => _object.updated; + @override String get etag => _object.etag; + @override List get md5Hash => base64.decode(_object.md5Hash); + @override int get crc32CChecksum { var list = base64.decode(_object.crc32c); return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; } + @override Uri get downloadLink { - if (_downloadLink == null) { - _downloadLink = Uri.parse(_object.mediaLink); - } + _downloadLink ??= Uri.parse(_object.mediaLink); return _downloadLink; } + @override ObjectGeneration get generation { - if (_generation == null) { - 
_generation = _ObjectGenerationImpl( - _object.generation, int.parse(_object.metageneration)); - } + _generation ??= _ObjectGenerationImpl( + _object.generation, int.parse(_object.metageneration)); return _generation; } /// Additional metadata. + @override ObjectMetadata get metadata => _metadata; } @@ -423,39 +454,41 @@ class _ObjectMetadata implements ObjectMetadata { _ObjectMetadata._(this._object); + @override Acl get acl { - if (_cachedAcl == null) { - _cachedAcl = Acl._fromObjectAcl(_object); - } + _cachedAcl ??= Acl._fromObjectAcl(_object); return _cachedAcl; } + @override String get contentType => _object.contentType; + @override String get contentEncoding => _object.contentEncoding; + @override String get cacheControl => _object.cacheControl; + @override String get contentDisposition => _object.contentDisposition; + @override String get contentLanguage => _object.contentLanguage; ObjectGeneration get generation { - if (_cachedGeneration == null) { - _cachedGeneration = ObjectGeneration( - _object.generation, int.parse(_object.metageneration)); - } + _cachedGeneration ??= + ObjectGeneration(_object.generation, int.parse(_object.metageneration)); return _cachedGeneration; } + @override Map get custom { if (_object.metadata == null) return null; - if (_cachedCustom == null) { - _cachedCustom = UnmodifiableMapView(_object.metadata); - } + _cachedCustom ??= UnmodifiableMapView(_object.metadata); return _cachedCustom; } + @override ObjectMetadata replace( {Acl acl, String contentType, @@ -465,16 +498,12 @@ class _ObjectMetadata implements ObjectMetadata { String contentLanguage, Map custom}) { return _ObjectMetadata( - acl: acl != null ? acl : this.acl, - contentType: contentType != null ? contentType : this.contentType, - contentEncoding: - contentEncoding != null ? contentEncoding : this.contentEncoding, - cacheControl: cacheControl != null ? cacheControl : this.cacheControl, - contentDisposition: contentDisposition != null - ? contentDisposition - : this.contentEncoding, - contentLanguage: - contentLanguage != null ? contentLanguage : this.contentEncoding, + acl: acl ?? this.acl, + contentType: contentType ?? this.contentType, + contentEncoding: contentEncoding ?? this.contentEncoding, + cacheControl: cacheControl ?? this.cacheControl, + contentDisposition: contentDisposition ?? this.contentEncoding, + contentLanguage: contentLanguage ?? this.contentEncoding, custom: custom != null ? 
Map.from(custom) : this.custom); } } @@ -492,7 +521,7 @@ class _MediaUploadStreamSink implements StreamSink> { final int _length; final int _maxNormalUploadLength; int _bufferLength = 0; - final List> buffer = List>(); + final List> buffer = >[]; final _controller = StreamController>(sync: true); StreamSubscription _subscription; StreamController> _resumableController; @@ -524,26 +553,31 @@ class _MediaUploadStreamSink implements StreamSink> { } } + @override void add(List event) { _controller.add(event); } + @override void addError(errorEvent, [StackTrace stackTrace]) { _controller.addError(errorEvent, stackTrace); } + @override Future addStream(Stream> stream) { return _controller.addStream(stream); } + @override Future close() { _controller.close(); return _doneCompleter.future; } + @override Future get done => _doneCompleter.future; - _onData(List data) { + void _onData(List data) { assert(_state != _STATE_LENGTH_KNOWN); if (_state == _STATE_PROBING_LENGTH) { buffer.add(data); @@ -562,7 +596,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - _onDone() { + void _onDone() { if (_state == _STATE_PROBING_LENGTH) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. @@ -572,7 +606,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - _onError(e, StackTrace s) { + void _onError(e, StackTrace s) { // If still deciding on the strategy complete with error. Otherwise // forward the error for default processing. if (_state == _STATE_PROBING_LENGTH) { @@ -582,7 +616,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - _completeError(e, StackTrace s) { + void _completeError(e, StackTrace s) { if (_state != _STATE_LENGTH_KNOWN) { // Always cancel subscription on error. _subscription.cancel(); @@ -591,9 +625,7 @@ class _MediaUploadStreamSink implements StreamSink> { } void _startNormalUpload(Stream> stream, int length) { - var contentType = _object.contentType != null - ? _object.contentType - : 'application/octet-stream'; + var contentType = _object.contentType ?? 'application/octet-stream'; var media = storage_api.Media(stream, length, contentType: contentType); _api.objects .insert(_object, _bucketName, @@ -607,9 +639,7 @@ class _MediaUploadStreamSink implements StreamSink> { } void _startResumableUpload(Stream> stream, int length) { - var contentType = _object.contentType != null - ? _object.contentType - : 'application/octet-stream'; + var contentType = _object.contentType ?? 'application/octet-stream'; var media = storage_api.Media(stream, length, contentType: contentType); _api.objects .insert(_object, _bucketName, diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 9ab01ce0..27b3a86f 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -89,9 +89,9 @@ void registerStorageService(Storage storage) { int _jenkinsHash(List e) { const _HASH_MASK = 0x3fffffff; - int hash = 0; - for (int i = 0; i < e.length; i++) { - int c = e[i].hashCode; + var hash = 0; + for (var i = 0; i < e.length; i++) { + var c = e[i].hashCode; hash = (hash + c) & _HASH_MASK; hash = (hash + (hash << 10)) & _HASH_MASK; hash ^= (hash >> 6); @@ -121,7 +121,7 @@ class Acl { Acl._fromBucketAcl(storage_api.Bucket bucket) : _entries = List(bucket.acl == null ? 
0 : bucket.acl.length) { if (bucket.acl != null) { - for (int i = 0; i < bucket.acl.length; i++) { + for (var i = 0; i < bucket.acl.length; i++) { _entries[i] = AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), _aclPermissionFromRole(bucket.acl[i].role)); } @@ -131,7 +131,7 @@ class Acl { Acl._fromObjectAcl(storage_api.Object object) : _entries = List(object.acl == null ? 0 : object.acl.length) { if (object.acl != null) { - for (int i = 0; i < object.acl.length; i++) { + for (var i = 0; i < object.acl.length; i++) { _entries[i] = AclEntry(_aclScopeFromEntity(object.acl[i].entity), _aclPermissionFromRole(object.acl[i].role)); } @@ -140,8 +140,8 @@ class Acl { AclScope _aclScopeFromEntity(String entity) { if (entity.startsWith('user-')) { - String tmp = entity.substring(5); - int at = tmp.indexOf('@'); + var tmp = entity.substring(5); + var at = tmp.indexOf('@'); if (at != -1) { return AccountScope(tmp); } else { @@ -156,8 +156,8 @@ class Acl { } else if (entity.startsWith('allUsers-')) { return AclScope.allUsers; } else if (entity.startsWith('project-')) { - String tmp = entity.substring(8); - int dash = tmp.indexOf('-'); + var tmp = entity.substring(8); + var dash = tmp.indexOf('-'); if (dash != -1) { return ProjectScope(tmp.substring(dash + 1), tmp.substring(0, dash)); } @@ -181,18 +181,16 @@ class Acl { return _entries.map((entry) => entry._toObjectAccessControl()).toList(); } - int get hashCode { - return _cachedHashCode != null - ? _cachedHashCode - : _cachedHashCode = _jenkinsHash(_entries); - } + @override + int get hashCode => _cachedHashCode ??= _jenkinsHash(_entries); + @override bool operator ==(Object other) { if (other is Acl) { List entries = _entries; List otherEntries = other._entries; if (entries.length != otherEntries.length) return false; - for (int i = 0; i < entries.length; i++) { + for (var i = 0; i < entries.length; i++) { if (entries[i] != otherEntries[i]) return false; } return true; @@ -201,6 +199,7 @@ class Acl { } } + @override String toString() => 'Acl($_entries)'; } @@ -228,18 +227,17 @@ class AclEntry { return acl; } - int get hashCode { - return _cachedHashCode != null - ? _cachedHashCode - : _cachedHashCode = _jenkinsHash([scope, permission]); - } + @override + int get hashCode => _cachedHashCode ??= _jenkinsHash([scope, permission]); + @override bool operator ==(Object other) { return other is AclEntry && scope == other.scope && permission == other.permission; } + @override String toString() => 'AclEntry($scope, $permission)'; } @@ -297,16 +295,15 @@ abstract class AclScope { AclScope._(this._type, this._id); - int get hashCode { - return _cachedHashCode != null - ? _cachedHashCode - : _cachedHashCode = _jenkinsHash([_type, _id]); - } + @override + int get hashCode => _cachedHashCode ??= _jenkinsHash([_type, _id]); + @override bool operator ==(Object other) { return other is AclScope && _type == other._type && _id == other._id; } + @override String toString() => 'AclScope($_storageEntity)'; String get _storageEntity; @@ -323,6 +320,7 @@ class StorageIdScope extends AclScope { /// Google Storage ID. String get storageId => _id; + @override String get _storageEntity => 'user-$_id'; } @@ -333,6 +331,7 @@ class AccountScope extends AclScope { /// Email address. String get email => _id; + @override String get _storageEntity => 'user-$_id'; } @@ -343,6 +342,7 @@ class GroupScope extends AclScope { /// Group name. 
String get group => _id; + @override String get _storageEntity => 'group-$_id'; } @@ -353,6 +353,7 @@ class DomainScope extends AclScope { /// Domain name. String get domain => _id; + @override String get _storageEntity => 'domain-$_id'; } @@ -369,6 +370,7 @@ class ProjectScope extends AclScope { /// Project ID. String get project => _id; + @override String get _storageEntity => 'project-$role-$_id'; } @@ -376,6 +378,7 @@ class ProjectScope extends AclScope { class OpaqueScope extends AclScope { OpaqueScope(String id) : super._(AclScope._TYPE_OPAQUE, id); + @override String get _storageEntity => _id; } @@ -383,6 +386,7 @@ class OpaqueScope extends AclScope { class AllAuthenticatedScope extends AclScope { AllAuthenticatedScope() : super._(AclScope._TYPE_ALL_AUTHENTICATED, null); + @override String get _storageEntity => 'allAuthenticatedUsers'; } @@ -390,6 +394,7 @@ class AllAuthenticatedScope extends AclScope { class AllUsersScope extends AclScope { AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, null); + @override String get _storageEntity => 'allUsers'; } @@ -416,12 +421,15 @@ class AclPermission { String get _storageObjectRole => this == WRITE ? FULL_CONTROL._id : _id; + @override int get hashCode => _id.hashCode; + @override bool operator ==(Object other) { return other is AclPermission && _id == other._id; } + @override String toString() => 'AclPermission($_id)'; } @@ -469,6 +477,7 @@ class PredefinedAcl { static const PredefinedAcl bucketOwnerRead = PredefinedAcl._('bucketOwnerRead'); + @override String toString() => 'PredefinedAcl($_name)'; } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ace53a65..805c007c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,12 +1,12 @@ name: gcloud -version: 0.6.3 +version: 0.6.4-dev author: Dart Team description: | High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud environment: - sdk: '>=2.0.0 <3.0.0' + sdk: '>=2.3.0 <3.0.0' dependencies: _discoveryapis_commons: ^0.1.6+1 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 3b37c548..538f5ad5 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -18,7 +18,7 @@ const RESPONSE_HEADERS = {'content-type': CONTENT_TYPE_JSON_UTF8}; class MockClient extends http.BaseClient { static const bytes = [1, 2, 3, 4, 5]; - final _bytesHeaderRegexp = RegExp(r"bytes=(\d+)-(\d+)"); + final _bytesHeaderRegexp = RegExp(r'bytes=(\d+)-(\d+)'); final String hostname; final String rootPath; @@ -36,7 +36,7 @@ class MockClient extends http.BaseClient { void register( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => Map()); + var map = mocks.putIfAbsent(method, () => {}); if (path is RegExp) { map[RegExp('$rootPath${path.pattern}')] = handler; } else { @@ -46,13 +46,13 @@ class MockClient extends http.BaseClient { void registerUpload( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => Map()); + var map = mocks.putIfAbsent(method, () => {}); map['/upload$rootPath$path'] = handler; } void registerResumableUpload( String method, Pattern path, http_testing.MockClientHandler handler) { - var map = mocks.putIfAbsent(method, () => Map()); + var map = mocks.putIfAbsent(method, () => {}); map['/resumable/upload$rootPath$path'] = handler; } @@ -81,6 +81,7 @@ class MockClient extends http.BaseClient { return mockHandler(request); } + @override Future send(http.BaseRequest request) { return client.send(request); } @@ -194,6 +195,7 @@ class TraceClient extends http.BaseClient { TraceClient(this.client); + @override Future send(http.BaseRequest request) { print(request); return request.finalize().toBytes().then((body) { @@ -215,6 +217,7 @@ class TraceClient extends http.BaseClient { }); } + @override void close() { client.close(); } @@ -226,6 +229,7 @@ class RequestImpl extends http.BaseRequest { RequestImpl(String method, Uri url, this._body) : super(method, url); + @override http.ByteStream finalize() { super.finalize(); return http.ByteStream.fromBytes(_body); diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index a4e70fd2..93a86033 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -72,8 +72,8 @@ typedef AuthCallback = Future Function(String project, http.Client client); Future withAuthClient(List scopes, AuthCallback callback, {bool trace = false}) { - String project = Platform.environment[PROJECT_ENV]; - String serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; + var project = Platform.environment[PROJECT_ENV]; + var serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; if (!onBot() && (project == null || serviceKeyLocation == null)) { throw StateError( @@ -81,9 +81,8 @@ Future withAuthClient(List scopes, AuthCallback callback, 'required when not running on the package bot'); } - project = project != null ? project : DEFAULT_PROJECT; - serviceKeyLocation = - serviceKeyLocation != null ? serviceKeyLocation : DEFAULT_KEY_LOCATION; + project = project ?? DEFAULT_PROJECT; + serviceKeyLocation = serviceKeyLocation ?? 
DEFAULT_KEY_LOCATION; return serviceKeyJson(serviceKeyLocation).then((keyJson) { var creds = auth.ServiceAccountCredentials.fromJson(keyJson); diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 6646af4d..af5bc42a 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -49,9 +49,9 @@ Future> consumePages(FirstPageProvider provider) { } void runTests(Datastore datastore, String namespace) { - Partition partition = Partition(namespace); + var partition = Partition(namespace); - Future withTransaction(FutureOr f(Transaction t), + Future withTransaction(FutureOr Function(Transaction t) f, {bool xg = false}) { return datastore.beginTransaction(crossEntityGroup: xg).then(f); } @@ -125,7 +125,7 @@ void runTests(Datastore datastore, String namespace) { bool compareKey(Key a, Key b, {bool ignoreIds = false}) { if (a.partition != b.partition) return false; if (a.elements.length != b.elements.length) return false; - for (int i = 0; i < a.elements.length; i++) { + for (var i = 0; i < a.elements.length; i++) { if (a.elements[i].kind != b.elements[i].kind) return false; if (!ignoreIds && a.elements[i].id != b.elements[i].id) return false; } @@ -194,9 +194,9 @@ void runTests(Datastore datastore, String namespace) { return test(null); } - Future> testInsertNegative(List entities, + FutureOr testInsertNegative(List entities, {bool transactional = false, bool xg = false}) { - test(Transaction transaction) { + void test(Transaction transaction) { expect( datastore.commit( autoIdInserts: entities, transaction: transaction), @@ -206,7 +206,7 @@ void runTests(Datastore datastore, String namespace) { if (transactional) { return withTransaction(test, xg: xg); } - return test(null); + test(null); } var unnamedEntities1 = buildEntities(42, 43, partition: partition); @@ -268,18 +268,18 @@ void runTests(Datastore datastore, String namespace) { group('allocate_ids', () { test('allocate_ids_query', () { - compareResult(List keys, List completedKeys) { + void compareResult(List keys, List completedKeys) { expect(completedKeys.length, equals(keys.length)); - for (int i = 0; i < keys.length; i++) { + for (var i = 0; i < keys.length; i++) { var insertedKey = keys[i]; var completedKey = completedKeys[i]; expect(completedKey.elements.length, equals(insertedKey.elements.length)); - for (int j = 0; j < insertedKey.elements.length - 1; j++) { + for (var j = 0; j < insertedKey.elements.length - 1; j++) { expect(completedKey.elements[j], equals(insertedKey.elements[j])); } - for (int j = insertedKey.elements.length - 1; + for (var j = insertedKey.elements.length - 1; j < insertedKey.elements.length; j++) { expect(completedKey.elements[j].kind, @@ -317,7 +317,7 @@ void runTests(Datastore datastore, String namespace) { return datastore.lookup(keysToLookup).then((List entities) { expect(entities.length, equals(keysToLookup.length)); if (negative) { - for (int i = 0; i < entities.length; i++) { + for (var i = 0; i < entities.length; i++) { expect(entities[i], isNull); } } else { @@ -472,9 +472,9 @@ void runTests(Datastore datastore, String namespace) { } var namedEntities1 = - buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition); var namedEntities5 = - buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); + buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition); 
var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); @@ -507,11 +507,11 @@ void runTests(Datastore datastore, String namespace) { } var namedEntities1 = - buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition); var namedEntities5 = - buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); + buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition); var namedEntities20 = - buildEntities(6, 26, idFunction: (i) => "i$i", partition: partition); + buildEntities(6, 26, idFunction: (i) => 'i$i', partition: partition); var namedEntities1Keys = namedEntities1.map((e) => e.key).toList(); var namedEntities5Keys = namedEntities5.map((e) => e.key).toList(); @@ -545,11 +545,11 @@ void runTests(Datastore datastore, String namespace) { Future test(List entities, Transaction transaction, value) { // Change entities: var changedEntities = List(entities.length); - for (int i = 0; i < entities.length; i++) { + for (var i = 0; i < entities.length; i++) { var entity = entities[i]; var newProperties = Map.from(entity.properties); for (var prop in newProperties.keys) { - newProperties[prop] = "${newProperties[prop]}conflict$value"; + newProperties[prop] = '${newProperties[prop]}conflict$value'; } changedEntities[i] = Entity(entity.key, newProperties); } @@ -589,9 +589,9 @@ void runTests(Datastore datastore, String namespace) { } var namedEntities1 = - buildEntities(42, 43, idFunction: (i) => "i$i", partition: partition); + buildEntities(42, 43, idFunction: (i) => 'i$i', partition: partition); var namedEntities5 = - buildEntities(1, 6, idFunction: (i) => "i$i", partition: partition); + buildEntities(1, 6, idFunction: (i) => 'i$i', partition: partition); test('conflicting_transaction', () { expect(testConflictingTransaction(namedEntities1), @@ -656,13 +656,13 @@ void runTests(Datastore datastore, String namespace) { expect(entities.length, equals(expectedEntities.length)); if (correctOrder) { - for (int i = 0; i < entities.length; i++) { + for (var i = 0; i < entities.length; i++) { expect(compareEntity(entities[i], expectedEntities[i]), isTrue); } } else { - for (int i = 0; i < entities.length; i++) { - bool found = false; - for (int j = 0; j < expectedEntities.length; j++) { + for (var i = 0; i < entities.length; i++) { + var found = false; + for (var j = 0; j < expectedEntities.length; j++) { if (compareEntity(entities[i], expectedEntities[i])) { found = true; } @@ -677,11 +677,11 @@ void runTests(Datastore datastore, String namespace) { {List orders, bool transactional = false, bool xg = false}) { // We query for all subsets of expectedEntities // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. 
- List queryTests = []; - for (int start = 0; start < expectedEntities.length; start++) { - for (int end = start; end < expectedEntities.length; end++) { - int offset = start; - int limit = end - start; + var queryTests = []; + for (var start = 0; start < expectedEntities.length; start++) { + for (var end = start; end < expectedEntities.length; end++) { + var offset = start; + var limit = end - start; var entities = expectedEntities.sublist(offset, offset + limit); queryTests.add(() { return testQueryAndCompare(kind, entities, @@ -714,8 +714,8 @@ void runTests(Datastore datastore, String namespace) { var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList(); var QUERY_KEY = TEST_PROPERTY_KEY_PREFIX; - var QUERY_UPPER_BOUND = "${TEST_PROPERTY_VALUE_PREFIX}4"; - var QUERY_LOWER_BOUND = "${TEST_PROPERTY_VALUE_PREFIX}1"; + var QUERY_UPPER_BOUND = '${TEST_PROPERTY_VALUE_PREFIX}4'; + var QUERY_LOWER_BOUND = '${TEST_PROPERTY_VALUE_PREFIX}1'; var QUERY_LIST_ENTRY = '${TEST_LIST_VALUE}2'; var QUERY_INDEX_VALUE = '${TEST_INDEXED_PROPERTY_VALUE_PREFIX}1'; @@ -1039,7 +1039,7 @@ Future cleanupDB(Datastore db, String namespace) { } // cleanup() will call itself again as long as the DB is not clean. - cleanup(String namespace, String kind) { + Future cleanup(String namespace, String kind) { var partition = Partition(namespace); var q = Query(kind: kind, limit: 500); return consumePages((_) => db.query(q, partition: partition)) @@ -1078,7 +1078,7 @@ Future waitUntilEntitiesHelper( var q = Query(kind: kind); return consumePages((_) => db.query(q, partition: p)).then((entities) { for (var key in keys) { - bool found = false; + var found = false; for (var entity in entities) { if (key == entity.key) found = true; } diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 2a42e4af..1b5106a0 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -51,7 +51,7 @@ List buildKeys(int from, int to, List buildEntities(int from, int to, {Function idFunction, String kind = TEST_KIND, Partition partition}) { var entities = []; - var unIndexedProperties = Set(); + var unIndexedProperties = {}; for (var i = from; i < to; i++) { var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition); var properties = buildProperties(i); @@ -65,7 +65,7 @@ List buildEntities(int from, int to, List buildEntityWithAllProperties(int from, int to, {String kind = TEST_KIND, Partition partition}) { var us42 = const Duration(microseconds: 42); - var unIndexed = Set.from(['blobProperty']); + var unIndexed = {'blobProperty'}; Map buildProperties(int i) { return { diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index b9fd736e..e0f503ec 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -13,7 +13,7 @@ import 'package:test/test.dart'; @Kind() class Foobar extends Model {} -main() { +void main() { group('db', () { test('default-partition', () { var db = DatastoreDB(null); diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 66c7b1c4..3770998a 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -65,7 +65,8 @@ class Person extends db.Model { @db.ModelKeyProperty(propertyName: 'mangledWife') db.Key wife; - operator ==(Object other) => sameAs(other); + @override + bool operator ==(Object other) => sameAs(other); bool sameAs(Object other) { return other is 
Person && @@ -76,6 +77,7 @@ class Person extends db.Model { wife == other.wife; } + @override String toString() => 'Person(id: $id, name: $name, age: $age)'; } @@ -87,12 +89,13 @@ class User extends Person { @db.StringListProperty(propertyName: 'language') List languages = const []; - sameAs(Object other) { + @override + bool sameAs(Object other) { if (!(super.sameAs(other) && other is User && nickname == other.nickname)) { return false; } - User user = other as User; + var user = other as User; if (languages == null) { if (user.languages == null) return true; return false; @@ -101,7 +104,7 @@ class User extends Person { return false; } - for (int i = 0; i < languages.length; i++) { + for (var i = 0; i < languages.length; i++) { if (languages[i] != user.languages[i]) { return false; } @@ -109,6 +112,7 @@ class User extends Person { return true; } + @override String toString() => 'User(${super.toString()}, nickname: $nickname, languages: $languages'; } @@ -121,7 +125,8 @@ class ExpandoPerson extends db.ExpandoModel { @db.StringProperty(propertyName: 'NN') String nickname; - operator ==(Object other) { + @override + bool operator ==(Object other) { if (other is ExpandoPerson && id == other.id && name == other.name) { if (additionalProperties.length != other.additionalProperties.length) { return false; @@ -148,7 +153,7 @@ void runTests(db.DatastoreDB store, String namespace) { if (anyOrder) { // Do expensive O(n^2) search. for (var searchModel in expectedModels) { - bool found = false; + var found = false; for (var m in models) { if (m == searchModel) { found = true; @@ -307,7 +312,7 @@ void runTests(db.DatastoreDB store, String namespace) { persons[0].parentKey = users[0].key; users[1].parentKey = persons[1].key; - return testInsertLookupDelete([]..addAll(users)..addAll(persons)); + return testInsertLookupDelete([...users, ...persons]); }); test('auto_ids', () { @@ -449,7 +454,7 @@ void runTests(db.DatastoreDB store, String namespace) { .where((User u) => u.wife == root.append(User, id: 42 + 3)) .toList(); - var allInserts = []..addAll(users)..addAll(expandoPersons); + var allInserts = [...users, ...expandoPersons]; var allKeys = allInserts.map((db.Model model) => model.key).toList(); return store.commit(inserts: allInserts).then((_) { return Future.wait([ @@ -629,23 +634,23 @@ void runTests(db.DatastoreDB store, String namespace) { Future> runQueryWithExponentialBackoff( db.Query query, int expectedResults) async { - for (int i = 0; i <= 6; i++) { + for (var i = 0; i <= 6; i++) { if (i > 0) { // Wait for 0.1s, 0.2s, ..., 12.8s var duration = Duration(milliseconds: 100 * (2 << i)); - print("Running query did return less results than expected." - "Using exponential backoff: Sleeping for $duration."); + print('Running query did return less results than expected.' 
+ 'Using exponential backoff: Sleeping for $duration.'); await sleep(duration); } - List models = await query.run().toList(); + var models = await query.run().toList(); if (models.length >= expectedResults) { return models; } } throw Exception( - "Tried running a query with exponential backoff, giving up now."); + 'Tried running a query with exponential backoff, giving up now.'); } Future waitUntilEntitiesReady( @@ -664,13 +669,13 @@ Future waitUntilEntitiesHelper( bool positive, db.Partition partition, ) async { - bool done = false; + var done = false; while (!done) { final models = await mdb.query(partition: partition).run().toList(); done = true; for (var key in keys) { - bool found = false; + var found = false; for (var model in models) { if (key == model.key) found = true; } diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 1a576019..11a669fe 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -14,12 +14,12 @@ import 'package:gcloud/db.dart' as db; import 'package:gcloud/db/metamodel.dart'; List buildEntitiesWithDifferentNamespaces() { - newKey(String namespace, String kind, int id) { + Key newKey(String namespace, String kind, int id) { var partition = Partition(namespace); return Key([KeyElement(kind, id)], partition: partition); } - newEntity(String namespace, String kind, {int id = 1}) { + Entity newEntity(String namespace, String kind, {int id = 1}) { return Entity(newKey(namespace, kind, id), {'ping': 'pong'}); } diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 5b2e403d..8d798717 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -15,12 +15,12 @@ import 'model_dbs/duplicate_property.dart' as test2; import 'model_dbs/multiple_annotations.dart' as test3; import 'model_dbs/no_default_constructor.dart' as test5; -main() { +void main() { // These unused imports make sure that [ModelDBImpl.fromLibrary()] will find // all the Model/ModelDescription classes. 
assert([test1.A, test2.A, test3.A, test4.A, test5.A] != null); - newModelDB(Symbol symbol) => ModelDBImpl.fromLibrary(symbol); + ModelDBImpl newModelDB(Symbol symbol) => ModelDBImpl.fromLibrary(symbol); group('model_db', () { group('from_library', () { diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 33b6bce0..6cd0cde8 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -14,6 +14,7 @@ class A extends db.Model { @db.Kind() class B extends A { + @override @db.IntProperty(propertyName: 'bar') int foo; } diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index dbc7f028..28bad1cf 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -10,7 +10,7 @@ import 'package:gcloud/db.dart'; import 'package:gcloud/datastore.dart' as datastore; import 'package:test/test.dart'; -main() { +void main() { group('properties', () { test('bool_property', () { var prop = const BoolProperty(required: true); @@ -180,8 +180,10 @@ main() { class Custom { String customValue; + @override int get hashCode => customValue.hashCode; + @override bool operator ==(other) { return other is Custom && other.customValue == customValue; } @@ -191,16 +193,19 @@ class CustomProperty extends StringProperty { const CustomProperty( {String propertyName, bool required = false, bool indexed = true}); + @override bool validate(ModelDB db, Object value) { if (required && value == null) return false; return value == null || value is Custom; } + @override Object decodePrimitiveValue(ModelDB db, Object value) { if (value == null) return null; return Custom()..customValue = value as String; } + @override Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { if (value == null) return null; return (value as Custom).customValue; @@ -212,13 +217,20 @@ class KeyMock implements Key { KeyMock(this._datastoreKey); + @override Object id = 1; + @override Type type; + @override Key get parent => this; + @override bool get isEmpty => false; + @override Partition get partition => null; datastore.Key get datastoreKey => _datastoreKey; + @override Key append(Type modelType, {Object id}) => null; + @override int get hashCode => 1; } @@ -227,25 +239,32 @@ class ModelDBMock implements ModelDB { final Key _dbKey; ModelDBMock(this._datastoreKey, this._dbKey); + @override Key fromDatastoreKey(datastore.Key datastoreKey) { if (!identical(_datastoreKey, datastoreKey)) { - throw "Broken test"; + throw 'Broken test'; } return _dbKey; } + @override datastore.Key toDatastoreKey(Key key) { if (!identical(_dbKey, key)) { - throw "Broken test"; + throw 'Broken test'; } return _datastoreKey; } Map propertiesForModel(modelDescription) => null; + @override T fromDatastoreEntity(datastore.Entity entity) => null; + @override datastore.Entity toDatastoreEntity(Model model) => null; + @override String fieldNameToPropertyName(String kind, String fieldName) => null; + @override String kindName(Type type) => null; + @override Object toDatastoreValue(String kind, String fieldName, Object value, {bool forComparison = false}) => null; diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index afc7f8fd..d6809f2c 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. 
Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -@Tags(["e2e"]) +@Tags(['e2e']) library gcloud.test.db_all_test; @@ -22,7 +22,7 @@ import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; Future main() async { var scopes = datastore_impl.DatastoreImpl.SCOPES; var now = DateTime.now().millisecondsSinceEpoch; - String namespace = '${Platform.operatingSystem}$now'; + var namespace = '${Platform.operatingSystem}$now'; datastore_impl.DatastoreImpl datastore; db.DatastoreDB datastoreDB; diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index f99c0af2..44e2d095 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -@Tags(["e2e"]) +@Tags(['e2e']) import 'package:gcloud/pubsub.dart'; import 'package:http/http.dart'; @@ -30,8 +30,8 @@ void main() { }); tearDownAll(() async { - bool leftovers = false; - bool cleanupErrors = false; + var leftovers = false; + var cleanupErrors = false; print('checking for leftover subscriptions'); try { @@ -115,11 +115,11 @@ void main() { }); test('create-list-delete', () async { - const int count = 5; + const count = 5; var topicPrefix = generateTopicName(); - name(i) => '$topicPrefix-$i'; + String name(i) => '$topicPrefix-$i'; for (var i = 0; i < count; i++) { await pubsub.createTopic(name(i)); @@ -152,13 +152,13 @@ void main() { }); test('create-list-delete', () async { - const int count = 5; + const count = 5; var topicName = generateTopicName(); await pubsub.createTopic(topicName); var subscriptionPrefix = generateSubscriptionName(); - name(i) => '$subscriptionPrefix-$i'; + String name(i) => '$subscriptionPrefix-$i'; for (var i = 0; i < count; i++) { await pubsub.createSubscription(name(i), topicName); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 776ce387..546c736e 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -20,7 +20,7 @@ const String ROOT_PATH = '/v1/'; MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); -main() { +void main() { group('api', () { var badTopicNames = [ 'projects/', @@ -141,25 +141,28 @@ main() { }); group('query', () { - addTopics(pubsub.ListTopicsResponse response, int first, int count) { + void addTopics( + pubsub.ListTopicsResponse response, int first, int count) { response.topics = []; - for (int i = 0; i < count; i++) { + for (var i = 0; i < count; i++) { response.topics.add(pubsub.Topic()..name = 'topic-${first + i}'); } } // Mock that expect/generates [n] topics in pages of page size // [pageSize]. - registerQueryMock(MockClient mock, int n, int pageSize, - [int totalCalls]) { - int totalPages = (n + pageSize - 1) ~/ pageSize; + void registerQueryMock( + MockClient mock, + int n, + int pageSize, [ + int totalCalls, + ]) { + var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; // Can pass in total calls if this mock is overwritten before all // expected pages are done, e.g. when testing errors. 
- if (totalCalls == null) { - totalCalls = totalPages; - } + totalCalls ??= totalPages; var pageCount = 0; mock.register( 'GET', @@ -269,7 +272,7 @@ main() { }); test('error', () { - runTest(bool withPause) { + void runTest(bool withPause) { // Test error on first GET request. var mock = mockClient(); mock.register('GET', 'projects/$PROJECT/topics', @@ -300,7 +303,7 @@ main() { var api = PubSub(mock, PROJECT); - int count = 0; + var count = 0; var subscription; subscription = api.listTopics().listen( expectAsync1(((_) { @@ -370,7 +373,7 @@ main() { }); test('multiple', () { - runTest(int n, int pageSize) { + Future runTest(int n, int pageSize) { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; @@ -378,7 +381,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, n, pageSize); - handlePage(Page page) { + void handlePage(Page page) { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, @@ -528,10 +531,10 @@ main() { }); group('query', () { - addSubscriptions( + void addSubscriptions( pubsub.ListSubscriptionsResponse response, int first, int count) { response.subscriptions = []; - for (int i = 0; i < count; i++) { + for (var i = 0; i < count; i++) { response.subscriptions .add(pubsub.Subscription()..name = 'subscription-${first + i}'); } @@ -539,16 +542,14 @@ main() { // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. - registerQueryMock(MockClient mock, int n, int pageSize, + void registerQueryMock(MockClient mock, int n, int pageSize, {String topic, int totalCalls}) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; // Can pass in total calls if this mock is overwritten before all // expected pages are done, e.g. when testing errors. - if (totalCalls == null) { - totalCalls = totalPages; - } + totalCalls ??= totalPages; var pageCount = 0; mock.register( 'GET', @@ -670,7 +671,7 @@ main() { }); test('error', () { - runTest(bool withPause) { + void runTest(bool withPause) { // Test error on first GET request. var mock = mockClient(); mock.register('GET', 'projects/$PROJECT/subscriptions', @@ -695,14 +696,14 @@ main() { }); test('error-2', () { - runTest(bool withPause) { + void runTest(bool withPause) { // Test error on second GET request. 
var mock = mockClient(); registerQueryMock(mock, 51, 50, totalCalls: 1); var api = PubSub(mock, PROJECT); - int count = 0; + var count = 0; var subscription; subscription = api.listSubscriptions().listen( expectAsync1(((_) { @@ -731,7 +732,7 @@ main() { }); group('page', () { - emptyTest(String topic) { + Future emptyTest(String topic) { var mock = mockClient(); registerQueryMock(mock, 0, 50, topic: topic); @@ -760,7 +761,7 @@ main() { emptyTest('topic'); }); - singleTest(String topic) { + Future singleTest(String topic) { var mock = mockClient(); registerQueryMock(mock, 10, 50, topic: topic); @@ -789,7 +790,7 @@ main() { singleTest('topic'); }); - multipleTest(int n, int pageSize, String topic) { + Future multipleTest(int n, int pageSize, String topic) { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; @@ -797,7 +798,7 @@ main() { var mock = mockClient(); registerQueryMock(mock, n, pageSize, topic: topic); - handlingPage(Page page) { + void handlingPage(Page page) { pageCount++; expect(page.isLast, pageCount == totalPages); expect(page.items.length, @@ -855,15 +856,18 @@ main() { var messageBase64 = base64.encode(messageBytes); var attributes = {'a': '1', 'b': 'text'}; - registerLookup(MockClient mock) { + void registerLookup(MockClient mock) { mock.register('GET', absoluteName, expectAsync1((request) { expect(request.body.length, 0); return mock.respond(pubsub.Topic()..name = absoluteName); })); } - registerPublish( - MockClient mock, int count, Future fn(request)) { + void registerPublish( + MockClient mock, + int count, + Future Function(pubsub.PublishRequest) fn, + ) { mock.register( 'POST', 'projects/test-project/topics/test-topic:publish', @@ -1023,7 +1027,7 @@ main() { } '''; var event = PushEvent.fromJson(requestBody); - expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.asString, 'Hello, world 30 of 50!'); expect(event.message.attributes['messageNo'], '30'); expect(event.message.attributes['test'], 'hello'); expect(event.subscriptionName, absoluteSubscriptionName); @@ -1049,7 +1053,7 @@ main() { } '''; var event = PushEvent.fromJson(requestBody); - expect(event.message.asString, "Hello, world 30 of 50!"); + expect(event.message.asString, 'Hello, world 30 of 50!'); expect(event.message.attributes['messageNo'], '30'); expect(event.message.attributes['test'], 'hello'); expect(event.subscriptionName, absoluteSubscriptionName); diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index c4da569d..331e2bd9 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -9,7 +9,7 @@ import 'dart:async'; import 'package:gcloud/service_scope.dart' as ss; import 'package:test/test.dart'; -main() { +void main() { test('no-service-scope', () { expect(() => ss.register(1, 'foobar'), throwsA(isStateError)); expect( @@ -61,11 +61,11 @@ main() { test('correct-insertion-and-cleanup-order', () { // Ensure cleanup functions are called in the reverse order of inserting // their entries. - int insertions = 0; + var insertions = 0; return ss.fork(expectAsync0(() => Future.value(() { - int NUM = 10; + var NUM = 10; - for (int i = 0; i < NUM; i++) { + for (var i = 0; i < NUM; i++) { var key = i; insertions++; @@ -76,7 +76,7 @@ main() { return null; })); - for (int j = 0; j <= NUM; j++) { + for (var j = 0; j <= NUM; j++) { if (j <= i) { expect(ss.lookup(key), 'value$i'); } else { @@ -120,10 +120,10 @@ main() { // result in an error. 
// Ensure the fork() error message contains all error messages from the // failed cleanup() calls. - int insertions = 0; + var insertions = 0; return ss .fork(() => Future.sync(() { - for (int i = 0; i < 10; i++) { + for (var i = 0; i < 10; i++) { insertions++; ss.register(i, 'value$i'); ss.registerScopeExitCallback(() { @@ -135,7 +135,7 @@ main() { } })) .catchError(expectAsync2((e, _) { - for (int i = 0; i < 10; i++) { + for (var i = 0; i < 10; i++) { expect('$e'.contains('xx${i}yy'), equals(i.isEven)); } })); @@ -198,8 +198,8 @@ main() { var subKey2 = 4; return ss.fork(expectAsync0(() { - int cleanupFork1 = 0; - int cleanupFork2 = 0; + var cleanupFork1 = 0; + var cleanupFork2 = 0; ss.register(rootKey, 'root'); ss.registerScopeExitCallback(expectAsync0(() { diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index f0645d68..6b056cb3 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -@Tags(["e2e"]) +@Tags(['e2e']) library gcloud.storage; @@ -110,7 +110,7 @@ void main() { group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. - Future withTestBucket(Future function(Bucket bucket)) { + Future withTestBucket(Future Function(Bucket bucket) function) { return function(testBucket).whenComplete(() { // TODO: Clean the bucket. }); @@ -189,18 +189,18 @@ void main() { })); } - Acl acl1 = + var acl1 = Acl([AclEntry(AclScope.allAuthenticated, AclPermission.WRITE)]); - Acl acl2 = Acl([ + var acl2 = Acl([ AclEntry(AclScope.allUsers, AclPermission.WRITE), AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE) ]); - Acl acl3 = Acl([ + var acl3 = Acl([ AclEntry(AclScope.allUsers, AclPermission.WRITE), AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE), AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ) ]); - Acl acl4 = Acl([ + var acl4 = Acl([ AclEntry(AclScope.allUsers, AclPermission.WRITE), AclEntry(AccountScope('sgjesse@google.com'), AclPermission.WRITE), AclEntry(GroupScope('misc@dartlang.org'), AclPermission.READ), diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 226b6c04..18e99bb2 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -22,18 +22,18 @@ const String ROOT_PATH = '/storage/v1/'; MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); -withMockClient(function(MockClient client, Storage storage)) { +void withMockClient(Function(MockClient client, Storage storage) function) { var mock = mockClient(); function(mock, Storage(mock, PROJECT)); } Future withMockClientAsync( - Future function(MockClient client, Storage storage)) async { + Future Function(MockClient client, Storage storage) function) async { var mock = mockClient(); await function(mock, Storage(mock, PROJECT)); } -main() { +void main() { group('bucket', () { var bucketName = 'test-bucket'; @@ -60,7 +60,7 @@ main() { ]; withMockClient((mock, api) { - int count = 0; + var count = 0; mock.register( 'POST', @@ -76,7 +76,7 @@ main() { }, count: predefined.length)); var futures = []; - for (int i = 0; i < predefined.length; i++) { + for (var i = 0; i < predefined.length; i++) { futures.add(api.createBucket(bucketName, predefinedAcl: predefined[i][0] as 
PredefinedAcl)); } @@ -101,7 +101,7 @@ main() { var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { - int count = 0; + var count = 0; mock.register( 'POST', @@ -128,7 +128,7 @@ main() { }, count: acls.length)); var futures = []; - for (int i = 0; i < acls.length; i++) { + for (var i = 0; i < acls.length; i++) { futures.add(api.createBucket(bucketName, acl: acls[i])); } return Future.wait(futures); @@ -160,7 +160,7 @@ main() { var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { - int count = 0; + var count = 0; mock.register( 'POST', @@ -168,8 +168,8 @@ main() { expectAsync1((http.Request request) { var requestBucket = storage.Bucket.fromJson(jsonDecode(request.body) as Map); - int predefinedIndex = count ~/ acls.length; - int aclIndex = count % acls.length; + var predefinedIndex = count ~/ acls.length; + var aclIndex = count % acls.length; expect(requestBucket.name, bucketName); expect(request.url.queryParameters['predefinedAcl'], predefined[predefinedIndex][1]); @@ -190,8 +190,8 @@ main() { }, count: predefined.length * acls.length)); var futures = []; - for (int i = 0; i < predefined.length; i++) { - for (int j = 0; j < acls.length; j++) { + for (var i = 0; i < predefined.length; i++) { + for (var j = 0; j < acls.length; j++) { futures.add(api.createBucket(bucketName, predefinedAcl: predefined[i][0] as PredefinedAcl, acl: acls[j])); @@ -318,9 +318,9 @@ main() { var bytesNormalUpload = [1, 2, 3]; // Generate a list just above the limit when changing to resumable upload. - const int MB = 1024 * 1024; - const int maxNormalUpload = 1 * MB; - const int minResumableUpload = maxNormalUpload + 1; + const MB = 1024 * 1024; + const maxNormalUpload = 1 * MB; + const minResumableUpload = maxNormalUpload + 1; var bytesResumableUpload = List.generate(minResumableUpload, (e) => e & 255); @@ -331,7 +331,7 @@ main() { return null; }; - expectNormalUpload(MockClient mock, data, String objectName) { + void expectNormalUpload(MockClient mock, data, String objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { return mock @@ -347,10 +347,10 @@ main() { })); } - expectResumableUpload(MockClient mock, data, String objectName) { + void expectResumableUpload(MockClient mock, data, String objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); expect(bytes.length, bytesResumableUpload.length); - int count = 0; + var count = 0; mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { var requestObject = @@ -373,7 +373,7 @@ main() { }, count: 2)); } - checkResult(result) { + void checkResult(result) { expect(result.name, objectName); } @@ -410,7 +410,8 @@ main() { MockClient mock, Storage api, List> data, int length) { var bucket = api.bucket(bucketName); - Future upload(Future fn(StreamSink> sink, List> data), + Future upload( + Future Function(StreamSink> sink, List> data) fn, bool sendLength) { mock.clear(); if (length <= maxNormalUpload) { @@ -615,7 +616,7 @@ main() { ]; withMockClient((mock, api) { - int count = 0; + var count = 0; var bytes = [1, 2, 3]; mock.registerUpload( @@ -627,12 +628,10 @@ main() { .then(expectAsync1((mediaUpload) { var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); - ObjectMetadata m = metadata[count]; + var m = metadata[count]; expect(object.name, objectName); expect(mediaUpload.bytes, bytes); - var contentType = m.contentType != null - ? m.contentType - : 'application/octet-stream'; + var contentType = m.contentType ?? 
'application/octet-stream'; expect(mediaUpload.contentType, contentType); expect(object.cacheControl, m.cacheControl); expect(object.contentDisposition, m.contentDisposition); @@ -646,7 +645,7 @@ main() { var bucket = api.bucket(bucketName); var futures = []; - for (int i = 0; i < metadata.length; i++) { + for (var i = 0; i < metadata.length; i++) { futures .add(bucket.writeBytes(objectName, bytes, metadata: metadata[i])); } @@ -670,8 +669,8 @@ main() { ]; withMockClient((mock, api) { - int countInitial = 0; - int countData = 0; + var countInitial = 0; + var countData = 0; mock.registerResumableUpload( 'POST', @@ -679,7 +678,7 @@ main() { expectAsync1((request) { var object = storage.Object.fromJson(jsonDecode(request.body) as Map); - ObjectMetadata m = metadata[countInitial]; + var m = metadata[countInitial]; expect(object.name, objectName); expect(object.cacheControl, m.cacheControl); expect(object.contentDisposition, m.contentDisposition); @@ -693,12 +692,10 @@ main() { 'PUT', 'b/$PROJECT/o', expectAsync1((request) { - ObjectMetadata m = metadata[countData % metadata.length]; - var contentType = m.contentType != null - ? m.contentType - : 'application/octet-stream'; + var m = metadata[countData % metadata.length]; + var contentType = m.contentType ?? 'application/octet-stream'; expect(request.headers['content-type'], contentType); - bool firstPart = countData < metadata.length; + var firstPart = countData < metadata.length; countData++; if (firstPart) { expect(request.bodyBytes.length, MB); @@ -711,7 +708,7 @@ main() { var bucket = api.bucket(bucketName); var futures = []; - for (int i = 0; i < metadata.length; i++) { + for (var i = 0; i < metadata.length; i++) { futures.add(bucket.writeBytes(objectName, bytesResumableUpload, metadata: metadata[i])); } @@ -730,7 +727,7 @@ main() { ]; withMockClient((mock, api) { - int count = 0; + var count = 0; var bytes = [1, 2, 3]; mock.registerUpload( @@ -754,7 +751,7 @@ main() { var bucket = api.bucket(bucketName); var futures = []; - for (int i = 0; i < predefined.length; i++) { + for (var i = 0; i < predefined.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, predefinedAcl: predefined[i][0] as PredefinedAcl)); } @@ -779,7 +776,7 @@ main() { var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { - int count = 0; + var count = 0; var bytes = [1, 2, 3]; mock.registerUpload( @@ -814,7 +811,7 @@ main() { var bucket = api.bucket(bucketName); var futures = []; - for (int i = 0; i < acls.length; i++) { + for (var i = 0; i < acls.length; i++) { futures.add(bucket.writeBytes(objectName, bytes, acl: acls[i])); } return Future.wait(futures); @@ -847,7 +844,7 @@ main() { var acls = [acl1, acl2, acl3]; withMockClient((mock, api) { - int count = 0; + var count = 0; var bytes = [1, 2, 3]; mock.registerUpload( @@ -857,8 +854,8 @@ main() { return mock .processNormalMediaUpload(request) .then(expectAsync1((mediaUpload) { - int predefinedIndex = count ~/ acls.length; - int aclIndex = count % acls.length; + var predefinedIndex = count ~/ acls.length; + var aclIndex = count % acls.length; var object = storage.Object.fromJson( jsonDecode(mediaUpload.json) as Map); expect(object.name, objectName); @@ -885,8 +882,8 @@ main() { var bucket = api.bucket(bucketName); var futures = []; - for (int i = 0; i < predefined.length; i++) { - for (int j = 0; j < acls.length; j++) { + for (var i = 0; i < predefined.length; i++) { + for (var j = 0; j < acls.length; j++) { futures.add(bucket.writeBytes(objectName, bytes, acl: acls[j], predefinedAcl: predefined[i][0] 
as PredefinedAcl)); @@ -919,7 +916,7 @@ main() { fail('An exception should be thrown'); } on ArgumentError catch (e) { expect( - e.message, "length must have a value if offset is non-zero."); + e.message, 'length must have a value if offset is non-zero.'); } }); }); @@ -932,7 +929,7 @@ main() { await bucket.read(objectName, offset: 1, length: 0).toList(); fail('An exception should be thrown'); } on ArgumentError catch (e) { - expect(e.message, "If provided, length must greater than zero."); + expect(e.message, 'If provided, length must greater than zero.'); } }); }); @@ -945,7 +942,7 @@ main() { await bucket.read(objectName, length: -1).toList(); fail('An exception should be thrown'); } on ArgumentError catch (e) { - expect(e.message, "If provided, length must greater than zero."); + expect(e.message, 'If provided, length must greater than zero.'); } }); }); From 2fe1172709964bbe349903ab5cfde63411d6191e Mon Sep 17 00:00:00 2001 From: Matias Meno Date: Thu, 5 Mar 2020 11:33:49 +0100 Subject: [PATCH 147/239] Fix test that missed the cast implementation --- pkgs/gcloud/test/db/properties_test.dart | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 0a599e54..45f71d7b 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -231,6 +231,8 @@ class KeyMock implements Key { @override Key append(Type modelType, {T id}) => null; @override + Key cast() => Key(parent, type, id as U); + @override int get hashCode => 1; } From 48545ef6f58c19afb7081eec2d06d27ba055668a Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Fri, 13 Mar 2020 12:20:11 -0700 Subject: [PATCH 148/239] Remove author from pubspec --- pkgs/gcloud/pubspec.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 805c007c..88f3a435 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,7 +1,6 @@ name: gcloud version: 0.6.4-dev -author: Dart Team -description: | +description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud From 488a7a58863444466a6fe198e33fda1a5f05826b Mon Sep 17 00:00:00 2001 From: William Hesse Date: Mon, 4 May 2020 21:29:14 +0200 Subject: [PATCH 149/239] 0.7.0+1: Correct path separator in Bucket.list() to be '/'. Fix Bucket.list() so that object paths are split on '/' when listing directories and objects. --- pkgs/gcloud/CHANGELOG.md | 6 +++++- pkgs/gcloud/lib/src/storage_impl.dart | 2 +- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 98c7cc81..b8d95fd8 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.7.0+1 + + * Fix path separator in Bucket.list(). + ## 0.7.0 * **BREAKING CHANGE:** Add generics support for `Model.id`. @@ -8,7 +12,7 @@ ## 0.6.4 -* Require minimum Dart SDK `2.3.0`. + * Require minimum Dart SDK `2.3.0`. 
## 0.6.3 diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 2064e128..a0ac2ae2 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -5,7 +5,7 @@ part of gcloud.storage; const String _ABSOLUTE_PREFIX = 'gs://'; -const String _DIRECTORY_DELIMITER = 'gs://'; +const String _DIRECTORY_DELIMITER = '/'; /// Representation of an absolute name consisting of bucket name and object /// name. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 40c8887d..c5ff5154 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.7.0 +version: 0.7.0+1 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud From 8a5530566c9c1eb9a52bf085901ecbe1d0084a87 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 12 May 2020 15:56:04 -0700 Subject: [PATCH 150/239] Update test expectations after googleapis update (dart-lang/gcloud#97) --- pkgs/gcloud/test/common.dart | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 538f5ad5..c52dc380 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -20,7 +20,6 @@ class MockClient extends http.BaseClient { final _bytesHeaderRegexp = RegExp(r'bytes=(\d+)-(\d+)'); - final String hostname; final String rootPath; final Uri rootUri; @@ -28,8 +27,7 @@ class MockClient extends http.BaseClient { http_testing.MockClient client; MockClient(String hostname, String rootPath) - : hostname = hostname, - rootPath = rootPath, + : rootPath = rootPath, rootUri = Uri.parse('https://$hostname$rootPath') { client = http_testing.MockClient(handler); } @@ -61,7 +59,10 @@ class MockClient extends http.BaseClient { } Future handler(http.Request request) { - expect(request.url.host, hostname); + expect( + request.url.host, + anyOf(rootUri.host, 'storage.googleapis.com'), + ); var path = request.url.path; if (mocks[request.method] == null) { throw 'No mock handler for method ${request.method} found. ' From d00a32a048704b069e3aa6f8c5271f25a0f5cfe5 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 7 May 2020 17:36:20 +0200 Subject: [PATCH 151/239] Upgrade _discoveryapis_commons, and fix mocks to work with newer googleapis package --- pkgs/gcloud/CHANGELOG.md | 6 ++++++ pkgs/gcloud/pubspec.yaml | 4 ++-- pkgs/gcloud/test/common.dart | 6 ++++-- pkgs/gcloud/test/storage/storage_test.dart | 2 +- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index b8d95fd8..8f4aebc7 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,9 @@ +## 0.7.0+2 + + * Upgrade dependency on `_discoveryapis_commons`, changing `ApiRequestError` + from an `Error` to an `Exception`. Version constraints on + `_discoveryapis_commons` allows both new and old versions. + ## 0.7.0+1 * Fix path separator in Bucket.list(). diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index c5ff5154..b80171e6 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.7.0+1 +version: 0.7.0+2 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud @@ -8,7 +8,7 @@ environment: sdk: '>=2.3.0 <3.0.0' dependencies: - _discoveryapis_commons: ^0.1.6+1 + _discoveryapis_commons: '>=0.1.6+1 <0.3.0' googleapis: '>=0.50.2 <1.0.0' http: '>=0.11.0 <0.13.0' meta: ^1.0.2 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index c52dc380..c79d75f5 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -20,6 +20,7 @@ class MockClient extends http.BaseClient { final _bytesHeaderRegexp = RegExp(r'bytes=(\d+)-(\d+)'); + final String hostname; final String rootPath; final Uri rootUri; @@ -27,7 +28,8 @@ class MockClient extends http.BaseClient { http_testing.MockClient client; MockClient(String hostname, String rootPath) - : rootPath = rootPath, + : hostname = hostname, + rootPath = rootPath, rootUri = Uri.parse('https://$hostname$rootPath') { client = http_testing.MockClient(handler); } @@ -98,7 +100,7 @@ class MockClient extends http.BaseClient { Future respondInitiateResumableUpload(project) { final headers = Map.from(RESPONSE_HEADERS); - headers['location'] = 'https://www.googleapis.com/resumable/upload$rootPath' + headers['location'] = 'https://$hostname/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' 'Mu5bEoC9b4teJcJUKpqceCUeqKzuoP_jz2ps_dV0P0nT8OTuZQ'; diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 18e99bb2..5b979a18 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -17,7 +17,7 @@ import 'package:googleapis/storage/v1.dart' as storage; import '../common.dart'; import '../common_e2e.dart'; -const String HOSTNAME = 'www.googleapis.com'; +const String HOSTNAME = 'storage.googleapis.com'; const String ROOT_PATH = '/storage/v1/'; MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); From 1f32c02edb24a5ec3bce84381ac2efa66d05fc5f Mon Sep 17 00:00:00 2001 From: Istvan Soos Date: Wed, 3 Jun 2020 10:57:07 +0200 Subject: [PATCH 152/239] Adding delimiter to Bucket.list and Bucket.page --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/storage_impl.dart | 34 ++++++++++++++------------- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 8f4aebc7..ff502bc6 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.7.1 + + * Added `delimiter` to `Bucket.list` and `Bucket.page`. 
+ ## 0.7.0+2 * Upgrade dependency on `_discoveryapis_commons`, changing `ApiRequestError` diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index a0ac2ae2..86b53af3 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -253,23 +253,24 @@ class _BucketImpl implements Bucket { } @override - Stream list({String prefix}) { - Future<_ObjectPageImpl> firstPage(int pageSize) { - return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null) - .then( - (response) => _ObjectPageImpl(this, prefix, pageSize, response)); + Stream list({String prefix, String delimiter}) { + delimiter ??= _DIRECTORY_DELIMITER; + Future<_ObjectPageImpl> firstPage(int pageSize) async { + final response = + await _listObjects(bucketName, prefix, delimiter, 50, null); + return _ObjectPageImpl(this, prefix, delimiter, pageSize, response); } return StreamFromPages(firstPage).stream; } @override - Future> page({String prefix, int pageSize = 50}) { - return _listObjects( - bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null) - .then((response) { - return _ObjectPageImpl(this, prefix, pageSize, response); - }); + Future> page( + {String prefix, String delimiter, int pageSize = 50}) async { + delimiter ??= _DIRECTORY_DELIMITER; + final response = + await _listObjects(bucketName, prefix, delimiter, pageSize, null); + return _ObjectPageImpl(this, prefix, delimiter, pageSize, response); } @override @@ -329,13 +330,14 @@ class _BucketPageImpl implements Page { class _ObjectPageImpl implements Page { final _BucketImpl _bucket; final String _prefix; + final String _delimiter; final int _pageSize; final String _nextPageToken; @override final List items; - _ObjectPageImpl( - this._bucket, this._prefix, this._pageSize, storage_api.Objects response) + _ObjectPageImpl(this._bucket, this._prefix, this._delimiter, this._pageSize, + storage_api.Objects response) : items = List((response.items != null ? response.items.length : 0) + (response.prefixes != null ? response.prefixes.length : 0)), _nextPageToken = response.nextPageToken { @@ -362,10 +364,10 @@ class _ObjectPageImpl implements Page { pageSize ??= _pageSize; return _bucket - ._listObjects(_bucket.bucketName, _prefix, _DIRECTORY_DELIMITER, - pageSize, _nextPageToken) + ._listObjects( + _bucket.bucketName, _prefix, _delimiter, pageSize, _nextPageToken) .then((response) { - return _ObjectPageImpl(_bucket, _prefix, pageSize, response); + return _ObjectPageImpl(_bucket, _prefix, _delimiter, pageSize, response); }); } } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b80171e6..22f05902 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.7.0+2 +version: 0.7.1 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud From ae738c53d7f824bd59ef18e26d5a2a92c9e27cff Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 9 Jun 2020 18:19:36 +0200 Subject: [PATCH 153/239] Fix tests to work again. * Increased some timeouts * Disabled tests that require non-uniform bucket ACLs. (Non-uniform bucket ACLs can be disabled for an account) * Use Application Default Credentials for testing. * Skip tests that only fail on legacy Datastore. 
(Firestore in Datastore mode has fewer limitations) --- pkgs/gcloud/README.md | 10 ++- pkgs/gcloud/test/common_e2e.dart | 75 ++++--------------- .../datastore/e2e/datastore_test_impl.dart | 4 +- pkgs/gcloud/test/db_all_e2e_test.dart | 1 + pkgs/gcloud/test/index.yaml | 2 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 7 +- pkgs/gcloud/test/storage/e2e_test.dart | 17 +++-- 7 files changed, 41 insertions(+), 75 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 84ab74fb..fee281e7 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -230,12 +230,12 @@ If you want to run the end-to-end tests, a Google Cloud project is required. When running these tests the following environment variables need to be set: GCLOUD_E2E_TEST_PROJECT - GCLOUD_E2E_TEST_KEY The value of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name -of the Google Cloud project to use. The value of the environment variable -`GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`) -to a JSON key file for a service account providing access to the Cloud Project. +of the Google Cloud project to use. Authentication for testing uses +[Application Default Credentials][ADC] locally you can provide +`GOOGLE_APPLICATION_CREDENTIALS` or use +[`gcloud auth application-default login`][gcloud-adc]. You will also need to create indexes as follows: @@ -250,3 +250,5 @@ gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index. [googleapisbeta]: https://pub.dartlang.org/packages/googleapis_beta [googleapisauth]: https://pub.dartlang.org/packages/googleapis_beta [appengine]: https://pub.dartlang.org/packages/appengine +[ADC]: https://cloud.google.com/docs/authentication/production +[gcloud-adc]: https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 93a86033..586b1fce 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -17,13 +17,6 @@ const PROJECT = 'test-project'; // Environment variables for specifying the cloud project to use and the // location of the service account key for that project. const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; -const String SERVICE_KEY_LOCATION_ENV = 'GCLOUD_E2E_TEST_KEY'; - -// Default project and service key location used when running on the package -// bot. -const String DEFAULT_PROJECT = 'dart-gcloud-e2e'; -const String DEFAULT_KEY_LOCATION = - 'gs://dart-archive-internal/keys/dart-gcloud-e2e.json'; // Used for storage e2e tests: // @@ -35,62 +28,22 @@ const String DEFAULT_KEY_LOCATION = // attempt to account for that. const STORAGE_LIST_DELAY = Duration(seconds: 5); -bool onBot() { - // When running on the package-bot the current user is chrome-bot. - var envName; - if (Platform.isWindows) { - envName = 'USERNAME'; - } else { - envName = 'USER'; - } - return Platform.environment[envName] == 'chrome-bot'; -} - -// Get the service key from the specified location. -Future serviceKeyJson(String serviceKeyLocation) { - if (!serviceKeyLocation.startsWith('gs://')) { - return File(serviceKeyLocation).readAsString(); - } - Future future; - if (onBot()) { - future = Process.run( - 'python', ['third_party/gsutil/gsutil', 'cat', serviceKeyLocation], - runInShell: true); - } else { - var gsutil = Platform.isWindows ? 
'gsutil.cmd' : 'gsutil'; - future = Process.run(gsutil, ['cat', serviceKeyLocation]); - } - return future.then((result) { - if (result.exitCode != 0) { - throw Exception('Failed to run gsutil, ${result.stderr}'); - } - return result.stdout.toString(); - }); -} - -typedef AuthCallback = Future Function(String project, http.Client client); - -Future withAuthClient(List scopes, AuthCallback callback, - {bool trace = false}) { +Future withAuthClient( + List scopes, + Future Function(String project, http.Client client) callback, { + bool trace = false, +}) async { var project = Platform.environment[PROJECT_ENV]; - var serviceKeyLocation = Platform.environment[SERVICE_KEY_LOCATION_ENV]; - if (!onBot() && (project == null || serviceKeyLocation == null)) { - throw StateError( - 'Environment variables $PROJECT_ENV and $SERVICE_KEY_LOCATION_ENV ' - 'required when not running on the package bot'); + if (project == null) { + throw StateError('Environment variables $PROJECT_ENV '); } - project = project ?? DEFAULT_PROJECT; - serviceKeyLocation = serviceKeyLocation ?? DEFAULT_KEY_LOCATION; - - return serviceKeyJson(serviceKeyLocation).then((keyJson) { - var creds = auth.ServiceAccountCredentials.fromJson(keyJson); - return auth - .clientViaServiceAccount(creds, scopes) - .then((http.Client client) { - if (trace) client = TraceClient(client); - return callback(project, client); - }); - }); + http.Client client = await auth.clientViaApplicationDefaultCredentials( + scopes: scopes, + ); + if (trace) { + client = TraceClient(client); + } + return await callback(project, client); } diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index af5bc42a..d8540cfc 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -254,7 +254,9 @@ void runTests(Datastore datastore, String namespace) { test('negative_insert_transactional_xg', () { return testInsertNegative(unnamedEntities26, transactional: true, xg: true); - }); + }, + skip: 'With Firestore in Datastore mode, transactions are no longer ' + 'limited to 25 entity groups'); test('negative_insert_20000_entities', () async { // Maybe it should not be a [DataStoreError] here? diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index d6809f2c..2a89eb54 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -3,6 +3,7 @@ // BSD-style license that can be found in the LICENSE file. @Tags(['e2e']) +@Timeout(Duration(seconds: 120)) library gcloud.test.db_all_test; diff --git a/pkgs/gcloud/test/index.yaml b/pkgs/gcloud/test/index.yaml index 9f3dcec5..96048adb 100644 --- a/pkgs/gcloud/test/index.yaml +++ b/pkgs/gcloud/test/index.yaml @@ -1,6 +1,6 @@ # To run tests for datastore, the following index.yaml must be declared for # the project using: -# $ gcloud datastore indexes create test/index.yaml +# $ gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml indexes: # Needed by tests in: test/db/e2e/db_test_impl.dart - kind: User diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 44e2d095..086030ae 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -3,6 +3,7 @@ // BSD-style license that can be found in the LICENSE file. 
@Tags(['e2e']) +@Timeout(Duration(seconds: 120)) import 'package:gcloud/pubsub.dart'; import 'package:http/http.dart'; @@ -20,7 +21,6 @@ void main() { // Generate a unique prefix for all names generated by the tests. var id = DateTime.now().millisecondsSinceEpoch; prefix = 'dart-e2e-test-$id'; - return withAuthClient(PubSub.SCOPES, (String _project, httpClient) async { // Share the same pubsub connection for all tests. pubsub = PubSub(httpClient, _project); @@ -33,6 +33,11 @@ void main() { var leftovers = false; var cleanupErrors = false; + // Don't cleanup if setup failed + if (pubsub == null) { + return; + } + print('checking for leftover subscriptions'); try { // Try to delete any leftover subscriptions from the tests. diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 6b056cb3..799782f4 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -47,12 +47,15 @@ void main() { }); }); - tearDownAll(() { + tearDownAll(() async { + // Don't cleanup if setup failed + if (storage == null) { + return; + } // Deleting a bucket relies on eventually consistent behaviour, hence // the delay in attempt to prevent test flakiness. - return Future.delayed(STORAGE_LIST_DELAY, () { - return storage.deleteBucket(testBucketName); - }); + await Future.delayed(STORAGE_LIST_DELAY); + await storage.deleteBucket(testBucketName); }); group('bucket', () { @@ -98,7 +101,7 @@ void main() { var r2 = await storage.deleteBucket(bucketName); expect(r2, isNull); } - }); + }, skip: 'unable to test with uniform buckets enforced for account'); test('create-error', () { storage.createBucket('goog-reserved').catchError(expectAsync1((e) { @@ -168,7 +171,7 @@ void main() { () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) ], (f) => f().then(expectAsync1((_) {}))); }); - }); + }, skip: 'unable to test with uniform buckets enforced for account'); test('create-with-acl-delete', () { return withTestBucket((Bucket bucket) { @@ -217,7 +220,7 @@ void main() { () => test('test-4', acl4, acl4.entries.length + 1) ], (f) => f().then(expectAsync1((_) {}))); }); - }); + }, skip: 'unable to test with uniform buckets enforced for account'); test('create-with-metadata-delete', () { return withTestBucket((Bucket bucket) { From 7aaa2a87e3e02ed98425ef3382c33b8fdca4ef1d Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 12 May 2020 13:47:25 +0200 Subject: [PATCH 154/239] Make ExpandoModel generic --- pkgs/gcloud/CHANGELOG.md | 2 ++ pkgs/gcloud/lib/src/db/models.dart | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index ff502bc6..0fc84eac 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,6 +1,8 @@ ## 0.7.1 * Added `delimiter` to `Bucket.list` and `Bucket.page`. + * Fix typing of `ExpandoModel` to `ExpandoModel` as we should have done in + version `0.7.0`. ## 0.7.0+2 diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 85425e15..6739d648 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -111,7 +111,7 @@ abstract class Model { /// The [ExpandoModel] class adds support for having dynamic properties. You can /// set arbitrary fields on these models. The expanded values must be values /// accepted by the [RawDatastore] implementation. 
-abstract class ExpandoModel extends Model { +abstract class ExpandoModel extends Model { final Map additionalProperties = {}; @override From e9c6015da625241a1fc67044dac35a1ba7fad807 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Wed, 10 Jun 2020 13:17:49 +0200 Subject: [PATCH 155/239] Revert the isExpandoClass logic from before generic keys See: https://github.com/dart-lang/gcloud/pull/83/files#diff-506144bf373d1aae4f1c30a19670b470L360-L370 --- pkgs/gcloud/lib/src/db/model_db_impl.dart | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 7c2516c3..62e6ba83 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -366,8 +366,17 @@ class ModelDBImpl implements ModelDB { return properties; } - bool _isExpandoClass(mirrors.ClassMirror modelClass) => - modelClass.isSubtypeOf(mirrors.reflectClass(ExpandoModel)); + bool _isExpandoClass(mirrors.ClassMirror modelClass) { + while (modelClass.superclass != modelClass) { + if (modelClass.reflectedType == ExpandoModel) { + return true; + } else if (modelClass.reflectedType == Model) { + return false; + } + modelClass = modelClass.superclass; + } + throw StateError('This should be unreachable.'); + } } class _ModelDescription { From 2f6ca0b03e8d2291a16e8e7d9250d9aedeea3757 Mon Sep 17 00:00:00 2001 From: Istvan Soos Date: Wed, 10 Jun 2020 15:04:13 +0200 Subject: [PATCH 156/239] Really add delimiter to Bucket.list and .page --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/lib/storage.dart | 10 ++++++---- pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 0fc84eac..481e2842 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.7.2 + + * Added `delimiter` to `Bucket.list` and `Bucket.page` + (`0.7.1` only added them the implementation). + ## 0.7.1 * Added `delimiter` to `Bucket.list` and `Bucket.page`. diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 27b3a86f..1fc5b266 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -786,15 +786,16 @@ abstract class Bucket { /// List objects in the bucket. /// /// Listing operates like a directory listing, despite the object - /// namespace being flat. The character `/` is being used to separate - /// object names into directory components. + /// namespace being flat. Unless [delimiter] is specified, the character `/` + /// is being used to separate object names into directory components. + /// To list objects recursively, the [delimiter] can be set to empty string. /// /// Retrieves a list of objects and directory components starting /// with [prefix]. /// /// Returns a [Stream] of [BucketEntry]. Each element of the stream /// represents either an object or a directory component. - Stream list({String prefix}); + Stream list({String prefix, String delimiter}); /// Start paging through objects in the bucket. /// @@ -804,5 +805,6 @@ abstract class Bucket { /// /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page. 
- Future<Page<BucketEntry>> page({String prefix, int pageSize = 50}); + Future<Page<BucketEntry>> page( {String prefix, String delimiter, int pageSize = 50}); } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 22f05902..c34c0e07 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.7.1 +version: 0.7.2 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud From 3965797079ded964aede138ff9f52173d3b724fc Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 11 Jun 2020 16:10:08 +0200 Subject: [PATCH 157/239] Fixed reflection logic detecting isExpandoClass. Type arguments means we have to use `originalDeclaration`. --- pkgs/gcloud/CHANGELOG.md | 4 +++ pkgs/gcloud/lib/src/db/model_db_impl.dart | 10 ++++-- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 37 +++++++++++++++++++++++ 4 files changed, 49 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 481e2842..09bda8c4 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.7.3 + * Fixed issue in reflection code affecting `Model<int>` and `Model<String>`, + but not `Model`. + ## 0.7.2 * Added `delimiter` to `Bucket.list` and `Bucket.page` (`0.7.1` only added them the implementation). diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 62e6ba83..03824413 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -366,13 +366,17 @@ class ModelDBImpl implements ModelDB { return properties; } + final _originalExpandoModelClass = mirrors.reflectClass(ExpandoModel); + final _originalModelClass = mirrors.reflectClass(Model); + bool _isExpandoClass(mirrors.ClassMirror modelClass) { - while (modelClass.superclass != modelClass) { - if (modelClass.reflectedType == ExpandoModel) { + while (modelClass != null && modelClass.superclass != modelClass) { + if (modelClass.originalDeclaration == _originalExpandoModelClass) { return true; - } else if (modelClass.reflectedType == Model) { + } else if (modelClass.originalDeclaration == _originalModelClass) { return false; } + modelClass = modelClass.superclass; } throw StateError('This should be unreachable.'); diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index c34c0e07..efa251d6 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.7.2 +version: 0.7.3 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 3770998a..e84f8464 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -81,6 +81,31 @@ class Person extends db.Model { String toString() => 'Person(id: $id, name: $name, age: $age)'; } +@db.Kind(idType: db.IdType.String) +class PersonStringId extends db.Model { + String get name => id; + + @db.IntProperty() + int age; + + @db.ModelKeyProperty(propertyName: 'mangledWife') + db.Key wife; + + @override + bool operator ==(Object other) => sameAs(other); + + bool sameAs(Object other) { + return other is PersonStringId && + id == other.id && + parentKey == other.parentKey && + age == other.age && + wife == other.wife; + } + + @override + String toString() => 'PersonStringId(id/name: $name, age: $age)'; +} + @db.Kind() class User extends Person { @db.StringProperty() @@ -232,6 +257,18 @@ void runTests(db.DatastoreDB store, String namespace) { persons.first.wife = persons.last.key; return testInsertLookupDelete(persons); }); + test('PersonStringId', () { + var root = partition.emptyKey; + var persons = []; + for (var i = 1; i <= 10; i++) { + persons.add(PersonStringId() + ..id = 'user$i' + ..parentKey = root + ..age = 42 + i); + } + persons.first.wife = persons.last.key; + return testInsertLookupDelete(persons); + }); test('users', () { var root = partition.emptyKey; var users = []; From 8ade7ed71bd62892ef37864e0d308e8ad12d45e2 Mon Sep 17 00:00:00 2001 From: Alexander Thomas Date: Thu, 28 Jan 2021 11:37:53 +0100 Subject: [PATCH 158/239] Migrate to GitHub Actions (dart-lang/gcloud#107) * Migrate to GitHub Actions * Delete .travis.yml * Update dart_test.yaml * Fix infos --- .../gcloud/.github/workflows/test-package.yml | 85 +++++++++++++++++++ pkgs/gcloud/.travis.yml | 28 ------ pkgs/gcloud/dart_test.yaml | 4 +- pkgs/gcloud/lib/service_scope.dart | 1 + pkgs/gcloud/lib/src/datastore_impl.dart | 2 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 8 +- pkgs/gcloud/lib/src/storage_impl.dart | 9 +- pkgs/gcloud/lib/storage.dart | 6 +- .../datastore/e2e/datastore_test_impl.dart | 2 +- 9 files changed, 105 insertions(+), 40 deletions(-) create mode 100644 pkgs/gcloud/.github/workflows/test-package.yml delete mode 100644 pkgs/gcloud/.travis.yml diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml new file mode 100644 index 00000000..b32b3903 --- /dev/null +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -0,0 +1,85 @@ +name: Dart CI + +on: + # Run on PRs and pushes to the default branch. + push: + branches: [ master ] + pull_request: + branches: [ master ] + schedule: + - cron: "0 0 * * 0" + +env: + PUB_ENVIRONMENT: bot.github + +jobs: + # Check code formatting and static analysis on a single OS (linux) + # against Dart dev. + analyze: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + sdk: [dev] + steps: + - uses: actions/checkout@v2 + - uses: dart-lang/setup-dart@v0.3 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Check formatting + run: dart format --output=none --set-exit-if-changed . + if: always() && steps.install.outcome == 'success' + - name: Analyze code + run: dart analyze --fatal-infos + if: always() && steps.install.outcome == 'success' + + # Run tests on a matrix consisting of two dimensions: + # 1. OS: ubuntu-latest, (macos-latest, windows-latest) + # 2. 
release channel: dev + test: + needs: analyze + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + # Add macos-latest and/or windows-latest if relevant for this package. + os: [ubuntu-latest] + sdk: [dev] + steps: + - uses: actions/checkout@v2 + - uses: dart-lang/setup-dart@v0.3 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Run VM tests + run: dart test --platform vm -P ci + if: always() && steps.install.outcome == 'success' + + # Run tests on a matrix consisting of two dimensions: + # 1. OS: ubuntu-latest, (macos-latest, windows-latest) + # 2. release: 2.3.0 + test-legacy-sdk: + needs: analyze + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + # Add macos-latest and/or windows-latest if relevant for this package. + os: [ubuntu-latest] + sdk: [2.3.0] + steps: + - uses: actions/checkout@v2 + - uses: dart-lang/setup-dart@v0.3 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: pub get + - name: Run VM tests + run: pub run test --platform vm -P ci + if: always() && steps.install.outcome == 'success' diff --git a/pkgs/gcloud/.travis.yml b/pkgs/gcloud/.travis.yml deleted file mode 100644 index 2850234f..00000000 --- a/pkgs/gcloud/.travis.yml +++ /dev/null @@ -1,28 +0,0 @@ -language: dart - -dart: - - 2.3.0 - - dev - -dart_task: - - test: -P travis - -matrix: - include: - # Only validate formatting using the dev release - - dart: dev - dart_task: dartfmt - - dart: dev - dart_task: - dartanalyzer: --fatal-infos --fatal-warnings . - - dart: 2.3.0 - dart_task: - dartanalyzer: --fatal-warnings . - -# Only building master means that we don't run two builds for each pull request. -branches: - only: [master] - -cache: - directories: - - $HOME/.pub-cache diff --git a/pkgs/gcloud/dart_test.yaml b/pkgs/gcloud/dart_test.yaml index a465e8e7..e46915dc 100644 --- a/pkgs/gcloud/dart_test.yaml +++ b/pkgs/gcloud/dart_test.yaml @@ -1,5 +1,5 @@ presets: - travis: + ci: tags: e2e: - skip: "e2e tests don't run on Travis" + skip: "e2e tests don't run on CI" diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 3a14dd24..0e8dbcbc 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -210,6 +210,7 @@ class _ServiceScope { 'callback function to return a future.'); } return f.whenComplete(serviceScope._runScopeExitHandlers); + // ignore: deprecated_member_use }, zoneValues: map, onError: onError); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index a57dc820..68838184 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -395,7 +395,7 @@ class DatastoreImpl implements datastore.Datastore { // // A list of keys that were not looked up due to resource constraints. // repeated Key deferred = 3; // } - var entities = List(apiKeys.length); + var entities = List.filled(apiKeys.length, null); for (var i = 0; i < apiKeys.length; i++) { var apiKey = apiKeys[i]; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index f6ca96ed..4f97ae30 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -459,7 +459,8 @@ class _TopicPageImpl implements Page { final List items; _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) - : items = List(response.topics != null ? 
response.topics.length : 0), + : items = List.filled( + response.topics != null ? response.topics.length : 0, null), _nextPageToken = response.nextPageToken { if (response.topics != null) { for (var i = 0; i < response.topics.length; i++) { @@ -492,8 +493,9 @@ class _SubscriptionPageImpl implements Page { _SubscriptionPageImpl(this._api, this._topic, this._pageSize, pubsub.ListSubscriptionsResponse response) - : items = List( - response.subscriptions != null ? response.subscriptions.length : 0), + : items = List.filled( + response.subscriptions != null ? response.subscriptions.length : 0, + null), _nextPageToken = response.nextPageToken { if (response.subscriptions != null) { for (var i = 0; i < response.subscriptions.length; i++) { diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 86b53af3..c3d85c10 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -306,7 +306,8 @@ class _BucketPageImpl implements Page { final List items; _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) - : items = List(response.items != null ? response.items.length : 0), + : items = List.filled( + response.items != null ? response.items.length : 0, null), _nextPageToken = response.nextPageToken { for (var i = 0; i < items.length; i++) { items[i] = response.items[i].name; @@ -338,8 +339,10 @@ class _ObjectPageImpl implements Page { _ObjectPageImpl(this._bucket, this._prefix, this._delimiter, this._pageSize, storage_api.Objects response) - : items = List((response.items != null ? response.items.length : 0) + - (response.prefixes != null ? response.prefixes.length : 0)), + : items = List.filled( + (response.items != null ? response.items.length : 0) + + (response.prefixes != null ? response.prefixes.length : 0), + null), _nextPageToken = response.nextPageToken { var prefixes = 0; if (response.prefixes != null) { diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 1fc5b266..4b3fa13f 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -119,7 +119,8 @@ class Acl { Acl(Iterable entries) : _entries = List.from(entries); Acl._fromBucketAcl(storage_api.Bucket bucket) - : _entries = List(bucket.acl == null ? 0 : bucket.acl.length) { + : _entries = + List.filled(bucket.acl == null ? 0 : bucket.acl.length, null) { if (bucket.acl != null) { for (var i = 0; i < bucket.acl.length; i++) { _entries[i] = AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), @@ -129,7 +130,8 @@ class Acl { } Acl._fromObjectAcl(storage_api.Object object) - : _entries = List(object.acl == null ? 0 : object.acl.length) { + : _entries = + List.filled(object.acl == null ? 
0 : object.acl.length, null) { if (object.acl != null) { for (var i = 0; i < object.acl.length; i++) { _entries[i] = AclEntry(_aclScopeFromEntity(object.acl[i].entity), diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index d8540cfc..90680a46 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -546,7 +546,7 @@ void runTests(Datastore datastore, String namespace) { {bool xg = false}) { Future test(List entities, Transaction transaction, value) { // Change entities: - var changedEntities = List(entities.length); + var changedEntities = List.filled(entities.length, null); for (var i = 0; i < entities.length; i++) { var entity = entities[i]; var newProperties = Map.from(entity.properties); From fc36e30c251ad885f5d7c142ff526ae594eb167c Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Sat, 20 Mar 2021 23:34:00 +0100 Subject: [PATCH 159/239] Require latest dependency, start migration --- pkgs/gcloud/CHANGELOG.md | 8 +++++ pkgs/gcloud/example/main.dart | 1 + pkgs/gcloud/lib/common.dart | 6 ++-- pkgs/gcloud/lib/datastore.dart | 1 + pkgs/gcloud/lib/db.dart | 1 + pkgs/gcloud/lib/db/metamodel.dart | 1 + pkgs/gcloud/lib/pubsub.dart | 3 +- pkgs/gcloud/lib/service_scope.dart | 30 +++++++++---------- pkgs/gcloud/lib/src/datastore_impl.dart | 12 ++++---- pkgs/gcloud/lib/src/db/annotations.dart | 1 + pkgs/gcloud/lib/src/db/db.dart | 1 + pkgs/gcloud/lib/src/db/exceptions.dart | 1 + pkgs/gcloud/lib/src/db/model_db.dart | 1 + pkgs/gcloud/lib/src/db/model_db_impl.dart | 1 + pkgs/gcloud/lib/src/db/models.dart | 1 + pkgs/gcloud/lib/src/pubsub_impl.dart | 1 + pkgs/gcloud/lib/src/storage_impl.dart | 7 +++-- pkgs/gcloud/lib/storage.dart | 3 +- pkgs/gcloud/pubspec.yaml | 22 +++++++------- pkgs/gcloud/test/common.dart | 1 + pkgs/gcloud/test/common_e2e.dart | 1 + .../datastore/e2e/datastore_test_impl.dart | 1 + pkgs/gcloud/test/datastore/e2e/utils.dart | 1 + .../gcloud/test/datastore/error_matchers.dart | 1 + pkgs/gcloud/test/db/db_test.dart | 1 + pkgs/gcloud/test/db/e2e/db_test_impl.dart | 1 + .../test/db/e2e/metamodel_test_impl.dart | 1 + pkgs/gcloud/test/db/model_db_test.dart | 1 + .../db/model_dbs/duplicate_fieldname.dart | 1 + .../test/db/model_dbs/duplicate_kind.dart | 1 + .../test/db/model_dbs/duplicate_property.dart | 1 + .../db/model_dbs/multiple_annotations.dart | 1 + .../db/model_dbs/no_default_constructor.dart | 1 + pkgs/gcloud/test/db/properties_test.dart | 1 + pkgs/gcloud/test/db_all_e2e_test.dart | 1 + pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 1 + pkgs/gcloud/test/pubsub/pubsub_test.dart | 17 +++++++---- pkgs/gcloud/test/service_scope_test.dart | 7 +++-- pkgs/gcloud/test/storage/e2e_test.dart | 1 + pkgs/gcloud/test/storage/storage_test.dart | 17 +++++++---- 40 files changed, 105 insertions(+), 56 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 09bda8c4..06ae8161 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,11 @@ +## 0.8.0-dev + + * Require Dart 2.12 or later + * Partial migration to null safety: + * `package:gcloud/common.dart` + * `package:gcloud/http.dart` + * `package:gcloud/service_scope.dart` + ## 0.7.3 * Fixed issue in reflection code affecting `Model` and `Model`, but not `Model`. 
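The changelog hunk above lists the partial null-safety migration of `package:gcloud/service_scope.dart`. For reference while reading the service_scope.dart hunks further below, here is a minimal usage sketch of that API (not itself part of any patch; it assumes the library is imported with the prefix `ss`):

import 'package:gcloud/service_scope.dart' as ss;

Future<void> main() async {
  await ss.fork(() async {
    // Values registered here are visible only inside this forked scope; the
    // optional onScopeExit callback runs when the scope is torn down.
    ss.register(#cache, 'some-value', onScopeExit: () async {});
    print(ss.lookup(#cache)); // some-value
  });
}

The `Object? lookup(Object key)` signature introduced by the migration is why callers must handle a `null` result for keys that were never registered.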
diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart index 77dc6954..ed97ef6f 100644 --- a/pkgs/gcloud/example/main.dart +++ b/pkgs/gcloud/example/main.dart @@ -1,6 +1,7 @@ // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 import 'dart:async' show Future; import 'dart:convert' show utf8; diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index ce675de8..6f311607 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -35,8 +35,8 @@ class StreamFromPages { bool _pendingRequest = false; bool _paused = false; bool _cancelled = false; - Page _currentPage; - StreamController _controller; + late Page _currentPage; + late final StreamController _controller; StreamFromPages(this._firstPageProvider) { _controller = StreamController( @@ -49,7 +49,7 @@ class StreamFromPages { Stream get stream => _controller.stream; - void _handleError(e, StackTrace s) { + void _handleError(Object e, StackTrace s) { _controller.addError(e, s); _controller.close(); } diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 1146cf25..00bc9a28 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 /// This library provides a low-level API for accessing Google's Cloud /// Datastore. diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 42e15514..554337d7 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db; diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index 906ddae0..7418e733 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.meta_model; diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 5f6645bf..cccf2cba 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.pubsub; @@ -112,7 +113,7 @@ void registerPubSubService(PubSub pubsub) { /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. - static const SCOPES = [pubsub.PubsubApi.PubsubScope]; + static const SCOPES = [pubsub.PubsubApi.pubsubScope]; /// Access Pub/Sub using an authenticated client. 
/// diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 0e8dbcbc..538c5aef 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -86,8 +86,8 @@ const Symbol _ServiceScopeKey = #gcloud.service_scope; final _ServiceScope _emptyServiceScope = _ServiceScope(); /// Returns the current [_ServiceScope] object. -_ServiceScope get _serviceScope => - Zone.current[_ServiceScopeKey] as _ServiceScope; +_ServiceScope? get _serviceScope => + Zone.current[_ServiceScopeKey] as _ServiceScope?; /// Start a new zone with a new service scope and run [func] inside it. /// @@ -96,7 +96,7 @@ _ServiceScope get _serviceScope => /// /// If an uncaught error occurs and [onError] is given, it will be called. The /// `onError` parameter can take the same values as `Zone.current.fork`. -Future fork(Future Function() func, {Function onError}) { +Future fork(Future Function() func, {Function? onError}) { var currentServiceScope = _serviceScope; currentServiceScope ??= _emptyServiceScope; return currentServiceScope._fork(func, onError: onError); @@ -109,7 +109,7 @@ Future fork(Future Function() func, {Function onError}) { /// /// The registered on-scope-exit functions are executed in reverse registration /// order. -void register(Object key, Object value, {ScopeExitCallback onScopeExit}) { +void register(Object key, Object value, {ScopeExitCallback? onScopeExit}) { var serviceScope = _serviceScope; if (serviceScope == null) { throw StateError('Not running inside a service scope zone.'); @@ -132,7 +132,7 @@ void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) { /// Look up an item by it's key in the currently active service scope. /// /// Returns `null` if there is no entry with the given key. -Object lookup(Object key) { +Object? lookup(Object key) { var serviceScope = _serviceScope; if (serviceScope == null) { throw StateError('Not running inside a service scope zone.'); @@ -157,7 +157,7 @@ class _ServiceScope { /// Looks up an object by it's service scope key - returns `null` if not /// found. - Object lookup(Object serviceScope) { + Object? lookup(Object serviceScope) { _ensureNotInDestroyingState(); var entry = _key2Values[serviceScope]; return entry != null ? entry.value : null; @@ -167,7 +167,7 @@ class _ServiceScope { /// /// Optionally calls a [onScopeExit] function once this service scope ends. void register(Object serviceScopeKey, Object value, - {ScopeExitCallback onScopeExit}) { + {ScopeExitCallback? onScopeExit}) { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); @@ -191,13 +191,11 @@ class _ServiceScope { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); - if (onScopeExitCallback != null) { - _registeredEntries.add(_RegisteredEntry(null, null, onScopeExitCallback)); - } + _registeredEntries.add(_RegisteredEntry(null, null, onScopeExitCallback)); } /// Start a new zone with a forked service scope. - Future _fork(Future Function() func, {Function onError}) { + Future _fork(Future Function() func, {Function? onError}) { _ensureNotInCleaningState(); _ensureNotInDestroyingState(); @@ -257,7 +255,7 @@ class _ServiceScope { _key2Values.remove(registeredEntry.key); } if (registeredEntry.scopeExitCallback != null) { - return Future.sync(registeredEntry.scopeExitCallback) + return Future.sync(registeredEntry.scopeExitCallback!) 
.catchError((e, s) => errors.add(e)); } else { return Future.value(); @@ -274,12 +272,12 @@ class _ServiceScope { } } -typedef ScopeExitCallback = Future Function(); +typedef ScopeExitCallback = FutureOr Function(); class _RegisteredEntry { - final Object key; - final Object value; - final ScopeExitCallback scopeExitCallback; + final Object? key; + final Object? value; + final ScopeExitCallback? scopeExitCallback; _RegisteredEntry(this.key, this.value, this.scopeExitCallback); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 68838184..ae29f652 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.datastore_impl; @@ -19,8 +20,8 @@ class TransactionImpl implements datastore.Transaction { class DatastoreImpl implements datastore.Datastore { static const List SCOPES = [ - api.DatastoreApi.DatastoreScope, - api.DatastoreApi.CloudPlatformScope, + api.DatastoreApi.datastoreScope, + api.DatastoreApi.cloudPlatformScope, ]; final api.DatastoreApi _api; @@ -287,10 +288,9 @@ class DatastoreImpl implements datastore.Datastore { @override Future> allocateIds(List keys) { var request = api.AllocateIdsRequest(); - request - ..keys = keys.map((key) { - return _convertDatastore2ApiKey(key, enforceId: false); - }).toList(); + request.keys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: false); + }).toList(); return _api.projects.allocateIds(request, _project).then((response) { return response.keys.map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index ef6ead3a..4bcd037c 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 81b17d0e..300c08cd 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/exceptions.dart b/pkgs/gcloud/lib/src/db/exceptions.dart index 11c48b1c..36dcb800 100644 --- a/pkgs/gcloud/lib/src/db/exceptions.dart +++ b/pkgs/gcloud/lib/src/db/exceptions.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 4d7c44ee..9faa2dba 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. 
Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 03824413..719bbaac 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 6739d648..539776ee 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 4f97ae30..36a9f6a0 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.pubsub; diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index c3d85c10..adde37ea 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 part of gcloud.storage; @@ -220,7 +221,7 @@ class _BucketImpl implements Bucket { throw ArgumentError('length must have a value if offset is non-zero.'); } - var options = storage_api.DownloadOptions.FullMedia; + var options = storage_api.DownloadOptions.fullMedia; if (length != null) { if (length <= 0) { @@ -637,7 +638,7 @@ class _MediaUploadStreamSink implements StreamSink> { name: _objectName, predefinedAcl: _predefinedAcl, uploadMedia: media, - uploadOptions: storage_api.UploadOptions.Default) + uploadOptions: storage_api.UploadOptions.defaultOptions) .then((response) { _doneCompleter.complete(_ObjectInfoImpl(response)); }, onError: _completeError); @@ -651,7 +652,7 @@ class _MediaUploadStreamSink implements StreamSink> { name: _objectName, predefinedAcl: _predefinedAcl, uploadMedia: media, - uploadOptions: storage_api.UploadOptions.Resumable) + uploadOptions: storage_api.UploadOptions.resumable) .then((response) { _doneCompleter.complete(_ObjectInfoImpl(response)); }, onError: _completeError); diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4b3fa13f..f57492bc 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+// @dart=2.9 /// This library provides access to Google Cloud Storage. /// @@ -505,7 +506,7 @@ abstract class BucketInfo { abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. static const List SCOPES = [ - storage_api.StorageApi.DevstorageFullControlScope + storage_api.StorageApi.devstorageFullControlScope ]; /// Initializes access to cloud storage. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index efa251d6..ef13bacd 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,21 +1,21 @@ name: gcloud -version: 0.7.3 +version: 0.8.0-dev description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud environment: - sdk: '>=2.3.0 <3.0.0' + sdk: '>=2.12.0 <3.0.0' dependencies: - _discoveryapis_commons: '>=0.1.6+1 <0.3.0' - googleapis: '>=0.50.2 <1.0.0' - http: '>=0.11.0 <0.13.0' - meta: ^1.0.2 + _discoveryapis_commons: ^1.0.0 + googleapis: ^2.0.0 + http: '^0.13.0' + meta: ^1.3.0 dev_dependencies: - googleapis_auth: '>=0.2.3 <0.3.0' - http_parser: '>=2.0.0 <4.0.0' - mime: '>=0.9.0+3 <0.10.0' - pedantic: ^1.4.0 - test: ^1.5.1 + googleapis_auth: ^1.1.0 + http_parser: ^4.0.0 + mime: ^1.0.0 + pedantic: ^1.11.0 + test: ^1.16.0 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index c79d75f5..73319652 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 import 'dart:async'; import 'dart:convert'; diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 586b1fce..a2bb4293 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.test.common_e2e; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 90680a46..81f4c9e7 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library datastore_test; diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 1b5106a0..b3651da5 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+// @dart=2.9 library raw_datastore_test_utils; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 085268b5..1f048b9d 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library error_matchers; diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index e0f503ec..464ee352 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db_test; diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index e84f8464..73edec97 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library db_test; diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 11a669fe..c4fa41f4 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library metamodel_test; diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 8d798717..cd3941be 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db_impl_test; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 6cd0cde8..3fc11b31 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.model_test.duplicate_fieldname; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index 1859fdf9..dfd9cc68 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. 
Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.model_test.duplicate_kind; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart index 6e770798..b1756cea 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.model_test.duplicate_property; diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 3ffd27ca..858ce1e1 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.model_test.multiple_annotations; diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart index 1c3b3d5e..a80c74d6 100644 --- a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.model_test.no_default_constructor; diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 45f71d7b..2c97d014 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 library gcloud.db.properties_test; diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 2a89eb54..720cc8eb 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 @Tags(['e2e']) @Timeout(Duration(seconds: 120)) diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index 086030ae..d47a7b34 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+// @dart=2.9 @Tags(['e2e']) @Timeout(Duration(seconds: 120)) diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 546c736e..a597d771 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 import 'dart:async'; import 'dart:convert'; @@ -255,9 +256,11 @@ void main() { registerQueryMock(mock, 70, 50, 1); var api = PubSub(mock, PROJECT); - api.listTopics().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listTopics() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); test('cancel', () { @@ -654,9 +657,11 @@ void main() { registerQueryMock(mock, 70, 50, totalCalls: 1); var api = PubSub(mock, PROJECT); - api.listSubscriptions().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listSubscriptions() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); test('cancel', () { diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 331e2bd9..45c09431 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -41,7 +41,8 @@ void main() { test('fork-callback-returns-non-future', () { // The closure passed to fork() must return a future. - expect(() => ss.fork(expectAsync0(() => null)), throwsA(isArgumentError)); + expect(() => ss.fork(expectAsync0(() => Future.value())), + throwsA(isArgumentError)); }); test('error-on-double-insert', () { @@ -209,8 +210,8 @@ void main() { })); expect(ss.lookup(rootKey), equals('root')); - Future spawnChild( - ownSubKey, otherSubKey, int i, ss.ScopeExitCallback cleanup) { + Future spawnChild(Object ownSubKey, Object otherSubKey, int i, + ss.ScopeExitCallback cleanup) { return ss.fork(expectAsync0(() => Future.sync(() { ss.register(subKey, 'fork$i'); ss.registerScopeExitCallback(cleanup); diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 799782f4..c642c31d 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// @dart=2.9 @Tags(['e2e']) diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 5b979a18..a10c5678 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -1,6 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+// @dart=2.9 library gcloud.storage; @@ -270,9 +271,11 @@ void main() { test('immediate-cancel', () { withMockClient((mock, api) { - api.listBucketNames().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listBucketNames() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); }); @@ -1067,9 +1070,11 @@ void main() { test('immediate-cancel', () { withMockClient((mock, api) { var bucket = api.bucket(bucketName); - bucket.list().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + bucket + .list() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); }); From 447a3ad8d91713e623ed8c1743c8fbb8682ee372 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Sat, 20 Mar 2021 23:52:52 +0100 Subject: [PATCH 160/239] Fix tests --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- pkgs/gcloud/lib/service_scope.dart | 4 ---- pkgs/gcloud/lib/src/pubsub_impl.dart | 2 +- pkgs/gcloud/lib/src/storage_impl.dart | 4 ++-- pkgs/gcloud/test/pubsub/pubsub_test.dart | 2 +- pkgs/gcloud/test/service_scope_test.dart | 6 ------ 6 files changed, 6 insertions(+), 16 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index b32b3903..9794c1a8 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -62,7 +62,7 @@ jobs: # Run tests on a matrix consisting of two dimensions: # 1. OS: ubuntu-latest, (macos-latest, windows-latest) - # 2. release: 2.3.0 + # 2. release: 2.12.0 test-legacy-sdk: needs: analyze runs-on: ${{ matrix.os }} @@ -71,7 +71,7 @@ jobs: matrix: # Add macos-latest and/or windows-latest if relevant for this package. os: [ubuntu-latest] - sdk: [2.3.0] + sdk: [2.12.0] steps: - uses: actions/checkout@v2 - uses: dart-lang/setup-dart@v0.3 diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 538c5aef..8ed21730 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -203,10 +203,6 @@ class _ServiceScope { var map = {_ServiceScopeKey: serviceScope}; return runZoned(() { var f = func(); - if (f is! 
Future) { - throw ArgumentError('Forking a service scope zone requires the ' - 'callback function to return a future.'); - } return f.whenComplete(serviceScope._runScopeExitHandlers); // ignore: deprecated_member_use }, zoneValues: map, onError: onError); diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 36a9f6a0..9d3f9b27 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -31,7 +31,7 @@ class _PubSubImpl implements PubSub { } Future _createTopic(String name) { - return _api.projects.topics.create(null, name); + return _api.projects.topics.create(pubsub.Topic(), name); } Future _deleteTopic(String name) { diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index adde37ea..a8d3e7d9 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -107,8 +107,8 @@ class _StorageImpl implements Storage { var srcName = _AbsoluteName.parse(src); var destName = _AbsoluteName.parse(dest); return _api.objects - .copy(null, srcName.bucketName, srcName.objectName, destName.bucketName, - destName.objectName) + .copy(storage_api.Object(), srcName.bucketName, srcName.objectName, + destName.bucketName, destName.objectName) .then((_) => null); } diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index a597d771..3605557c 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -51,7 +51,7 @@ void main() { 'PUT', 'projects/$PROJECT/topics/test-topic', expectAsync1((http.Request request) { - expect(request.body, isEmpty); + expect(request.body, '{}'); return mock.respond(pubsub.Topic()..name = absoluteName); }, count: 2)); diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 45c09431..dc90d510 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -39,12 +39,6 @@ void main() { })); }); - test('fork-callback-returns-non-future', () { - // The closure passed to fork() must return a future. - expect(() => ss.fork(expectAsync0(() => Future.value())), - throwsA(isArgumentError)); - }); - test('error-on-double-insert', () { // Ensure that inserting twice with the same key results in an error. 
return ss.fork(expectAsync0(() => Future.sync(() { From d20eb45d07bf46428d6f5b5674ab58c9660041c6 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Sun, 21 Mar 2021 14:43:56 -0700 Subject: [PATCH 161/239] Fix new lints (dart-lang/gcloud#110) --- pkgs/gcloud/lib/src/datastore_impl.dart | 8 ++++---- pkgs/gcloud/test/pubsub/pubsub_test.dart | 16 ++++++++++------ pkgs/gcloud/test/storage/storage_test.dart | 16 ++++++++++------ 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 68838184..102b0d6b 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -14,6 +14,7 @@ import '../datastore.dart' as datastore; class TransactionImpl implements datastore.Transaction { final String data; + TransactionImpl(this.data); } @@ -287,10 +288,9 @@ class DatastoreImpl implements datastore.Datastore { @override Future> allocateIds(List keys) { var request = api.AllocateIdsRequest(); - request - ..keys = keys.map((key) { - return _convertDatastore2ApiKey(key, enforceId: false); - }).toList(); + request.keys = keys.map((key) { + return _convertDatastore2ApiKey(key, enforceId: false); + }).toList(); return _api.projects.allocateIds(request, _project).then((response) { return response.keys.map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 546c736e..d5ede424 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -255,9 +255,11 @@ void main() { registerQueryMock(mock, 70, 50, 1); var api = PubSub(mock, PROJECT); - api.listTopics().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listTopics() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); test('cancel', () { @@ -654,9 +656,11 @@ void main() { registerQueryMock(mock, 70, 50, totalCalls: 1); var api = PubSub(mock, PROJECT); - api.listSubscriptions().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listSubscriptions() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); test('cancel', () { diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 5b979a18..bf6c4608 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -270,9 +270,11 @@ void main() { test('immediate-cancel', () { withMockClient((mock, api) { - api.listBucketNames().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + api + .listBucketNames() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); }); @@ -1067,9 +1069,11 @@ void main() { test('immediate-cancel', () { withMockClient((mock, api) { var bucket = api.bucket(bucketName); - bucket.list().listen((_) => throw 'Unexpected', - onDone: () => throw 'Unexpected') - ..cancel(); + bucket + .list() + .listen((_) => throw 'Unexpected', + onDone: () => throw 'Unexpected') + .cancel(); }); }); From 943cea5227ecc348ce6c545863c3dd8c94d7c498 Mon Sep 17 00:00:00 2001 From: Franklin Yow <58489007+franklinyow@users.noreply.github.com> Date: Fri, 2 Apr 2021 16:57:12 -0700 Subject: [PATCH 162/239] Update LICENSE Changes to comply with internal review --- pkgs/gcloud/LICENSE | 5 +++-- 1 file changed, 3 insertions(+), 2 
deletions(-) diff --git a/pkgs/gcloud/LICENSE b/pkgs/gcloud/LICENSE index 5c60afea..000cd7be 100644 --- a/pkgs/gcloud/LICENSE +++ b/pkgs/gcloud/LICENSE @@ -1,4 +1,5 @@ -Copyright 2014, the Dart project authors. All rights reserved. +Copyright 2014, the Dart project authors. + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: @@ -9,7 +10,7 @@ met: copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name of Google Inc. nor the names of its + * Neither the name of Google LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. From b0203adcc80c47a478779eb4fa17741f2cabb500 Mon Sep 17 00:00:00 2001 From: pq Date: Mon, 26 Apr 2021 08:49:24 -0700 Subject: [PATCH 163/239] sort directives --- pkgs/gcloud/example/main.dart | 3 ++- pkgs/gcloud/lib/pubsub.dart | 2 +- pkgs/gcloud/lib/storage.dart | 5 ++--- pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart | 3 +-- pkgs/gcloud/test/datastore/error_matchers.dart | 2 +- pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart | 3 +-- pkgs/gcloud/test/db/properties_test.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_test.dart | 6 ++---- pkgs/gcloud/test/storage/e2e_test.dart | 2 +- pkgs/gcloud/test/storage/storage_test.dart | 6 ++---- 10 files changed, 14 insertions(+), 20 deletions(-) diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart index 77dc6954..c634c654 100644 --- a/pkgs/gcloud/example/main.dart +++ b/pkgs/gcloud/example/main.dart @@ -4,8 +4,9 @@ import 'dart:async' show Future; import 'dart:convert' show utf8; -import 'package:googleapis_auth/auth_io.dart' as auth; + import 'package:gcloud/storage.dart'; +import 'package:googleapis_auth/auth_io.dart' as auth; // Note: The README.md contains more details on how to use this package. 
diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 5f6645bf..cfa053e5 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -8,9 +8,9 @@ import 'dart:async'; import 'dart:collection'; import 'dart:convert'; import 'dart:io'; -import 'package:http/http.dart' as http; import 'package:googleapis/pubsub/v1.dart' as pubsub; +import 'package:http/http.dart' as http; import 'common.dart'; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4b3fa13f..1bcbe31e 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -52,10 +52,9 @@ import 'dart:async'; import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; import 'dart:convert'; -import 'package:http/http.dart' as http; - -import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; +import 'package:googleapis/storage/v1.dart' as storage_api; +import 'package:http/http.dart' as http; import 'common.dart'; import 'service_scope.dart' as ss; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 90680a46..229761c2 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -25,12 +25,11 @@ library datastore_test; /// $ gcloud datastore create-indexes index.yaml /// /// Now, wait for indexing done - import 'dart:async'; +import 'package:gcloud/common.dart'; import 'package:gcloud/datastore.dart'; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; -import 'package:gcloud/common.dart'; import 'package:http/http.dart'; import 'package:test/test.dart'; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 085268b5..44bdfb32 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -6,8 +6,8 @@ library error_matchers; import 'dart:io'; -import 'package:test/test.dart'; import 'package:gcloud/datastore.dart'; +import 'package:test/test.dart'; const isApplicationError = TypeMatcher(); diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 11a669fe..054fc8a5 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -6,12 +6,11 @@ library metamodel_test; import 'dart:async'; -import 'package:test/test.dart'; - import 'package:gcloud/datastore.dart'; import 'package:gcloud/datastore.dart' show Key, Partition; import 'package:gcloud/db.dart' as db; import 'package:gcloud/db/metamodel.dart'; +import 'package:test/test.dart'; List buildEntitiesWithDifferentNamespaces() { Key newKey(String namespace, String kind, int id) { diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 45f71d7b..1bfb186f 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -6,8 +6,8 @@ library gcloud.db.properties_test; import 'dart:typed_data'; -import 'package:gcloud/db.dart'; import 'package:gcloud/datastore.dart' as datastore; +import 'package:gcloud/db.dart'; import 'package:test/test.dart'; void main() { diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index d5ede424..1cb8abfa 100644 --- 
a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -5,12 +5,10 @@ import 'dart:async'; import 'dart:convert'; -import 'package:http/http.dart' as http; -import 'package:test/test.dart'; - import 'package:gcloud/pubsub.dart'; - import 'package:googleapis/pubsub/v1.dart' as pubsub; +import 'package:http/http.dart' as http; +import 'package:test/test.dart'; import '../common.dart'; import '../common_e2e.dart'; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 799782f4..a8bea9de 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -8,8 +8,8 @@ library gcloud.storage; import 'dart:async'; -import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:gcloud/storage.dart'; +import 'package:googleapis/storage/v1.dart' as storage_api; import 'package:test/test.dart'; import '../common_e2e.dart'; diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index bf6c4608..199c695e 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -7,12 +7,10 @@ library gcloud.storage; import 'dart:async'; import 'dart:convert'; -import 'package:http/http.dart' as http; -import 'package:test/test.dart'; - import 'package:gcloud/storage.dart'; - import 'package:googleapis/storage/v1.dart' as storage; +import 'package:http/http.dart' as http; +import 'package:test/test.dart'; import '../common.dart'; import '../common_e2e.dart'; From b66b49764ddd2d65cb22bc31dbba0218d2ce8e68 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Tue, 4 May 2021 22:47:59 +0200 Subject: [PATCH 164/239] Migrate storage.dart --- pkgs/gcloud/example/main.dart | 2 +- pkgs/gcloud/lib/src/storage_impl.dart | 229 ++++++++++----------- pkgs/gcloud/lib/storage.dart | 122 ++++++----- pkgs/gcloud/test/common.dart | 23 +-- pkgs/gcloud/test/common_e2e.dart | 1 - pkgs/gcloud/test/storage/e2e_test.dart | 29 ++- pkgs/gcloud/test/storage/storage_test.dart | 89 ++++---- 7 files changed, 237 insertions(+), 258 deletions(-) diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart index bb2ab3c5..560a9e7a 100644 --- a/pkgs/gcloud/example/main.dart +++ b/pkgs/gcloud/example/main.dart @@ -1,7 +1,7 @@ // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + import 'dart:async' show Future; import 'dart:convert' show utf8; diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index a8d3e7d9..5bf4e775 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.storage; @@ -11,10 +10,12 @@ const String _DIRECTORY_DELIMITER = '/'; /// Representation of an absolute name consisting of bucket name and object /// name. 
class _AbsoluteName { - String bucketName; - String objectName; + final String bucketName; + final String objectName; + + _AbsoluteName._(this.bucketName, this.objectName); - _AbsoluteName.parse(String absoluteName) { + factory _AbsoluteName.parse(String absoluteName) { if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) { throw FormatException("Absolute name '$absoluteName' does not start " "with '$_ABSOLUTE_PREFIX'"); @@ -28,8 +29,10 @@ class _AbsoluteName { throw FormatException("Absolute name '$absoluteName' does not have " 'an object name'); } - bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); - objectName = absoluteName.substring(index + 1); + final bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); + final objectName = absoluteName.substring(index + 1); + + return _AbsoluteName._(bucketName, objectName); } } @@ -43,7 +46,7 @@ class _StorageImpl implements Storage { @override Future createBucket(String bucketName, - {PredefinedAcl predefinedAcl, Acl acl}) { + {PredefinedAcl? predefinedAcl, Acl? acl}) { var bucket = storage_api.Bucket()..name = bucketName; var predefinedName = predefinedAcl != null ? predefinedAcl._name : null; if (acl != null) { @@ -61,7 +64,7 @@ class _StorageImpl implements Storage { @override Bucket bucket(String bucketName, - {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}) { + {PredefinedAcl? defaultPredefinedObjectAcl, Acl? defaultObjectAcl}) { return _BucketImpl( this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl); } @@ -112,7 +115,8 @@ class _StorageImpl implements Storage { .then((_) => null); } - Future _listBuckets(int pageSize, String nextPageToken) { + Future _listBuckets( + int pageSize, String? nextPageToken) { return _api.buckets .list(project, maxResults: pageSize, pageToken: nextPageToken); } @@ -124,16 +128,16 @@ class _BucketInfoImpl implements BucketInfo { _BucketInfoImpl(this._bucket); @override - String get bucketName => _bucket.name; + String get bucketName => _bucket.name!; @override - String get etag => _bucket.etag; + String get etag => _bucket.etag!; @override - DateTime get created => _bucket.timeCreated; + DateTime get created => _bucket.timeCreated!; @override - String get id => _bucket.id; + String get id => _bucket.id!; @override Acl get acl => Acl._fromBucketAcl(_bucket); @@ -142,8 +146,8 @@ class _BucketInfoImpl implements BucketInfo { /// Bucket API implementation providing access to objects. class _BucketImpl implements Bucket { final storage_api.StorageApi _api; - final PredefinedAcl _defaultPredefinedObjectAcl; - final Acl _defaultObjectAcl; + final PredefinedAcl? _defaultPredefinedObjectAcl; + final Acl? _defaultObjectAcl; @override final String bucketName; @@ -158,11 +162,11 @@ class _BucketImpl implements Bucket { @override StreamSink> write(String objectName, - {int length, - ObjectMetadata metadata, - Acl acl, - PredefinedAcl predefinedAcl, - String contentType}) { + {int? length, + ObjectMetadata? metadata, + Acl? acl, + PredefinedAcl? predefinedAcl, + String? contentType}) { storage_api.Object object; if (metadata == null) { metadata = _ObjectMetadata(acl: acl, contentType: contentType); @@ -178,15 +182,15 @@ class _BucketImpl implements Bucket { object = objectMetadata._object; // If no predefined ACL is passed use the default (if any). - String predefinedName; + String? predefinedName; if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) { - var predefined = predefinedAcl ?? _defaultPredefinedObjectAcl; + var predefined = predefinedAcl ?? 
_defaultPredefinedObjectAcl!; predefinedName = predefined._name; } // If no ACL is passed use the default (if any). if (object.acl == null && _defaultObjectAcl != null) { - object.acl = _defaultObjectAcl._toObjectAccessControlList(); + object.acl = _defaultObjectAcl!._toObjectAccessControlList(); } // Fill properties not passed in metadata. @@ -199,10 +203,10 @@ class _BucketImpl implements Bucket { @override Future writeBytes(String objectName, List bytes, - {ObjectMetadata metadata, - Acl acl, - PredefinedAcl predefinedAcl, - String contentType}) { + {ObjectMetadata? metadata, + Acl? acl, + PredefinedAcl? predefinedAcl, + String? contentType}) { var sink = write(objectName, length: bytes.length, metadata: metadata, @@ -214,7 +218,7 @@ class _BucketImpl implements Bucket { } @override - Stream> read(String objectName, {int offset, int length}) async* { + Stream> read(String objectName, {int? offset, int? length}) async* { offset ??= 0; if (offset != 0 && length == null) { @@ -254,7 +258,7 @@ class _BucketImpl implements Bucket { } @override - Stream list({String prefix, String delimiter}) { + Stream list({String? prefix, String? delimiter}) { delimiter ??= _DIRECTORY_DELIMITER; Future<_ObjectPageImpl> firstPage(int pageSize) async { final response = @@ -267,7 +271,7 @@ class _BucketImpl implements Bucket { @override Future> page( - {String prefix, String delimiter, int pageSize = 50}) async { + {String? prefix, String? delimiter, int pageSize = 50}) async { delimiter ??= _DIRECTORY_DELIMITER; final response = await _listObjects(bucketName, prefix, delimiter, pageSize, null); @@ -285,12 +289,12 @@ class _BucketImpl implements Bucket { if (md.contentType == null) { throw ArgumentError('Content-Type is required for update'); } - md._object.acl ??= _defaultObjectAcl._toObjectAccessControlList(); + md._object.acl ??= _defaultObjectAcl!._toObjectAccessControlList(); return _api.objects.update(object, bucketName, objectName); } - Future _listObjects(String bucketName, String prefix, - String delimiter, int pageSize, String nextPageToken) { + Future _listObjects(String bucketName, String? prefix, + String? delimiter, int pageSize, String? nextPageToken) { return _api.objects.list(bucketName, prefix: prefix, delimiter: delimiter, @@ -301,29 +305,27 @@ class _BucketImpl implements Bucket { class _BucketPageImpl implements Page { final _StorageImpl _storage; - final int _pageSize; - final String _nextPageToken; + final int? _pageSize; + final String? _nextPageToken; @override final List items; _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response) - : items = List.filled( - response.items != null ? response.items.length : 0, null), - _nextPageToken = response.nextPageToken { - for (var i = 0; i < items.length; i++) { - items[i] = response.items[i].name; - } - } + : items = [ + for (final item in response.items ?? const []) + item.name! + ], + _nextPageToken = response.nextPageToken; @override bool get isLast => _nextPageToken == null; @override - Future> next({int pageSize}) { + Future> next({int? 
pageSize}) { if (isLast) return Future.value(null); pageSize ??= _pageSize; - return _storage._listBuckets(pageSize, _nextPageToken).then((response) { + return _storage._listBuckets(pageSize!, _nextPageToken).then((response) { return _BucketPageImpl(_storage, pageSize, response); }); } @@ -331,45 +333,34 @@ class _BucketPageImpl implements Page { class _ObjectPageImpl implements Page { final _BucketImpl _bucket; - final String _prefix; - final String _delimiter; - final int _pageSize; - final String _nextPageToken; + final String? _prefix; + final String? _delimiter; + final int? _pageSize; + final String? _nextPageToken; @override final List items; _ObjectPageImpl(this._bucket, this._prefix, this._delimiter, this._pageSize, storage_api.Objects response) - : items = List.filled( - (response.items != null ? response.items.length : 0) + - (response.prefixes != null ? response.prefixes.length : 0), - null), - _nextPageToken = response.nextPageToken { - var prefixes = 0; - if (response.prefixes != null) { - for (var i = 0; i < response.prefixes.length; i++) { - items[i] = BucketEntry._directory(response.prefixes[i]); - } - prefixes = response.prefixes.length; - } - if (response.items != null) { - for (var i = 0; i < response.items.length; i++) { - items[prefixes + i] = BucketEntry._object(response.items[i].name); - } - } - } + : items = [ + for (final item in response.prefixes ?? const []) + BucketEntry._directory(item), + for (final item in response.items ?? const []) + BucketEntry._object(item.name!) + ], + _nextPageToken = response.nextPageToken; @override bool get isLast => _nextPageToken == null; @override - Future> next({int pageSize}) { + Future> next({int? pageSize}) { if (isLast) return Future.value(null); pageSize ??= _pageSize; return _bucket ._listObjects( - _bucket.bucketName, _prefix, _delimiter, pageSize, _nextPageToken) + _bucket.bucketName, _prefix, _delimiter, pageSize!, _nextPageToken) .then((response) { return _ObjectPageImpl(_bucket, _prefix, _delimiter, pageSize, response); }); @@ -388,45 +379,43 @@ class _ObjectGenerationImpl implements ObjectGeneration { class _ObjectInfoImpl implements ObjectInfo { final storage_api.Object _object; final ObjectMetadata _metadata; - Uri _downloadLink; - ObjectGeneration _generation; + Uri? _downloadLink; + ObjectGeneration? 
_generation; _ObjectInfoImpl(storage_api.Object object) : _object = object, _metadata = _ObjectMetadata._(object); @override - String get name => _object.name; + String get name => _object.name!; @override - int get length => int.parse(_object.size); + int get length => int.parse(_object.size!); @override - DateTime get updated => _object.updated; + DateTime get updated => _object.updated!; @override - String get etag => _object.etag; + String get etag => _object.etag!; @override - List get md5Hash => base64.decode(_object.md5Hash); + List get md5Hash => base64.decode(_object.md5Hash!); @override int get crc32CChecksum { - var list = base64.decode(_object.crc32c); + var list = base64.decode(_object.crc32c!); return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0]; } @override Uri get downloadLink { - _downloadLink ??= Uri.parse(_object.mediaLink); - return _downloadLink; + return _downloadLink ??= Uri.parse(_object.mediaLink!); } @override ObjectGeneration get generation { - _generation ??= _ObjectGenerationImpl( - _object.generation, int.parse(_object.metageneration)); - return _generation; + return _generation ??= _ObjectGenerationImpl( + _object.generation!, int.parse(_object.metageneration!)); } /// Additional metadata. @@ -436,18 +425,18 @@ class _ObjectInfoImpl implements ObjectInfo { class _ObjectMetadata implements ObjectMetadata { final storage_api.Object _object; - Acl _cachedAcl; - ObjectGeneration _cachedGeneration; - Map _cachedCustom; + Acl? _cachedAcl; + ObjectGeneration? _cachedGeneration; + Map? _cachedCustom; _ObjectMetadata( - {Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}) + {Acl? acl, + String? contentType, + String? contentEncoding, + String? cacheControl, + String? contentDisposition, + String? contentLanguage, + Map? custom}) : _object = storage_api.Object() { _object.acl = acl != null ? acl._toObjectAccessControlList() : null; _object.contentType = contentType; @@ -461,48 +450,48 @@ class _ObjectMetadata implements ObjectMetadata { _ObjectMetadata._(this._object); @override - Acl get acl { + Acl? get acl { _cachedAcl ??= Acl._fromObjectAcl(_object); return _cachedAcl; } @override - String get contentType => _object.contentType; + String? get contentType => _object.contentType; @override - String get contentEncoding => _object.contentEncoding; + String? get contentEncoding => _object.contentEncoding; @override - String get cacheControl => _object.cacheControl; + String? get cacheControl => _object.cacheControl; @override - String get contentDisposition => _object.contentDisposition; + String? get contentDisposition => _object.contentDisposition; @override - String get contentLanguage => _object.contentLanguage; + String? get contentLanguage => _object.contentLanguage; - ObjectGeneration get generation { - _cachedGeneration ??= - ObjectGeneration(_object.generation, int.parse(_object.metageneration)); + ObjectGeneration? get generation { + _cachedGeneration ??= ObjectGeneration( + _object.generation!, int.parse(_object.metageneration!)); return _cachedGeneration; } @override - Map get custom { + Map? 
get custom { if (_object.metadata == null) return null; - _cachedCustom ??= UnmodifiableMapView(_object.metadata); + _cachedCustom ??= UnmodifiableMapView(_object.metadata!); return _cachedCustom; } @override ObjectMetadata replace( - {Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}) { + {Acl? acl, + String? contentType, + String? contentEncoding, + String? cacheControl, + String? contentDisposition, + String? contentLanguage, + Map? custom}) { return _ObjectMetadata( acl: acl ?? this.acl, contentType: contentType ?? this.contentType, @@ -523,20 +512,20 @@ class _MediaUploadStreamSink implements StreamSink> { final String _bucketName; final String _objectName; final storage_api.Object _object; - final String _predefinedAcl; - final int _length; + final String? _predefinedAcl; + final int? _length; final int _maxNormalUploadLength; int _bufferLength = 0; final List> buffer = >[]; final _controller = StreamController>(sync: true); - StreamSubscription _subscription; - StreamController> _resumableController; + late StreamSubscription _subscription; + late StreamController> _resumableController; final _doneCompleter = Completer(); static const int _STATE_LENGTH_KNOWN = 0; static const int _STATE_PROBING_LENGTH = 1; static const int _STATE_DECIDED_RESUMABLE = 2; - int _state; + int? _state; _MediaUploadStreamSink(this._api, this._bucketName, this._objectName, this._object, this._predefinedAcl, this._length, @@ -545,7 +534,7 @@ class _MediaUploadStreamSink implements StreamSink> { // If the length is known in advance decide on the upload strategy // immediately _state = _STATE_LENGTH_KNOWN; - if (_length <= _maxNormalUploadLength) { + if (_length! <= _maxNormalUploadLength) { _startNormalUpload(_controller.stream, _length); } else { _startResumableUpload(_controller.stream, _length); @@ -565,7 +554,7 @@ class _MediaUploadStreamSink implements StreamSink> { } @override - void addError(errorEvent, [StackTrace stackTrace]) { + void addError(errorEvent, [StackTrace? stackTrace]) { _controller.addError(errorEvent, stackTrace); } @@ -612,7 +601,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - void _onError(e, StackTrace s) { + void _onError(Object e, StackTrace s) { // If still deciding on the strategy complete with error. Otherwise // forward the error for default processing. if (_state == _STATE_PROBING_LENGTH) { @@ -622,7 +611,7 @@ class _MediaUploadStreamSink implements StreamSink> { } } - void _completeError(e, StackTrace s) { + void _completeError(Object e, StackTrace s) { if (_state != _STATE_LENGTH_KNOWN) { // Always cancel subscription on error. _subscription.cancel(); @@ -630,7 +619,7 @@ class _MediaUploadStreamSink implements StreamSink> { _doneCompleter.completeError(e, s); } - void _startNormalUpload(Stream> stream, int length) { + void _startNormalUpload(Stream> stream, int? length) { var contentType = _object.contentType ?? 'application/octet-stream'; var media = storage_api.Media(stream, length, contentType: contentType); _api.objects @@ -644,7 +633,7 @@ class _MediaUploadStreamSink implements StreamSink> { }, onError: _completeError); } - void _startResumableUpload(Stream> stream, int length) { + void _startResumableUpload(Stream> stream, int? length) { var contentType = _object.contentType ?? 
'application/octet-stream'; var media = storage_api.Media(stream, length, contentType: contentType); _api.objects diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 2964e8e0..b2073168 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + /// This library provides access to Google Cloud Storage. /// @@ -110,7 +110,7 @@ int _jenkinsHash(List e) { /// The access controls are described by [AclEntry] objects. class Acl { final List _entries; - int _cachedHashCode; + int? _cachedHashCode; // todo: late final /// The entries in the ACL. List get entries => UnmodifiableListView(_entries); @@ -119,28 +119,21 @@ class Acl { Acl(Iterable entries) : _entries = List.from(entries); Acl._fromBucketAcl(storage_api.Bucket bucket) - : _entries = - List.filled(bucket.acl == null ? 0 : bucket.acl.length, null) { - if (bucket.acl != null) { - for (var i = 0; i < bucket.acl.length; i++) { - _entries[i] = AclEntry(_aclScopeFromEntity(bucket.acl[i].entity), - _aclPermissionFromRole(bucket.acl[i].role)); - } - } - } + : _entries = [ + for (final control + in bucket.acl ?? const []) + AclEntry(_aclScopeFromEntity(control.entity!), + _aclPermissionFromRole(control.role)) + ]; Acl._fromObjectAcl(storage_api.Object object) - : _entries = - List.filled(object.acl == null ? 0 : object.acl.length, null) { - if (object.acl != null) { - for (var i = 0; i < object.acl.length; i++) { - _entries[i] = AclEntry(_aclScopeFromEntity(object.acl[i].entity), - _aclPermissionFromRole(object.acl[i].role)); - } - } - } + : _entries = [ + for (final entry in object.acl ?? []) + AclEntry(_aclScopeFromEntity(entry.entity!), + _aclPermissionFromRole(entry.role)), + ]; - AclScope _aclScopeFromEntity(String entity) { + static AclScope _aclScopeFromEntity(String entity) { if (entity.startsWith('user-')) { var tmp = entity.substring(5); var at = tmp.indexOf('@'); @@ -167,7 +160,7 @@ class Acl { return OpaqueScope(entity); } - AclPermission _aclPermissionFromRole(String role) { + static AclPermission _aclPermissionFromRole(String? role) { if (role == 'READER') return AclPermission.READ; if (role == 'WRITER') return AclPermission.WRITE; if (role == 'OWNER') return AclPermission.FULL_CONTROL; @@ -211,7 +204,7 @@ class Acl { class AclEntry { final AclScope scope; final AclPermission permission; - int _cachedHashCode; + int? _cachedHashCode; // todo: Late final AclEntry(this.scope, this.permission); @@ -256,7 +249,7 @@ class AclEntry { /// /// See https://cloud.google.com/storage/docs/accesscontrol for more details. abstract class AclScope { - int _cachedHashCode; + int? _cachedHashCode; // todo: late final /// ACL type for scope representing a Google Storage id. static const int _TYPE_STORAGE_ID = 0; @@ -386,7 +379,8 @@ class OpaqueScope extends AclScope { /// ACL scope for a all authenticated users. class AllAuthenticatedScope extends AclScope { - AllAuthenticatedScope() : super._(AclScope._TYPE_ALL_AUTHENTICATED, null); + AllAuthenticatedScope() + : super._(AclScope._TYPE_ALL_AUTHENTICATED, 'invalid'); @override String get _storageEntity => 'allAuthenticatedUsers'; @@ -394,7 +388,7 @@ class AllAuthenticatedScope extends AclScope { /// ACL scope for a all users. 
class AllUsersScope extends AclScope { - AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, null); + AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, 'invalid'); @override String get _storageEntity => 'allUsers'; @@ -521,7 +515,7 @@ abstract class Storage { /// /// Returns a [Future] which completes when the bucket has been created. Future createBucket(String bucketName, - {PredefinedAcl predefinedAcl, Acl acl}); + {PredefinedAcl? predefinedAcl, Acl? acl}); /// Delete a cloud storage bucket. /// @@ -549,7 +543,7 @@ abstract class Storage { /// /// Returns a `Bucket` instance. Bucket bucket(String bucketName, - {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}); + {PredefinedAcl? defaultPredefinedObjectAcl, Acl? defaultObjectAcl}); /// Check whether a cloud storage bucket exists. /// @@ -568,7 +562,7 @@ abstract class Storage { /// List names of all buckets. /// /// Returns a [Stream] of bucket names. - Stream listBucketNames(); + Stream listBucketNames(); /// Start paging through names of all buckets. /// @@ -576,7 +570,7 @@ abstract class Storage { /// /// Returns a [Future] which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page of buckets. - Future> pageBucketNames({int pageSize = 50}); + Future> pageBucketNames({int pageSize = 50}); /// Copy an object. /// @@ -639,48 +633,48 @@ class ObjectGeneration { /// Access to object metadata. abstract class ObjectMetadata { factory ObjectMetadata( - {Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}) = _ObjectMetadata; + {Acl? acl, + String? contentType, + String? contentEncoding, + String? cacheControl, + String? contentDisposition, + String? contentLanguage, + Map? custom}) = _ObjectMetadata; /// ACL. - Acl get acl; + Acl? get acl; /// `Content-Type` for this object. - String get contentType; + String? get contentType; /// `Content-Encoding` for this object. - String get contentEncoding; + String? get contentEncoding; /// `Cache-Control` for this object. - String get cacheControl; + String? get cacheControl; /// `Content-Disposition` for this object. - String get contentDisposition; + String? get contentDisposition; /// `Content-Language` for this object. /// /// The value of this field must confirm to RFC 3282. - String get contentLanguage; + String? get contentLanguage; /// Custom metadata. - Map get custom; + Map? get custom; /// Create a copy of this object with some values replaced. /// // TODO: This cannot be used to set values to null. ObjectMetadata replace( - {Acl acl, - String contentType, - String contentEncoding, - String cacheControl, - String contentDisposition, - String contentLanguage, - Map custom}); + {Acl? acl, + String? contentType, + String? contentEncoding, + String? cacheControl, + String? contentDisposition, + String? contentLanguage, + Map? custom}); } /// Result from List objects in a bucket. @@ -740,11 +734,11 @@ abstract class Bucket { /// The object content has been written the `StreamSink` completes with /// an `ObjectInfo` instance with the information on the object created. StreamSink> write(String objectName, - {int length, - ObjectMetadata metadata, - Acl acl, - PredefinedAcl predefinedAcl, - String contentType}); + {int? length, + ObjectMetadata? metadata, + Acl? acl, + PredefinedAcl? predefinedAcl, + String? contentType}); /// Create an new object in the bucket with specified content. 
/// @@ -755,10 +749,10 @@ abstract class Bucket { /// Returns a `Future` which completes with an `ObjectInfo` instance when /// the object is written. Future writeBytes(String name, List bytes, - {ObjectMetadata metadata, - Acl acl, - PredefinedAcl predefinedAcl, - String contentType}); + {ObjectMetadata? metadata, + Acl? acl, + PredefinedAcl? predefinedAcl, + String? contentType}); /// Read object content as byte stream. /// @@ -768,7 +762,7 @@ abstract class Bucket { /// /// If there is a problem accessing the file, a [DetailedApiRequestError] is /// thrown. - Stream> read(String objectName, {int offset, int length}); + Stream> read(String objectName, {int? offset, int? length}); /// Lookup object metadata. /// @@ -797,7 +791,7 @@ abstract class Bucket { /// /// Returns a [Stream] of [BucketEntry]. Each element of the stream /// represents either an object or a directory component. - Stream list({String prefix, String delimiter}); + Stream list({String? prefix, String? delimiter}); /// Start paging through objects in the bucket. /// @@ -807,6 +801,6 @@ abstract class Bucket { /// /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page. - Future> page( - {String prefix, String delimiter, int pageSize = 50}); + Future> page( + {String? prefix, String? delimiter, int pageSize = 50}); } diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 73319652..5a3f77f9 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 import 'dart:async'; import 'dart:convert'; @@ -26,7 +25,7 @@ class MockClient extends http.BaseClient { final Uri rootUri; Map> mocks = {}; - http_testing.MockClient client; + late http_testing.MockClient client; MockClient(String hostname, String rootPath) : hostname = hostname, @@ -71,8 +70,8 @@ class MockClient extends http.BaseClient { throw 'No mock handler for method ${request.method} found. ' 'Request URL was: ${request.url}'; } - http_testing.MockClientHandler mockHandler; - mocks[request.method] + http_testing.MockClientHandler? mockHandler; + mocks[request.method]! .forEach((pattern, http_testing.MockClientHandler handler) { if (pattern.matchAsPrefix(path) != null) { mockHandler = handler; @@ -82,7 +81,7 @@ class MockClient extends http.BaseClient { throw 'No mock handler for method ${request.method} and path ' '[$path] found. Request URL was: ${request.url}'; } - return mockHandler(request); + return mockHandler!(request); } @override @@ -122,8 +121,8 @@ class MockClient extends http.BaseClient { if (range != null) { var match = _bytesHeaderRegexp.allMatches(range).single; - var start = int.parse(match[1]); - var end = int.parse(match[2]); + var start = int.parse(match[1]!); + var end = int.parse(match[2]!); myBytes = bytes.sublist(start, end + 1); headers['content-length'] = myBytes.length.toString(); @@ -145,19 +144,19 @@ class MockClient extends http.BaseClient { var completer = Completer(); var contentType = - http_parser.MediaType.parse(request.headers['content-type']); + http_parser.MediaType.parse(request.headers['content-type']!); expect(contentType.mimeType, 'multipart/related'); var boundary = contentType.parameters['boundary']; var partCount = 0; - String json; + String? 
json; Stream.fromIterable([ request.bodyBytes, [13, 10] ]) - .transform(mime.MimeMultipartTransformer(boundary)) + .transform(mime.MimeMultipartTransformer(boundary!)) .listen(((mime.MimeMultipart mimeMultipart) { - var contentType = mimeMultipart.headers['content-type']; + var contentType = mimeMultipart.headers['content-type']!; partCount++; if (partCount == 1) { // First part in the object JSON. @@ -173,7 +172,7 @@ class MockClient extends http.BaseClient { .fold('', (p, e) => '$p$e') .then(base64.decode) .then((bytes) { - completer.complete(NormalMediaUpload(json, bytes, contentType)); + completer.complete(NormalMediaUpload(json!, bytes, contentType)); }); } else { // Exactly two parts expected. diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index a2bb4293..586b1fce 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library gcloud.test.common_e2e; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index ac2122cc..a7b1026a 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -1,8 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 - @Tags(['e2e']) library gcloud.storage; @@ -30,9 +28,10 @@ final bytesResumableUpload = List.generate(minResumableUpload, (e) => e & 255); void main() { - Storage storage; - String testBucketName; - Bucket testBucket; + var didSetUp = false; + late Storage storage; + late String testBucketName; + late Bucket testBucket; setUpAll(() { return withAuthClient(Storage.SCOPES, (String project, httpClient) { @@ -44,13 +43,14 @@ void main() { // Create a shared bucket for all object tests. 
return storage.createBucket(testBucketName).then((_) { testBucket = storage.bucket(testBucketName); + didSetUp = true; }); }); }); tearDownAll(() async { // Don't cleanup if setup failed - if (storage == null) { + if (!didSetUp) { return; } // Deleting a bucket relies on eventually consistent behaviour, hence @@ -125,9 +125,8 @@ void main() { return withTestBucket((Bucket bucket) { return bucket.writeBytes('test', bytes).then(expectAsync1((info) { expect(info, isNotNull); - return bucket - .read('test') - .fold([], (p, e) => p..addAll(e)).then(expectAsync1((result) { + return bucket.read('test').fold>( + [], (p, e) => p..addAll(e)).then(expectAsync1((result) { expect(result, bytes); return bucket.delete('test').then(expectAsync1((result) { expect(result, isNull); @@ -140,7 +139,7 @@ void main() { return Future.forEach([ () => test('test-1', [1, 2, 3]), () => test('test-2', bytesResumableUpload) - ], (f) => f().then(expectAsync1((_) {}))); + ], (Function f) => f().then(expectAsync1((_) {}))); }); test('create-with-predefined-acl-delete', () { @@ -155,7 +154,7 @@ void main() { var acl = info.metadata.acl; expect(info.name, objectName); expect(info.etag, isNotNull); - expect(acl.entries.length, expectedLength); + expect(acl!.entries.length, expectedLength); return bucket.delete(objectName).then(expectAsync1((result) { expect(result, isNull); })); @@ -170,7 +169,7 @@ void main() { () => test('test-4', PredefinedAcl.publicRead, 2), () => test('test-5', PredefinedAcl.bucketOwnerFullControl, 2), () => test('test-6', PredefinedAcl.bucketOwnerRead, 2) - ], (f) => f().then(expectAsync1((_) {}))); + ], (Function f) => f().then(expectAsync1((_) {}))); }); }, skip: 'unable to test with uniform buckets enforced for account'); @@ -185,7 +184,7 @@ void main() { var acl = info.metadata.acl; expect(info.name, objectName); expect(info.etag, isNotNull); - expect(acl.entries.length, expectedLength); + expect(acl!.entries.length, expectedLength); return bucket.delete(objectName).then(expectAsync1((result) { expect(result, isNull); })); @@ -219,7 +218,7 @@ void main() { () => test('test-2', acl2, acl2.entries.length + 1), () => test('test-3', acl3, acl3.entries.length + 1), () => test('test-4', acl4, acl4.entries.length + 1) - ], (f) => f().then(expectAsync1((_) {}))); + ], (Function f) => f().then(expectAsync1((_) {}))); }); }, skip: 'unable to test with uniform buckets enforced for account'); @@ -268,7 +267,7 @@ void main() { () => test('test-2', metadata2, [65, 66, 67]), () => test('test-3', metadata1, bytesResumableUpload), () => test('test-4', metadata2, bytesResumableUpload) - ], (f) => f().then(expectAsync1((_) {}))); + ], (Function f) => f().then(expectAsync1((_) {}))); }); }); }); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 5827303c..58a63bf4 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-// @dart=2.9 library gcloud.storage; @@ -111,16 +110,16 @@ void main() { expect(requestBucket.name, bucketName); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(requestBucket.acl, isNotNull); - expect(requestBucket.acl.length, count + 1); - expect(requestBucket.acl[0].entity, 'user-user@example.com'); - expect(requestBucket.acl[0].role, 'OWNER'); + expect(requestBucket.acl!.length, count + 1); + expect(requestBucket.acl![0].entity, 'user-user@example.com'); + expect(requestBucket.acl![0].role, 'OWNER'); if (count > 0) { - expect(requestBucket.acl[1].entity, 'group-group@example.com'); - expect(requestBucket.acl[1].role, 'WRITER'); + expect(requestBucket.acl![1].entity, 'group-group@example.com'); + expect(requestBucket.acl![1].role, 'WRITER'); } if (count > 2) { - expect(requestBucket.acl[2].entity, 'domain-example.com'); - expect(requestBucket.acl[2].role, 'READER'); + expect(requestBucket.acl![2].entity, 'domain-example.com'); + expect(requestBucket.acl![2].role, 'READER'); } count++; return mock.respond(storage.Bucket()..name = bucketName); @@ -173,16 +172,16 @@ void main() { expect(request.url.queryParameters['predefinedAcl'], predefined[predefinedIndex][1]); expect(requestBucket.acl, isNotNull); - expect(requestBucket.acl.length, aclIndex + 1); - expect(requestBucket.acl[0].entity, 'user-user@example.com'); - expect(requestBucket.acl[0].role, 'OWNER'); + expect(requestBucket.acl!.length, aclIndex + 1); + expect(requestBucket.acl![0].entity, 'user-user@example.com'); + expect(requestBucket.acl![0].role, 'OWNER'); if (aclIndex > 0) { - expect(requestBucket.acl[1].entity, 'group-group@example.com'); - expect(requestBucket.acl[1].role, 'WRITER'); + expect(requestBucket.acl![1].entity, 'group-group@example.com'); + expect(requestBucket.acl![1].role, 'WRITER'); } if (aclIndex > 2) { - expect(requestBucket.acl[2].entity, 'domain-example.com'); - expect(requestBucket.acl[2].role, 'READER'); + expect(requestBucket.acl![2].entity, 'domain-example.com'); + expect(requestBucket.acl![2].role, 'READER'); } count++; return mock.respond(storage.Bucket()..name = bucketName); @@ -458,7 +457,7 @@ void main() { test('write-short-error', () { withMockClient((MockClient mock, api) { - Future test(int length) { + Future test(int? 
length) { mock.clear(); mock.registerUpload('POST', 'b/$bucketName/o', expectAsync1((request) { @@ -467,14 +466,14 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done.then((_) => throw 'Unexpected').catchError( + sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: testDetailedApiError); sink.done.catchError(expectAsync1(expectNotNull), test: testDetailedApiError); return Stream.fromIterable([bytesNormalUpload]) .pipe(sink) - .then((_) => throw 'Unexpected') + .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testDetailedApiError); } @@ -504,12 +503,12 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); - sink.done.then((_) => throw 'Unexpected').catchError( + sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: testDetailedApiError); return Stream.fromIterable([bytesResumableUpload]) .pipe(sink) - .then((_) => throw 'Unexpected') + .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testDetailedApiError); } @@ -534,12 +533,12 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done.then((_) => throw 'Unexpected').catchError( + sink.done.then((_) => throw 'Unexpected').catchError( expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); return Stream>.fromIterable(data) .pipe(sink) - .then((_) => throw 'Unexpected') + .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: (e) => e is String || e is storage.ApiRequestError); } @@ -557,7 +556,7 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); sink.done - .then((_) => throw 'Unexpected') + .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testArgumentError); var stream = Stream.fromIterable([ [1, 2, 3] @@ -589,7 +588,7 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); sink.done - .then((_) => throw 'Unexpected') + .then((_) => throw 'Unexpected') .catchError(expectAsync1(expectNotNull), test: testArgumentError); var stream = Stream.fromIterable([bytesResumableUpload]); sink.addStream(stream).then((_) { @@ -794,16 +793,16 @@ void main() { expect(mediaUpload.contentType, 'application/octet-stream'); expect(request.url.queryParameters['predefinedAcl'], isNull); expect(object.acl, isNotNull); - expect(object.acl.length, count + 1); - expect(object.acl[0].entity, 'user-user@example.com'); - expect(object.acl[0].role, 'OWNER'); + expect(object.acl!.length, count + 1); + expect(object.acl![0].entity, 'user-user@example.com'); + expect(object.acl![0].role, 'OWNER'); if (count > 0) { - expect(object.acl[1].entity, 'group-group@example.com'); - expect(object.acl[1].role, 'OWNER'); + expect(object.acl![1].entity, 'group-group@example.com'); + expect(object.acl![1].role, 'OWNER'); } if (count > 2) { - expect(object.acl[2].entity, 'domain-example.com'); - expect(object.acl[2].role, 'READER'); + expect(object.acl![2].entity, 'domain-example.com'); + expect(object.acl![2].role, 'READER'); } count++; return mock.respond(storage.Object()..name = objectName); @@ -865,16 +864,16 @@ void main() { expect(request.url.queryParameters['predefinedAcl'], predefined[predefinedIndex][1]); expect(object.acl, isNotNull); - expect(object.acl.length, aclIndex + 1); - expect(object.acl[0].entity, 
'user-user@example.com'); - expect(object.acl[0].role, 'OWNER'); + expect(object.acl!.length, aclIndex + 1); + expect(object.acl![0].entity, 'user-user@example.com'); + expect(object.acl![0].role, 'OWNER'); if (aclIndex > 0) { - expect(object.acl[1].entity, 'group-group@example.com'); - expect(object.acl[1].role, 'OWNER'); + expect(object.acl![1].entity, 'group-group@example.com'); + expect(object.acl![1].role, 'OWNER'); } if (aclIndex > 2) { - expect(object.acl[2].entity, 'domain-example.com'); - expect(object.acl[2].role, 'READER'); + expect(object.acl![2].entity, 'domain-example.com'); + expect(object.acl![2].role, 'READER'); } count++; return mock.respond(storage.Object()..name = objectName); @@ -1040,13 +1039,13 @@ void main() { var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((ObjectInfo info) { expect(info.name, objectName); - expect(info.metadata.acl.entries.length, 3); - expect(info.metadata.acl.entries[0] is AclEntry, isTrue); - expect(info.metadata.acl.entries[0].scope is StorageIdScope, isTrue); - expect(info.metadata.acl.entries[1] is AclEntry, isTrue); - expect(info.metadata.acl.entries[1].scope is AccountScope, isTrue); - expect(info.metadata.acl.entries[2] is AclEntry, isTrue); - expect(info.metadata.acl.entries[2].scope is OpaqueScope, isTrue); + expect(info.metadata.acl!.entries.length, 3); + expect(info.metadata.acl!.entries[0] is AclEntry, isTrue); + expect(info.metadata.acl!.entries[0].scope is StorageIdScope, isTrue); + expect(info.metadata.acl!.entries[1] is AclEntry, isTrue); + expect(info.metadata.acl!.entries[1].scope is AccountScope, isTrue); + expect(info.metadata.acl!.entries[2] is AclEntry, isTrue); + expect(info.metadata.acl!.entries[2].scope is OpaqueScope, isTrue); })); }); }); From 20ac336dfee34888b38b90ea89daafc7c13d6129 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Tue, 4 May 2021 22:49:59 +0200 Subject: [PATCH 165/239] Use late final for cached hashCode --- pkgs/gcloud/lib/storage.dart | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index b2073168..caa00cfc 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - /// This library provides access to Google Cloud Storage. /// /// Google Cloud Storage is an object store for binary objects. Each @@ -110,7 +109,6 @@ int _jenkinsHash(List e) { /// The access controls are described by [AclEntry] objects. class Acl { final List _entries; - int? _cachedHashCode; // todo: late final /// The entries in the ACL. List get entries => UnmodifiableListView(_entries); @@ -177,7 +175,7 @@ class Acl { } @override - int get hashCode => _cachedHashCode ??= _jenkinsHash(_entries); + late final int hashCode = _jenkinsHash(_entries); @override bool operator ==(Object other) { @@ -204,7 +202,6 @@ class Acl { class AclEntry { final AclScope scope; final AclPermission permission; - int? _cachedHashCode; // todo: Late final AclEntry(this.scope, this.permission); @@ -223,7 +220,7 @@ class AclEntry { } @override - int get hashCode => _cachedHashCode ??= _jenkinsHash([scope, permission]); + late final int hashCode = _jenkinsHash([scope, permission]); @override bool operator ==(Object other) { @@ -249,8 +246,6 @@ class AclEntry { /// /// See https://cloud.google.com/storage/docs/accesscontrol for more details. 
abstract class AclScope { - int? _cachedHashCode; // todo: late final - /// ACL type for scope representing a Google Storage id. static const int _TYPE_STORAGE_ID = 0; @@ -291,7 +286,7 @@ abstract class AclScope { AclScope._(this._type, this._id); @override - int get hashCode => _cachedHashCode ??= _jenkinsHash([_type, _id]); + late final int hashCode = _jenkinsHash([_type, _id]); @override bool operator ==(Object other) { @@ -562,7 +557,7 @@ abstract class Storage { /// List names of all buckets. /// /// Returns a [Stream] of bucket names. - Stream listBucketNames(); + Stream listBucketNames(); /// Start paging through names of all buckets. /// @@ -570,7 +565,7 @@ abstract class Storage { /// /// Returns a [Future] which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page of buckets. - Future> pageBucketNames({int pageSize = 50}); + Future> pageBucketNames({int pageSize = 50}); /// Copy an object. /// @@ -791,7 +786,7 @@ abstract class Bucket { /// /// Returns a [Stream] of [BucketEntry]. Each element of the stream /// represents either an object or a directory component. - Stream list({String? prefix, String? delimiter}); + Stream list({String? prefix, String? delimiter}); /// Start paging through objects in the bucket. /// @@ -801,6 +796,6 @@ abstract class Bucket { /// /// Returns a `Future` which completes with a `Page` object holding the /// first page. Use the `Page` object to move to the next page. - Future> page( + Future> page( {String? prefix, String? delimiter, int pageSize = 50}); } From f36f0bffa3ccd77d13d22cbbc4c3f49f202aaf38 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Tue, 4 May 2021 23:01:43 +0200 Subject: [PATCH 166/239] Format example/main.dart --- pkgs/gcloud/example/main.dart | 1 - 1 file changed, 1 deletion(-) diff --git a/pkgs/gcloud/example/main.dart b/pkgs/gcloud/example/main.dart index 560a9e7a..c634c654 100644 --- a/pkgs/gcloud/example/main.dart +++ b/pkgs/gcloud/example/main.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
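Patch 165 above replaces the manually cached, nullable _cachedHashCode fields with late final initializers, which the runtime evaluates once on first access and then stores. A small self-contained sketch of the idiom; the Tags class and its hash function are illustrative, not taken from the package:

import 'dart:collection';

class Tags {
  final List<String> _entries;

  Tags(Iterable<String> entries) : _entries = List.unmodifiable(entries);

  List<String> get entries => UnmodifiableListView(_entries);

  // Computed once on first access and cached automatically, replacing the
  // previous pattern of a nullable cache field combined with `??=`.
  @override
  late final int hashCode =
      _entries.fold(0, (h, e) => 0x1fffffff & (h * 31 + e.hashCode));

  @override
  bool operator ==(Object other) {
    if (other is! Tags || other._entries.length != _entries.length) {
      return false;
    }
    for (var i = 0; i < _entries.length; i++) {
      if (other._entries[i] != _entries[i]) return false;
    }
    return true;
  }
}

void main() {
  final a = Tags(['storage', 'pubsub']);
  final b = Tags(['storage', 'pubsub']);
  print(a == b); // true
  print(a.hashCode == b.hashCode); // true
}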
- import 'dart:async' show Future; import 'dart:convert' show utf8; From f9582825dcd45bef38885224b91e35b4bb99c68c Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Tue, 4 May 2021 23:26:02 +0200 Subject: [PATCH 167/239] Fix storage_test.dart --- pkgs/gcloud/test/storage/storage_test.dart | 61 ++++++---------------- 1 file changed, 17 insertions(+), 44 deletions(-) diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 58a63bf4..4360f07c 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -324,12 +324,7 @@ void main() { var bytesResumableUpload = List.generate(minResumableUpload, (e) => e & 255); - bool testArgumentError(e) => e is ArgumentError; - bool testDetailedApiError(e) => e is storage.DetailedApiRequestError; - final expectNotNull = (o) async { - expect(o, isNotNull); - return null; - }; + final isDetailedApiError = isA(); void expectNormalUpload(MockClient mock, data, String objectName) { var bytes = data.fold([], (p, e) => p..addAll(e)); @@ -466,16 +461,10 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync1(expectNotNull), - test: testDetailedApiError); - sink.done.catchError(expectAsync1(expectNotNull), - test: testDetailedApiError); - return Stream.fromIterable([bytesNormalUpload]) - .pipe(sink) - .then((_) => throw 'Unexpected') - .catchError(expectAsync1(expectNotNull), - test: testDetailedApiError); + expect(sink.done, throwsA(isDetailedApiError)); + return expectLater( + Stream.fromIterable([bytesNormalUpload]).pipe(sink), + throwsA(isDetailedApiError)); } test(null) // Unknown length. @@ -503,14 +492,10 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); - sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync1(expectNotNull), - test: testDetailedApiError); - return Stream.fromIterable([bytesResumableUpload]) - .pipe(sink) - .then((_) => throw 'Unexpected') - .catchError(expectAsync1(expectNotNull), - test: testDetailedApiError); + expect(sink.done, throwsA(isDetailedApiError)); + return expectLater( + Stream.fromIterable([bytesResumableUpload]).pipe(sink), + throwsA(isDetailedApiError)); } test(null) // Unknown length. 
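Patch 167 above rewrites the error handling in these tests from then/catchError chains with predicate functions to matcher-based expectations: expect(future, throwsA(...)) and expectLater(...). A self-contained sketch of that style using package:test; FailingSink and UploadFailure are stand-ins for the mocked upload, not code from the package:

import 'dart:async';

import 'package:test/test.dart';

/// Error type thrown by the fake sink below; purely illustrative.
class UploadFailure implements Exception {
  final String message;
  UploadFailure(this.message);

  @override
  String toString() => 'UploadFailure: $message';
}

/// A sink whose done/close() futures always fail, standing in for a
/// failing upload.
class FailingSink implements StreamSink<List<int>> {
  final Completer<void> _done = Completer<void>();

  FailingSink() {
    _done.completeError(UploadFailure('server returned 502'));
  }

  @override
  Future<void> get done => _done.future;

  @override
  void add(List<int> event) {}

  @override
  void addError(Object error, [StackTrace? stackTrace]) {}

  @override
  Future<void> addStream(Stream<List<int>> stream) async {
    await for (final _ in stream) {
      // Discard incoming chunks; this sink only fails on close/done.
    }
  }

  @override
  Future<void> close() => done;
}

void main() {
  test('failures surface through matchers, not catchError chains', () async {
    final sink = FailingSink();
    await expectLater(sink.done, throwsA(isA<UploadFailure>()));
    await expectLater(
      Stream.fromIterable([
        <int>[1, 2, 3]
      ]).pipe(sink),
      throwsA(isA<UploadFailure>()),
    );
  });
}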
@@ -533,14 +518,10 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName, length: length); - sink.done.then((_) => throw 'Unexpected').catchError( - expectAsync1(expectNotNull), - test: (e) => e is String || e is storage.ApiRequestError); - return Stream>.fromIterable(data) - .pipe(sink) - .then((_) => throw 'Unexpected') - .catchError(expectAsync1(expectNotNull), - test: (e) => e is String || e is storage.ApiRequestError); + expect(sink.done, + throwsA(anyOf(isA(), isA()))); + return expectLater(Stream>.fromIterable(data).pipe(sink), + throwsA(anyOf(isA(), isA()))); } test([bytesResumableUpload], bytesResumableUpload.length + 1) @@ -555,17 +536,13 @@ void main() { withMockClient((mock, api) { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); - sink.done - .then((_) => throw 'Unexpected') - .catchError(expectAsync1(expectNotNull), test: testArgumentError); + expect(sink.done, throwsArgumentError); var stream = Stream.fromIterable([ [1, 2, 3] ]); sink.addStream(stream).then((_) { sink.addError(ArgumentError()); - sink - .close() - .catchError(expectAsync1(expectNotNull), test: testArgumentError); + expect(sink.close(), throwsArgumentError); }); }); }); @@ -587,15 +564,11 @@ void main() { var bucket = api.bucket(bucketName); var sink = bucket.write(bucketName); - sink.done - .then((_) => throw 'Unexpected') - .catchError(expectAsync1(expectNotNull), test: testArgumentError); + expect(sink.done, throwsArgumentError); var stream = Stream.fromIterable([bytesResumableUpload]); sink.addStream(stream).then((_) { sink.addError(ArgumentError()); - sink - .close() - .catchError(expectAsync1(expectNotNull), test: testArgumentError); + expect(sink.close(), throwsArgumentError); }); }); }); From edd88d566b4faf449a71a6fe71cbfb8fbbf827fd Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Thu, 27 May 2021 14:52:51 +0200 Subject: [PATCH 168/239] Support latest googleapis, prepare release --- pkgs/gcloud/CHANGELOG.md | 2 +- pkgs/gcloud/pubspec.yaml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 06ae8161..5ddbd021 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.8.0-dev +## 0.8.0-dev.0 * Require Dart 2.12 or later * Partial migration to null safety: diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ef13bacd..13e70664 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.0-dev +version: 0.8.0-dev.0 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud @@ -9,8 +9,8 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: ^2.0.0 - http: '^0.13.0' + googleapis: ^3.0.0 + http: ^0.13.0 meta: ^1.3.0 dev_dependencies: @@ -18,4 +18,4 @@ dev_dependencies: http_parser: ^4.0.0 mime: ^1.0.0 pedantic: ^1.11.0 - test: ^1.16.0 + test: ^1.17.5 From c47267501166f6f30b1be754bf29af42e79d7d0a Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 27 May 2021 15:17:33 +0200 Subject: [PATCH 169/239] Migrated pubsub implementation --- pkgs/gcloud/lib/pubsub.dart | 5 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 145 +++++++++++++-------------- 2 files changed, 74 insertions(+), 76 deletions(-) diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 4434f097..468e89dc 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library gcloud.pubsub; @@ -311,7 +310,7 @@ abstract class Subscription { /// The URI for the push endpoint. /// /// If this is a pull subscription this is `null`. - Uri get endpoint; + Uri? get endpoint; /// Update the push configuration with a new endpoint. /// @@ -338,7 +337,7 @@ abstract class Subscription { /// /// If [wait] is `false`, the method will complete the returned `Future` /// with `null` if it finds that there are no messages available. - Future pull({bool wait = true}); + Future pull({bool wait = true}); } /// The content of a Pub/Sub message. diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 9d3f9b27..82f35d3a 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.pubsub; @@ -44,13 +43,13 @@ class _PubSubImpl implements PubSub { } Future _listTopics( - int pageSize, String nextPageToken) { + int pageSize, String? nextPageToken) { return _api.projects.topics.list('projects/$project', pageSize: pageSize, pageToken: nextPageToken); } Future _createSubscription( - String name, String topic, Uri endpoint) { + String name, String topic, Uri? endpoint) { var subscription = pubsub.Subscription()..topic = topic; if (endpoint != null) { var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint.toString(); @@ -71,12 +70,12 @@ class _PubSubImpl implements PubSub { } Future _listSubscriptions( - String topic, int pageSize, String nextPageToken) { + String? topic, int pageSize, String? nextPageToken) { return _api.projects.subscriptions.list('projects/$project', pageSize: pageSize, pageToken: nextPageToken); } - Future _modifyPushConfig(String subscription, Uri endpoint) { + Future _modifyPushConfig(String subscription, Uri? endpoint) { var pushConfig = pubsub.PushConfig() ..pushEndpoint = endpoint != null ? endpoint.toString() : null; var request = pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; @@ -89,7 +88,7 @@ class _PubSubImpl implements PubSub { ..messages = [ (pubsub.PubsubMessage() ..dataAsBytes = message - ..attributes = attributes) + ..attributes = attributes.isEmpty ? 
null : attributes) ]; // TODO(sgjesse): Handle PublishResponse containing message ids. return _api.projects.topics.publish(request, topic).then((_) => null); @@ -174,7 +173,7 @@ class _PubSubImpl implements PubSub { @override Future createSubscription(String name, String topic, - {Uri endpoint}) { + {Uri? endpoint}) { _checkSubscriptionName(name); _checkTopicName(topic); return _createSubscription( @@ -196,7 +195,7 @@ class _PubSubImpl implements PubSub { } @override - Stream listSubscriptions([String query]) { + Stream listSubscriptions([String? query]) { Future> firstPage(int pageSize) { return _listSubscriptions(query, pageSize, null).then( (response) => _SubscriptionPageImpl(this, query, pageSize, response)); @@ -207,7 +206,7 @@ class _PubSubImpl implements PubSub { @override Future> pageSubscriptions( - {String topic, int pageSize = 50}) { + {String? topic, int pageSize = 50}) { return _listSubscriptions(topic, pageSize, null).then((response) { return _SubscriptionPageImpl(this, topic, pageSize, response); }); @@ -219,26 +218,30 @@ class _PubSubImpl implements PubSub { class _MessageImpl implements Message { // The message body, if it is a `String`. In that case, [bytesMessage] is // null. - final String _stringMessage; + final String? _stringMessage; // The message body, if it is a byte list. In that case, [stringMessage] is // null. - final List _bytesMessage; + final List? _bytesMessage; @override final Map attributes; - _MessageImpl.withString(this._stringMessage, {this.attributes}) - : _bytesMessage = null; + _MessageImpl.withString( + this._stringMessage, { + Map? attributes, + }) : _bytesMessage = null, + attributes = attributes ?? {}; - _MessageImpl.withBytes(this._bytesMessage, {this.attributes}) - : _stringMessage = null; + _MessageImpl.withBytes(this._bytesMessage, {Map? attributes}) + : _stringMessage = null, + attributes = attributes ?? {}; @override - List get asBytes => _bytesMessage ?? utf8.encode(_stringMessage); + List get asBytes => _bytesMessage ?? utf8.encode(_stringMessage!); @override - String get asString => _stringMessage ?? utf8.decode(_bytesMessage); + String get asString => _stringMessage ?? utf8.decode(_bytesMessage!); } /// Message received using [Subscription.pull]. @@ -249,25 +252,26 @@ class _MessageImpl implements Message { /// The labels map is lazily created when first accessed. class _PullMessage implements Message { final pubsub.PubsubMessage _message; - List _bytes; - String _string; + List? _bytes; + String? _string; _PullMessage(this._message); @override List get asBytes { _bytes ??= _message.dataAsBytes; - return _bytes; + return _bytes!; } @override String get asString { _string ??= utf8.decode(_message.dataAsBytes); - return _string; + return _string!; } @override - Map get attributes => _message.attributes; + Map get attributes => + _message.attributes ?? {}; } /// Message received through Pub/Sub push delivery. 
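The pub/sub migration in this patch leaves the public surface intact: Topic.publishString and publishBytes take optional attributes (now defaulted to an empty map), and Subscription.pull completes with null when wait is false and the backlog is empty. A usage sketch under the assumption that a Topic and a Subscription have already been created elsewhere (credentials, project and PubSub construction elided):

import 'package:gcloud/pubsub.dart';

/// Publishes two messages to an existing topic and pulls one back from an
/// existing subscription, acknowledging it.
Future<void> roundTrip(Topic topic, Subscription subscription) async {
  // Attributes are optional; omitting them now means an empty map.
  await topic.publishString('hello', attributes: {'origin': 'example'});
  await topic.publishBytes([0x68, 0x69]);

  // With wait: false the returned future completes with null when no
  // message is waiting in the backlog.
  final event = await subscription.pull(wait: false);
  if (event != null) {
    print('received: ${event.message.asString} ${event.message.attributes}');
    await event.acknowledge();
  }
}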
@@ -308,11 +312,11 @@ class _PullEventImpl implements PullEvent { _PullEventImpl( this._api, this._subscriptionName, pubsub.PullResponse response) : _response = response, - message = _PullMessage(response.receivedMessages[0].message); + message = _PullMessage(response.receivedMessages![0].message!); @override Future acknowledge() { - return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName); + return _api._ack(_response.receivedMessages![0].ackId!, _subscriptionName); } } @@ -360,35 +364,37 @@ class _TopicImpl implements Topic { @override String get name { - assert(_topic.name.startsWith(_api._topicPrefix)); - return _topic.name.substring(_api._topicPrefix.length); + assert(_topic.name!.startsWith(_api._topicPrefix)); + return _topic.name!.substring(_api._topicPrefix.length); } @override String get project { - assert(_topic.name.startsWith(_api._topicPrefix)); + assert(_topic.name!.startsWith(_api._topicPrefix)); return _api.project; } @override - String get absoluteName => _topic.name; + String get absoluteName => _topic.name!; @override Future publish(Message message) { - return _api._publish(_topic.name, message.asBytes, message.attributes); + return _api._publish(_topic.name!, message.asBytes, message.attributes); } @override - Future delete() => _api._deleteTopic(_topic.name); + Future delete() => _api._deleteTopic(_topic.name!); @override - Future publishString(String message, {Map attributes}) { - return _api._publish(_topic.name, utf8.encode(message), attributes); + Future publishString(String message, {Map? attributes}) { + attributes ??= {}; + return _api._publish(_topic.name!, utf8.encode(message), attributes); } @override - Future publishBytes(List message, {Map attributes}) { - return _api._publish(_topic.name, message, attributes); + Future publishBytes(List message, {Map? attributes}) { + attributes ??= {}; + return _api._publish(_topic.name!, message, attributes); } } @@ -400,18 +406,18 @@ class _SubscriptionImpl implements Subscription { @override String get name { - assert(_subscription.name.startsWith(_api._subscriptionPrefix)); - return _subscription.name.substring(_api._subscriptionPrefix.length); + assert(_subscription.name!.startsWith(_api._subscriptionPrefix)); + return _subscription.name!.substring(_api._subscriptionPrefix.length); } @override String get project { - assert(_subscription.name.startsWith(_api._subscriptionPrefix)); + assert(_subscription.name!.startsWith(_api._subscriptionPrefix)); return _api.project; } @override - String get absoluteName => _subscription.name; + String get absoluteName => _subscription.name!; @override Topic get topic { @@ -420,25 +426,25 @@ class _SubscriptionImpl implements Subscription { } @override - Future delete() => _api._deleteSubscription(_subscription.name); + Future delete() => _api._deleteSubscription(_subscription.name!); @override - Future pull({bool wait = true}) { - return _api._pull(_subscription.name, !wait).then((response) { + Future pull({bool wait = true}) { + return _api._pull(_subscription.name!, !wait).then((response) { // The documentation says 'Returns an empty list if there are no // messages available in the backlog'. However the receivedMessages // property can also be null in that case. 
if (response.receivedMessages == null || - response.receivedMessages.isEmpty) { + response.receivedMessages!.isEmpty) { return null; } - return _PullEventImpl(_api, _subscription.name, response); + return _PullEventImpl(_api, _subscription.name!, response); }).catchError((e) => null, test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400); } @override - Uri get endpoint => null; + Uri? get endpoint => null; @override bool get isPull => endpoint == null; @@ -448,25 +454,22 @@ class _SubscriptionImpl implements Subscription { @override Future updatePushConfiguration(Uri endpoint) { - return _api._modifyPushConfig(_subscription.name, endpoint); + return _api._modifyPushConfig(_subscription.name!, endpoint); } } class _TopicPageImpl implements Page { final _PubSubImpl _api; final int _pageSize; - final String _nextPageToken; + final String? _nextPageToken; @override - final List items; + final List items = []; _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response) - : items = List.filled( - response.topics != null ? response.topics.length : 0, null), - _nextPageToken = response.nextPageToken { - if (response.topics != null) { - for (var i = 0; i < response.topics.length; i++) { - items[i] = _TopicImpl(_api, response.topics[i]); - } + : _nextPageToken = response.nextPageToken { + final topics = response.topics; + if (topics != null) { + items.addAll(topics.map((t) => _TopicImpl(_api, t))); } } @@ -474,34 +477,30 @@ class _TopicPageImpl implements Page { bool get isLast => _nextPageToken == null; @override - Future> next({int pageSize}) { + Future> next({int? pageSize}) { if (isLast) return Future.value(null); - pageSize ??= _pageSize; + final pageSize_ = pageSize ?? _pageSize; - return _api._listTopics(pageSize, _nextPageToken).then((response) { - return _TopicPageImpl(_api, pageSize, response); + return _api._listTopics(pageSize_, _nextPageToken).then((response) { + return _TopicPageImpl(_api, pageSize_, response); }); } } class _SubscriptionPageImpl implements Page { final _PubSubImpl _api; - final String _topic; + final String? _topic; final int _pageSize; - final String _nextPageToken; + final String? _nextPageToken; @override - final List items; + final List items = []; _SubscriptionPageImpl(this._api, this._topic, this._pageSize, pubsub.ListSubscriptionsResponse response) - : items = List.filled( - response.subscriptions != null ? response.subscriptions.length : 0, - null), - _nextPageToken = response.nextPageToken { - if (response.subscriptions != null) { - for (var i = 0; i < response.subscriptions.length; i++) { - items[i] = _SubscriptionImpl(_api, response.subscriptions[i]); - } + : _nextPageToken = response.nextPageToken { + final subscriptions = response.subscriptions; + if (subscriptions != null) { + items.addAll(subscriptions.map((s) => _SubscriptionImpl(_api, s))); } } @@ -509,14 +508,14 @@ class _SubscriptionPageImpl implements Page { bool get isLast => _nextPageToken == null; @override - Future> next({int pageSize}) { + Future> next({int? pageSize}) { if (_nextPageToken == null) return Future.value(null); - pageSize ??= _pageSize; + final pageSize_ = pageSize ?? 
_pageSize; return _api - ._listSubscriptions(_topic, pageSize, _nextPageToken) + ._listSubscriptions(_topic, pageSize_, _nextPageToken) .then((response) { - return _SubscriptionPageImpl(_api, _topic, pageSize, response); + return _SubscriptionPageImpl(_api, _topic, pageSize_, response); }); } } From 7f0751d55b4fc4203ac55a38931f103a6fe46998 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 27 May 2021 18:24:47 +0200 Subject: [PATCH 170/239] Migrated datastore to null-safety --- pkgs/gcloud/lib/datastore.dart | 55 +++-- pkgs/gcloud/lib/src/datastore_impl.dart | 204 +++++++++--------- pkgs/gcloud/lib/src/db/db.dart | 6 +- .../datastore/e2e/datastore_test_impl.dart | 2 +- pkgs/gcloud/test/datastore/e2e/utils.dart | 2 +- 5 files changed, 140 insertions(+), 129 deletions(-) diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 00bc9a28..b8aba812 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 /// This library provides a low-level API for accessing Google's Cloud /// Datastore. @@ -49,7 +48,7 @@ class ApplicationError implements Exception { class DatastoreError implements Exception { final String message; - DatastoreError([String message]) + DatastoreError([String? message]) : message = (message ?? 'DatastoreError: An unknown error occured'); @override @@ -107,10 +106,11 @@ class QuotaExceededError extends DatastoreError { /// relevant if the value is a list of primitive values). class Entity { final Key key; - final Map properties; + final Map properties; final Set unIndexedProperties; - Entity(this.key, this.properties, {this.unIndexedProperties}); + Entity(this.key, this.properties, + {this.unIndexedProperties = const {}}); } /// A complete or partial key. @@ -137,11 +137,10 @@ class Key { /// The path of `KeyElement`s. final List elements; - Key(this.elements, {Partition partition}) - : partition = (partition == null) ? Partition.DEFAULT : partition; + Key(this.elements, {this.partition = Partition.DEFAULT}); - factory Key.fromParent(String kind, int id, {Key parent}) { - Partition partition; + factory Key.fromParent(String kind, int id, {Key? parent}) { + var partition = Partition.DEFAULT; var elements = []; if (parent != null) { partition = parent.partition; @@ -189,7 +188,9 @@ class Partition { static const Partition DEFAULT = Partition._default(); /// The namespace of this partition. - final String namespace; + /// + /// The default namespace is `null`. + final String? namespace; Partition(this.namespace) { if (namespace == '') { @@ -217,12 +218,9 @@ class KeyElement { /// This may be `null`, in which case it does not identify an Entity. It is /// possible to insert [Entity]s with incomplete keys and let Datastore /// automatically select a unused integer ID. - final id; + final dynamic id; KeyElement(this.kind, this.id) { - if (kind == null) { - throw ArgumentError("'kind' must not be null"); - } if (id != null) { if (id is! int && id is! String) { throw ArgumentError("'id' must be either null, a String or an int"); @@ -309,30 +307,31 @@ class Order { /// var query = new Query(ancestorKey: personKey, kind: 'Address') class Query { /// Restrict the result set to entities of this kind. - final String kind; + final String? 
kind; /// Restrict the result set to entities which have this ancestorKey / parent. - final Key ancestorKey; + final Key? ancestorKey; /// Restrict the result set by a list of property [Filter]s. - final List filters; + final List? filters; /// Order the matching entities following the given property [Order]s. - final List orders; + final List? orders; /// Skip the first [offset] entities in the result set. - final int offset; + final int? offset; /// Limit the number of entities returned to [limit]. - final int limit; - - Query( - {this.ancestorKey, - this.kind, - this.filters, - this.orders, - this.offset, - this.limit}); + final int? limit; + + Query({ + this.ancestorKey, + this.kind, + this.filters, + this.orders, + this.offset, + this.limit, + }); } /// The result of a commit. @@ -408,7 +407,7 @@ abstract class Datastore { /// returned [Entity]s is the same as in [keys]. /// /// If a [transaction] is given, the lookup will be within this transaction. - Future> lookup(List keys, {Transaction transaction}); + Future> lookup(List keys, {Transaction transaction}); /// Runs a query on the dataset and returns a [Page] of matching [Entity]s. /// diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index fc8af787..92631170 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library gcloud.datastore_impl; @@ -61,41 +60,47 @@ class DatastoreImpl implements datastore.Datastore { } static datastore.Key _convertApi2DatastoreKey(api.Key key) { - var elements = key.path.map((api.PathElement element) { + var elements = key.path!.map((api.PathElement element) { if (element.id != null) { - return datastore.KeyElement(element.kind, int.parse(element.id)); + return datastore.KeyElement(element.kind!, int.parse(element.id!)); } else if (element.name != null) { - return datastore.KeyElement(element.kind, element.name); + return datastore.KeyElement(element.kind!, element.name); } else { throw datastore.DatastoreError( 'Invalid server response: Expected allocated name/id.'); } }).toList(); - datastore.Partition partition; + var partition = datastore.Partition.DEFAULT; if (key.partitionId != null) { - partition = datastore.Partition(key.partitionId.namespaceId); + partition = datastore.Partition(key.partitionId!.namespaceId); // TODO: assert projectId. } return datastore.Key(elements, partition: partition); } bool _compareApiKey(api.Key a, api.Key b) { - if (a.path.length != b.path.length) return false; + if (a.path!.length != b.path!.length) return false; // FIXME(Issue #2): Is this comparison working correctly? 
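The datastore API migrated above keeps its constructor shapes: Key.fromParent, KeyElement with a nullable id for incomplete keys, Partition.DEFAULT as the default partition, and Query with optional ancestorKey, kind, filters, orders, offset and limit. A brief sketch of building keys and a query with those constructors; the kind names, ids and limit are placeholder values:

import 'package:gcloud/datastore.dart';

void main() {
  // Complete key in the default partition: Person(42).
  final personKey = Key.fromParent('Person', 42);

  // Child key: Person(42) / Address(1).
  final addressKey = Key.fromParent('Address', 1, parent: personKey);

  // Incomplete key (null id); the datastore can allocate an id on insert.
  final incompleteKey = Key([KeyElement('Person', null)]);

  // All Address entities under personKey, at most 10 results.
  final query = Query(ancestorKey: personKey, kind: 'Address', limit: 10);

  print('$personKey $addressKey $incompleteKey $query');
}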
if (a.partitionId != null) { - if (b.partitionId == null) return false; - if (a.partitionId.projectId != b.partitionId.projectId) return false; - if (a.partitionId.namespaceId != b.partitionId.namespaceId) return false; - } else { - if (b.partitionId != null) return false; + if (b.partitionId == null) { + return false; + } + if (a.partitionId!.projectId != b.partitionId!.projectId) { + return false; + } + if (a.partitionId!.namespaceId != b.partitionId!.namespaceId) { + return false; + } + } else if (b.partitionId != null) { + return false; } - for (var i = 0; i < a.path.length; i++) { - if (a.path[i].id != b.path[i].id || - a.path[i].name != b.path[i].name || - a.path[i].kind != b.path[i].kind) return false; + for (var i = 0; i < a.path!.length; i++) { + if (a.path![i].id != b.path![i].id || + a.path![i].name != b.path![i].name || + a.path![i].kind != b.path![i].kind) return false; } return true; } @@ -142,19 +147,19 @@ class DatastoreImpl implements datastore.Datastore { if (value.booleanValue != null) { return value.booleanValue; } else if (value.integerValue != null) { - return int.parse(value.integerValue); + return int.parse(value.integerValue!); } else if (value.doubleValue != null) { return value.doubleValue; } else if (value.stringValue != null) { return value.stringValue; } else if (value.timestampValue != null) { - return DateTime.parse(value.timestampValue); + return DateTime.parse(value.timestampValue!); } else if (value.blobValue != null) { return datastore.BlobValue(value.blobValueAsBytes); } else if (value.keyValue != null) { - return _convertApi2DatastoreKey(value.keyValue); - } else if (value.arrayValue != null && value.arrayValue.values != null) { - return value.arrayValue.values + return _convertApi2DatastoreKey(value.keyValue!); + } else if (value.arrayValue != null && value.arrayValue!.values != null) { + return value.arrayValue!.values! .map(_convertApi2DatastoreProperty) .toList(); } else if (value.entityValue != null) { @@ -167,17 +172,17 @@ class DatastoreImpl implements datastore.Datastore { static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) { var unindexedProperties = {}; - var properties = {}; + var properties = {}; if (entity.properties != null) { - entity.properties.forEach((String name, api.Value value) { + entity.properties!.forEach((String name, api.Value value) { properties[name] = _convertApi2DatastoreProperty(value); - if (value.excludeFromIndexes != null && value.excludeFromIndexes) { + if (value.excludeFromIndexes != null && value.excludeFromIndexes!) 
{ unindexedProperties.add(name); } }); } - return datastore.Entity(_convertApi2DatastoreKey(entity.key), properties, + return datastore.Entity(_convertApi2DatastoreKey(entity.key!), properties, unIndexedProperties: unindexedProperties); } @@ -186,16 +191,12 @@ class DatastoreImpl implements datastore.Datastore { var apiEntity = api.Entity(); apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId); - apiEntity.properties = {}; - if (entity.properties != null) { + final properties = apiEntity.properties = {}; + if (entity.properties.isNotEmpty) { for (var key in entity.properties.keys) { var value = entity.properties[key]; - var indexed = false; - if (entity.unIndexedProperties != null) { - indexed = !entity.unIndexedProperties.contains(key); - } - var property = _convertDatastore2ApiPropertyValue(value, indexed); - apiEntity.properties[key] = property; + final indexed = !entity.unIndexedProperties.contains(key); + properties[key] = _convertDatastore2ApiPropertyValue(value, indexed); } } return apiEntity; @@ -231,8 +232,10 @@ class DatastoreImpl implements datastore.Datastore { return api.Filter()..propertyFilter = pf; } - api.Filter _convertDatastore2ApiFilters( - List filters, datastore.Key ancestorKey) { + api.Filter? _convertDatastore2ApiFilters( + List? filters, + datastore.Key? ancestorKey, + ) { if ((filters == null || filters.isEmpty) && ancestorKey == null) { return null; } @@ -246,7 +249,7 @@ class DatastoreImpl implements datastore.Datastore { if (compFilter.filters == null) { compFilter.filters = [filter]; } else { - compFilter.filters.add(filter); + compFilter.filters!.add(filter); } } compFilter.op = 'AND'; @@ -263,17 +266,22 @@ class DatastoreImpl implements datastore.Datastore { ..property = property; } - List _convertDatastore2ApiOrders( - List orders) { + List? _convertDatastore2ApiOrders( + List? orders) { if (orders == null) return null; return orders.map(_convertDatastore2ApiOrder).toList(); } - static Future _handleError(error, StackTrace stack) { + static Future _handleError(Object error, StackTrace stack) { if (error is api.DetailedApiRequestError) { if (error.status == 400) { - return Future.error(datastore.ApplicationError(error.message), stack); + return Future.error( + datastore.ApplicationError( + error.message ?? 'An unknown error occured', + ), + stack, + ); } else if (error.status == 409) { // NOTE: This is reported as: // "too much contention on these datastore entities" @@ -293,7 +301,7 @@ class DatastoreImpl implements datastore.Datastore { return _convertDatastore2ApiKey(key, enforceId: false); }).toList(); return _api.projects.allocateIds(request, _project).then((response) { - return response.keys.map(_convertApi2DatastoreKey).toList(); + return (response.keys ?? []).map(_convertApi2DatastoreKey).toList(); }, onError: _handleError); } @@ -302,17 +310,18 @@ class DatastoreImpl implements datastore.Datastore { {bool crossEntityGroup = false}) { var request = api.BeginTransactionRequest(); return _api.projects.beginTransaction(request, _project).then((result) { - return TransactionImpl(result.transaction); + return TransactionImpl(result.transaction!); }, onError: _handleError); } @override - Future commit( - {List inserts, - List autoIdInserts, - List deletes, - datastore.Transaction transaction}) { - var request = api.CommitRequest(); + Future commit({ + List inserts = const [], + List autoIdInserts = const [], + List deletes = const [], + datastore.Transaction? 
transaction, + }) { + final request = api.CommitRequest(); if (transaction != null) { request.mode = 'TRANSACTIONAL'; @@ -322,14 +331,14 @@ class DatastoreImpl implements datastore.Datastore { } var mutations = request.mutations = []; - if (inserts != null) { + if (inserts.isNotEmpty) { for (var i = 0; i < inserts.length; i++) { mutations.add(api.Mutation() ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true)); } } var autoIdStartIndex = -1; - if (autoIdInserts != null) { + if (autoIdInserts.isNotEmpty) { autoIdStartIndex = mutations.length; for (var i = 0; i < autoIdInserts.length; i++) { mutations.add(api.Mutation() @@ -337,24 +346,24 @@ class DatastoreImpl implements datastore.Datastore { _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false)); } } - if (deletes != null) { + if (deletes.isNotEmpty) { for (var i = 0; i < deletes.length; i++) { mutations.add(api.Mutation() ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true)); } } return _api.projects.commit(request, _project).then((result) { - List keys; - if (autoIdInserts != null && autoIdInserts.isNotEmpty) { - var mutationResults = result.mutationResults; + var keys = []; + if (autoIdInserts.isNotEmpty) { + assert(result.mutationResults != null); + var mutationResults = result.mutationResults!; assert(autoIdStartIndex != -1); assert(mutationResults.length >= (autoIdStartIndex + autoIdInserts.length)); keys = mutationResults .skip(autoIdStartIndex) .take(autoIdInserts.length) - .map( - (api.MutationResult r) => _convertApi2DatastoreKey(r.key)) + .map((r) => _convertApi2DatastoreKey(r.key!)) .toList(); } return datastore.CommitResult(keys); @@ -362,8 +371,10 @@ class DatastoreImpl implements datastore.Datastore { } @override - Future> lookup(List keys, - {datastore.Transaction transaction}) { + Future> lookup( + List keys, { + datastore.Transaction? transaction, + }) { var apiKeys = keys.map((key) { return _convertDatastore2ApiKey(key, enforceId: true); }).toList(); @@ -371,11 +382,11 @@ class DatastoreImpl implements datastore.Datastore { request.keys = apiKeys; if (transaction != null) { // TODO: Make readOptions more configurable. - request.readOptions = api.ReadOptions(); - request.readOptions.transaction = (transaction as TransactionImpl).data; + request.readOptions = api.ReadOptions() + ..transaction = (transaction as TransactionImpl).data; } return _api.projects.lookup(request, _project).then((response) { - if (response.deferred != null && response.deferred.isNotEmpty) { + if (response.deferred != null && response.deferred!.isNotEmpty) { throw datastore.DatastoreError( 'Could not successfully look up all keys due to resource ' 'constraints.'); @@ -396,16 +407,16 @@ class DatastoreImpl implements datastore.Datastore { // // A list of keys that were not looked up due to resource constraints. // repeated Key deferred = 3; // } - var entities = List.filled(apiKeys.length, null); + var entities = List.filled(apiKeys.length, null); for (var i = 0; i < apiKeys.length; i++) { var apiKey = apiKeys[i]; var found = false; if (response.found != null) { - for (var result in response.found) { - if (_compareApiKey(apiKey, result.entity.key)) { - entities[i] = _convertApi2DatastoreEntity(result.entity); + for (var result in response.found!) 
{ + if (_compareApiKey(apiKey, result.entity!.key!)) { + entities[i] = _convertApi2DatastoreEntity(result.entity!); found = true; break; } @@ -415,8 +426,8 @@ class DatastoreImpl implements datastore.Datastore { if (found) continue; if (response.missing != null) { - for (var result in response.missing) { - if (_compareApiKey(apiKey, result.entity.key)) { + for (var result in response.missing!) { + if (_compareApiKey(apiKey, result.entity!.key!)) { entities[i] = null; found = true; break; @@ -435,8 +446,11 @@ class DatastoreImpl implements datastore.Datastore { } @override - Future> query(datastore.Query query, - {datastore.Partition partition, datastore.Transaction transaction}) { + Future> query( + datastore.Query query, { + datastore.Partition partition = datastore.Partition.DEFAULT, + datastore.Transaction? transaction, + }) { // NOTE: We explicitly do not set 'limit' here, since this is handled by // QueryPageImpl.runQuery. var apiQuery = api.Query() @@ -452,10 +466,10 @@ class DatastoreImpl implements datastore.Datastore { request.query = apiQuery; if (transaction != null) { // TODO: Make readOptions more configurable. - request.readOptions = api.ReadOptions(); - request.readOptions.transaction = (transaction as TransactionImpl).data; + request.readOptions = api.ReadOptions() + ..transaction = (transaction as TransactionImpl).data; } - if (partition != null) { + if (partition != datastore.Partition.DEFAULT) { request.partitionId = api.PartitionId() ..namespaceId = partition.namespace; } @@ -483,54 +497,52 @@ class QueryPageImpl implements Page { final bool _isLast; // This might be `null` in which case we request as many as we can get. - final int _remainingNumberOfEntities; + final int? _remainingNumberOfEntities; QueryPageImpl(this._api, this._project, this._nextRequest, this._entities, this._isLast, this._remainingNumberOfEntities); static Future runQuery(api.DatastoreApi api, String project, - api.RunQueryRequest request, int limit, - {int batchSize}) { - var batchLimit = batchSize; - batchLimit ??= MAX_ENTITIES_PER_RESPONSE; - if (limit != null && limit < batchLimit) { - batchLimit = limit; + api.RunQueryRequest request, int? limit, + {int batchSize = MAX_ENTITIES_PER_RESPONSE}) { + if (limit != null && limit < batchSize) { + batchSize = limit; } - request.query.limit = batchLimit; + request.query!.limit = batchSize; return api.projects.runQuery(request, project).then((response) { var returnedEntities = const []; - var batch = response.batch; + final batch = response.batch!; if (batch.entityResults != null) { - returnedEntities = batch.entityResults - .map((result) => result.entity) + returnedEntities = batch.entityResults! + .map((result) => result.entity!) .map(DatastoreImpl._convertApi2DatastoreEntity) .toList(); } // This check is only necessary for the first request/response pair // (if offset was supplied). - if (request.query.offset != null && - request.query.offset > 0 && - request.query.offset != response.batch.skippedResults) { + if (request.query!.offset != null && + request.query!.offset! 
> 0 && + request.query!.offset != batch.skippedResults) { throw datastore.DatastoreError( - 'Server did not skip over the specified ${request.query.offset} ' + 'Server did not skip over the specified ${request.query!.offset} ' 'entities.'); } if (limit != null && returnedEntities.length > limit) { throw datastore.DatastoreError( 'Server returned more entities then the limit for the request' - '(${request.query.limit}) was.'); + '(${request.query!.limit}) was.'); } // FIXME: TODO: Big hack! // It looks like Apiary/Atlas is currently broken. /* if (limit != null && - returnedEntities.length < batchLimit && + returnedEntities.length < batchSize && response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT') { throw new datastore.DatastoreError( 'Server returned response with less entities then the limit was, ' @@ -541,7 +553,7 @@ class QueryPageImpl implements Page { // In case a limit was specified, we need to subtraction the number of // entities we already got. // (the checks above guarantee that this subtraction is >= 0). - int remainingEntities; + int? remainingEntities; if (limit != null) { remainingEntities = limit - returnedEntities.length; } @@ -550,10 +562,10 @@ class QueryPageImpl implements Page { // limit or our limit has not been reached, we set `moreBatches` to // `true`. var moreBatches = (remainingEntities == null || remainingEntities > 0) && - response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT'; + batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT'; var gotAll = limit != null && remainingEntities == 0; - var noMore = response.batch.moreResults == 'NO_MORE_RESULTS'; + var noMore = batch.moreResults == 'NO_MORE_RESULTS'; var isLast = gotAll || noMore; // As a sanity check, we assert that `moreBatches XOR isLast`. @@ -568,7 +580,7 @@ class QueryPageImpl implements Page { moreBatches = false; } - if (!isLast && response.batch.endCursor == null) { + if (!isLast && batch.endCursor == null) { throw datastore.DatastoreError( 'Server did not supply an end cursor, even though the query ' 'is not done.'); @@ -582,11 +594,11 @@ class QueryPageImpl implements Page { // The offset will be 0 from now on, since the first request will have // skipped over the first `offset` results. - request.query.offset = 0; + request.query!.offset = 0; // Furthermore we set the startCursor to the endCursor of the previous // result batch, so we can continue where we left off. - request.query.startCursor = batch.endCursor; + request.query!.startCursor = batch.endCursor; return QueryPageImpl( api, project, request, returnedEntities, false, remainingEntities); @@ -601,7 +613,7 @@ class QueryPageImpl implements Page { List get items => _entities; @override - Future> next({int pageSize}) { + Future> next({int? pageSize}) { // NOTE: We do not respect [pageSize] here, the only mechanism we can // really use is `query.limit`, but this is user-specified when making // the query. diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 300c08cd..684bab5c 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -405,9 +405,9 @@ Future _commitHelper(DatastoreDB db, return db.datastore .commit( - inserts: entityInserts, - autoIdInserts: entityAutoIdInserts, - deletes: entityDeletes, + inserts: entityInserts ?? [], + autoIdInserts: entityAutoIdInserts ?? [], + deletes: entityDeletes ?? 
[], transaction: datastoreTransaction) .then((ds.CommitResult result) { if (entityAutoIdInserts != null && entityAutoIdInserts.isNotEmpty) { diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index ce5b7e1a..99ca833b 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -49,7 +49,7 @@ Future> consumePages(FirstPageProvider provider) { } void runTests(Datastore datastore, String namespace) { - var partition = Partition(namespace); + final partition = Partition(namespace); Future withTransaction(FutureOr Function(Transaction t) f, {bool xg = false}) { diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index b3651da5..c26650b9 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -22,7 +22,7 @@ final TEST_BLOB_INDEXED_VALUE = BlobValue([0xaa, 0xaa, 0xff, 0xff]); Key buildKey(int i, {Function idFunction, String kind = TEST_KIND, Partition p}) { var path = [KeyElement(kind, idFunction == null ? null : idFunction(i))]; - return Key(path, partition: p); + return Key(path, partition: p ?? Partition.DEFAULT); } Map buildProperties(int i) { From 4050a96cbdc400aaceb4288977c87501d1cc4c0c Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 27 May 2021 22:34:40 +0200 Subject: [PATCH 171/239] Migrated db to null-safety --- pkgs/gcloud/lib/db.dart | 1 - pkgs/gcloud/lib/db/metamodel.dart | 3 +- pkgs/gcloud/lib/src/db/annotations.dart | 70 ++++++------ pkgs/gcloud/lib/src/db/db.dart | 131 +++++++++++++--------- pkgs/gcloud/lib/src/db/exceptions.dart | 1 - pkgs/gcloud/lib/src/db/model_db.dart | 7 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 69 ++++++------ pkgs/gcloud/lib/src/db/models.dart | 25 ++--- 8 files changed, 161 insertions(+), 146 deletions(-) diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 554337d7..42e15514 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library gcloud.db; diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index 7418e733..09616f8c 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library gcloud.db.meta_model; @@ -11,7 +10,7 @@ import '../db.dart' as db; class Namespace extends db.ExpandoModel { static const int EmptyNamespaceId = 1; - String get name { + String? get name { // The default namespace will be reported with id 1. if (id == Namespace.EmptyNamespaceId) return null; return id as String; diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 4bcd037c..38ad145d 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. 
Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; @@ -34,7 +33,7 @@ class Kind { /// /// If `null` the name will be the same as the class name at which the /// annotation is placed. - final String name; + final String? name; /// The type, either [ID_TYPE_INTEGER] or [ID_TYPE_STRING]. final IdType idType; @@ -71,7 +70,7 @@ abstract class Property { /// /// If it is `null`, the name will be the same as used in the /// model class. - final String propertyName; + final String? propertyName; /// Specifies whether this property is required or not. /// @@ -88,29 +87,30 @@ abstract class Property { const Property( {this.propertyName, this.required = false, this.indexed = true}); - bool validate(ModelDB db, Object value) { + bool validate(ModelDB db, Object? value) { if (required && value == null) return false; return true; } - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}); + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}); - Object decodePrimitiveValue(ModelDB db, Object value); + Object? decodePrimitiveValue(ModelDB db, Object? value); } /// An abstract base class for primitive properties which can e.g. be used /// within a composed `ListProperty`. abstract class PrimitiveProperty extends Property { const PrimitiveProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) => + Object? encodeValue(ModelDB db, Object? value, + {bool forComparison = false}) => value; @override - Object decodePrimitiveValue(ModelDB db, Object value) => value; + Object? decodePrimitiveValue(ModelDB db, Object? value) => value; } /// A boolean [Property]. @@ -119,11 +119,11 @@ abstract class PrimitiveProperty extends Property { /// datastore and when reading them back. class BoolProperty extends PrimitiveProperty { const BoolProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? value) => super.validate(db, value) && (value == null || value is bool); } @@ -133,11 +133,11 @@ class BoolProperty extends PrimitiveProperty { /// datastore and when reading them back. class IntProperty extends PrimitiveProperty { const IntProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? value) => super.validate(db, value) && (value == null || value is int); } @@ -147,11 +147,11 @@ class IntProperty extends PrimitiveProperty { /// datastore and when reading them back. class DoubleProperty extends PrimitiveProperty { const DoubleProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? 
value) => super.validate(db, value) && (value == null || value is double); } @@ -161,11 +161,11 @@ class DoubleProperty extends PrimitiveProperty { /// datastore and when reading them back. class StringProperty extends PrimitiveProperty { const StringProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? value) => super.validate(db, value) && (value == null || value is String); } @@ -175,21 +175,21 @@ class StringProperty extends PrimitiveProperty { /// datastore and when reading them back. class ModelKeyProperty extends PrimitiveProperty { const ModelKeyProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? value) => super.validate(db, value) && (value == null || value is Key); @override - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { if (value == null) return null; return db.toDatastoreKey(value as Key); } @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object? decodePrimitiveValue(ModelDB db, Object? value) { if (value == null) return null; return db.fromDatastoreKey(value as ds.Key); } @@ -201,7 +201,7 @@ class ModelKeyProperty extends PrimitiveProperty { /// datastore and when reading them back. Blob values will be represented by /// List. class BlobProperty extends PrimitiveProperty { - const BlobProperty({String propertyName, bool required = false}) + const BlobProperty({String? propertyName, bool required = false}) : super(propertyName: propertyName, required: required, indexed: false); // NOTE: We don't validate that the entries of the list are really integers @@ -209,17 +209,17 @@ class BlobProperty extends PrimitiveProperty { // If an untyped list was created the type check will always succeed. i.e. // "[1, true, 'bar'] is List" evaluates to `true` @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? value) => super.validate(db, value) && (value == null || value is List); @override - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { if (value == null) return null; return ds.BlobValue(value as List); } @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object? decodePrimitiveValue(ModelDB db, Object? value) { if (value == null) return null; return (value as ds.BlobValue).bytes; @@ -232,15 +232,15 @@ class BlobProperty extends PrimitiveProperty { /// datastore and when reading them back. class DateTimeProperty extends PrimitiveProperty { const DateTimeProperty( - {String propertyName, bool required = false, bool indexed = true}) + {String? propertyName, bool required = false, bool indexed = true}) : super(propertyName: propertyName, required: required, indexed: indexed); @override - bool validate(ModelDB db, Object value) => + bool validate(ModelDB db, Object? 
value) => super.validate(db, value) && (value == null || value is DateTime); @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object? decodePrimitiveValue(ModelDB db, Object? value) { if (value is int) { return DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true); } @@ -259,11 +259,11 @@ class ListProperty extends Property { // TODO: We want to support optional list properties as well. // Get rid of "required: true" here. const ListProperty(this.subProperty, - {String propertyName, bool indexed = true}) + {String? propertyName, bool indexed = true}) : super(propertyName: propertyName, required: true, indexed: indexed); @override - bool validate(ModelDB db, Object value) { + bool validate(ModelDB db, Object? value) { if (!super.validate(db, value) || value is! List) return false; for (var entry in value) { @@ -273,7 +273,7 @@ class ListProperty extends Property { } @override - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { if (forComparison) { // If we have comparison of list properties (i.e. repeated property names) // the comparison object must not be a list, but the value itself. @@ -303,10 +303,10 @@ class ListProperty extends Property { } @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object decodePrimitiveValue(ModelDB db, Object? value) { if (value == null) return []; if (value is! List) return [subProperty.decodePrimitiveValue(db, value)]; - return (value as List) + return value .map((entry) => subProperty.decodePrimitiveValue(db, entry)) .toList(); } @@ -314,12 +314,12 @@ class ListProperty extends Property { /// A convenience [Property] for list of strings. class StringListProperty extends ListProperty { - const StringListProperty({String propertyName, bool indexed = true}) + const StringListProperty({String? propertyName, bool indexed = true}) : super(const StringProperty(), propertyName: propertyName, indexed: indexed); @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object decodePrimitiveValue(ModelDB db, Object? value) { return (super.decodePrimitiveValue(db, value) as core.List).cast(); } } diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 684bab5c..8cb3a4b5 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; @@ -33,9 +32,12 @@ class Transaction { Transaction(this.db, this._datastoreTransaction); /// Looks up [keys] within this transaction. - Future> lookup(List keys) { - return _lookupHelper(db, keys, - datastoreTransaction: _datastoreTransaction); + Future> lookup(List keys) { + return _lookupHelper( + db, + keys, + datastoreTransaction: _datastoreTransaction, + ); } /// Looks up a single [key] within this transaction, and returns the @@ -47,7 +49,8 @@ class Transaction { /// /// If the [key] is not found within the transaction and [orElse] was not /// specified, then a [KeyNotFoundException] will be thrown. - Future lookupValue(Key key, {T Function() orElse}) async { + Future lookupValue(Key key, + {T Function()? 
orElse}) async { final values = await lookup([key]); assert(values.length == 1); var value = values.single; @@ -62,7 +65,7 @@ class Transaction { } /// Enqueues [inserts] and [deletes] which should be committed at commit time. - void queueMutations({List inserts, List deletes}) { + void queueMutations({List? inserts, List? deletes}) { _checkSealed(); if (inserts != null) { _inserts.addAll(inserts); @@ -76,7 +79,7 @@ class Transaction { /// /// Note that [ancestorKey] is required, since a transaction is not allowed to /// touch/look at an arbitrary number of rows. - Query query(Key ancestorKey, {Partition partition}) { + Query query(Key ancestorKey, {Partition? partition}) { // TODO(#25): The `partition` element is redundant and should be removed. if (partition == null) { partition = ancestorKey.partition; @@ -112,7 +115,7 @@ class Transaction { } } - void _checkSealed({int changeState, bool allowFailed = false}) { + void _checkSealed({int? changeState, bool allowFailed = false}) { if (_state == _TRANSACTION_COMMITTED) { throw StateError('The transaction has already been committed.'); } else if (_state == _TRANSACTION_ROLLED_BACK) { @@ -136,21 +139,21 @@ class Query { }; final DatastoreDB _db; - final ds.Transaction _transaction; + final ds.Transaction? _transaction; final String _kind; - final Partition _partition; - final Key _ancestorKey; + final Partition? _partition; + final Key? _ancestorKey; final List _filters = []; final List _orders = []; - int _offset; - int _limit; + int? _offset; + int? _limit; Query(DatastoreDB dbImpl, - {Partition partition, - Key ancestorKey, - ds.Transaction datastoreTransaction}) + {Partition? partition, + Key? ancestorKey, + ds.Transaction? datastoreTransaction}) : _db = dbImpl, _kind = dbImpl.modelDB.kindName(T), _partition = partition, @@ -169,7 +172,7 @@ class Query { /// * '=' (equal) /// /// [comparisonObject] is the object for comparison. - void filter(String filterString, Object comparisonObject) { + void filter(String filterString, Object? comparisonObject) { var parts = filterString.split(' '); if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) { throw ArgumentError("Invalid filter string '$filterString'."); @@ -187,7 +190,7 @@ class Query { .toDatastoreValue(_kind, name, comparisonObject, forComparison: true); } _filters.add(ds.Filter( - _relationMapping[comparison], propertyName, comparisonObject)); + _relationMapping[comparison]!, propertyName, comparisonObject!)); } /// Adds an order to this [Query]. @@ -225,9 +228,9 @@ class Query { /// return the newest updates performed on the datastore since updates /// will be reflected in the indices in an eventual consistent way. Stream run() { - ds.Key ancestorKey; + ds.Key? ancestorKey; if (_ancestorKey != null) { - ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey); + ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey!); } var query = ds.Query( ancestorKey: ancestorKey, @@ -237,15 +240,24 @@ class Query { offset: _offset, limit: _limit); - ds.Partition partition; + ds.Partition? 
partition; if (_partition != null) { - partition = ds.Partition(_partition.namespace); + partition = ds.Partition(_partition!.namespace); } return StreamFromPages((int pageSize) { - return _db.datastore - .query(query, transaction: _transaction, partition: partition); - }).stream.map(_db.modelDB.fromDatastoreEntity); + if (_transaction != null) { + if (partition != null) { + return _db.datastore + .query(query, transaction: _transaction!, partition: partition); + } + return _db.datastore.query(query, transaction: _transaction!); + } + if (partition != null) { + return _db.datastore.query(query, partition: partition); + } + return _db.datastore.query(query); + }).stream.map((e) => _db.modelDB.fromDatastoreEntity(e)!); } // TODO: @@ -265,12 +277,11 @@ class Query { class DatastoreDB { final ds.Datastore datastore; final ModelDB _modelDB; - Partition _defaultPartition; + final Partition _defaultPartition; - DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition}) - : _modelDB = modelDB ?? ModelDBImpl() { - _defaultPartition = defaultPartition ?? Partition(null); - } + DatastoreDB(this.datastore, {ModelDB? modelDB, Partition? defaultPartition}) + : _modelDB = modelDB ?? ModelDBImpl(), + _defaultPartition = defaultPartition ?? Partition(null); /// The [ModelDB] used to serialize/deserialize objects. ModelDB get modelDB => _modelDB; @@ -304,7 +315,7 @@ class DatastoreDB { } /// Build a query for [kind] models. - Query query({Partition partition, Key ancestorKey}) { + Query query({Partition? partition, Key? ancestorKey}) { // TODO(#26): There is only one case where `partition` is not redundant // Namely if `ancestorKey == null` and `partition != null`. We could // say we get rid of `partition` and enforce `ancestorKey` to @@ -335,7 +346,7 @@ class DatastoreDB { /// /// * [lookupValue], which looks a single value up by its key, requiring a /// successful lookup. - Future> lookup(List keys) { + Future> lookup(List keys) { return _lookupHelper(this, keys); } @@ -348,7 +359,8 @@ class DatastoreDB { /// /// If the [key] is not found in the datastore and [orElse] was not /// specified, then a [KeyNotFoundException] will be thrown. - Future lookupValue(Key key, {T Function() orElse}) async { + Future lookupValue(Key key, + {T Function()? orElse}) async { final values = await lookup([key]); assert(values.length == 1); var value = values.single; @@ -370,18 +382,18 @@ class DatastoreDB { /// /// For transactions, please use `beginTransaction` and it's returned /// [Transaction] object. - Future commit({List inserts, List deletes}) { + Future commit({List? inserts, List? deletes}) { return _commitHelper(this, inserts: inserts, deletes: deletes); } } Future _commitHelper(DatastoreDB db, - {List inserts, - List deletes, - ds.Transaction datastoreTransaction}) { - List entityInserts, entityAutoIdInserts; - List entityDeletes; - var autoIdModelInserts; + {List? inserts, + List? deletes, + ds.Transaction? datastoreTransaction}) { + List? entityInserts, entityAutoIdInserts; + List? entityDeletes; + late var autoIdModelInserts; if (inserts != null) { entityInserts = []; entityAutoIdInserts = []; @@ -402,14 +414,21 @@ Future _commitHelper(DatastoreDB db, if (deletes != null) { entityDeletes = deletes.map(db.modelDB.toDatastoreKey).toList(); } + Future r; + if (datastoreTransaction != null) { + r = db.datastore.commit( + inserts: entityInserts ?? [], + autoIdInserts: entityAutoIdInserts ?? [], + deletes: entityDeletes ?? 
[], + transaction: datastoreTransaction); + } else { + r = db.datastore.commit( + inserts: entityInserts ?? [], + autoIdInserts: entityAutoIdInserts ?? [], + deletes: entityDeletes ?? []); + } - return db.datastore - .commit( - inserts: entityInserts ?? [], - autoIdInserts: entityAutoIdInserts ?? [], - deletes: entityDeletes ?? [], - transaction: datastoreTransaction) - .then((ds.CommitResult result) { + return r.then((ds.CommitResult result) { if (entityAutoIdInserts != null && entityAutoIdInserts.isNotEmpty) { for (var i = 0; i < result.autoIdInsertKeys.length; i++) { var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]); @@ -420,12 +439,18 @@ Future _commitHelper(DatastoreDB db, }); } -Future> _lookupHelper(DatastoreDB db, List keys, - {ds.Transaction datastoreTransaction}) { +Future> _lookupHelper(DatastoreDB db, List keys, + {ds.Transaction? datastoreTransaction}) { var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList(); - return db.datastore - .lookup(entityKeys, transaction: datastoreTransaction) - .then((List entities) { - return entities.map(db.modelDB.fromDatastoreEntity).toList(); + + if (datastoreTransaction != null) { + return db.datastore + .lookup(entityKeys, transaction: datastoreTransaction) + .then((List entities) { + return entities.map(db.modelDB.fromDatastoreEntity).toList(); + }); + } + return db.datastore.lookup(entityKeys).then((List entities) { + return entities.map(db.modelDB.fromDatastoreEntity).toList(); }); } diff --git a/pkgs/gcloud/lib/src/db/exceptions.dart b/pkgs/gcloud/lib/src/db/exceptions.dart index 36dcb800..11c48b1c 100644 --- a/pkgs/gcloud/lib/src/db/exceptions.dart +++ b/pkgs/gcloud/lib/src/db/exceptions.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index 9faa2dba..ba19caee 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; @@ -19,16 +18,16 @@ abstract class ModelDB { ds.Entity toDatastoreEntity(Model model); /// Converts a [ds.Entity] to a [Model] instance. - T fromDatastoreEntity(ds.Entity entity); + T? fromDatastoreEntity(ds.Entity? entity); /// Returns the kind name for instances of [type]. String kindName(Type type); /// Returns the property name used for [fieldName] // TODO: Get rid of this eventually. - String fieldNameToPropertyName(String kind, String fieldName); + String? fieldNameToPropertyName(String kind, String fieldName); /// Converts [value] according to the [Property] named [fieldName] in [kind]. - Object toDatastoreValue(String kind, String fieldName, Object value, + Object? toDatastoreValue(String kind, String fieldName, Object? value, {bool forComparison = false}); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index 719bbaac..c018db30 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. 
All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; @@ -64,7 +63,7 @@ class ModelDBImpl implements ModelDB { var namespace = Partition(datastoreKey.partition.namespace); var key = namespace.emptyKey; for (var element in datastoreKey.elements) { - var type = _type2ModelDesc[_kind2ModelDesc[element.kind]]; + var type = _type2ModelDesc[_kind2ModelDesc[element.kind]!]; if (type == null) { throw StateError( 'Could not find a model associated with kind "${element.kind}". ' @@ -92,7 +91,7 @@ class ModelDBImpl implements ModelDB { while (!currentKey.isEmpty) { var id = currentKey.id; - var modelDescription = _modelDescriptionForType(currentKey.type); + var modelDescription = _modelDescriptionForType(currentKey.type)!; var kind = modelDescription.kindName(this); var useIntegerId = modelDescription.useIntegerId; @@ -107,7 +106,7 @@ class ModelDBImpl implements ModelDB { } elements.add(ds.KeyElement(kind, id)); - currentKey = currentKey.parent; + currentKey = currentKey.parent!; } var partition = currentKey._parent as Partition; return ds.Key(elements.reversed.toList(), @@ -118,7 +117,7 @@ class ModelDBImpl implements ModelDB { @override ds.Entity toDatastoreEntity(Model model) { try { - var modelDescription = _modelDescriptionForType(model.runtimeType); + var modelDescription = _modelDescriptionForType(model.runtimeType)!; return modelDescription.encodeModel(this, model); } catch (error, stack) { throw ArgumentError('Error while encoding entity ($error, $stack).'); @@ -127,9 +126,10 @@ class ModelDBImpl implements ModelDB { /// Converts a [ds.Entity] to a [Model] instance. @override - T fromDatastoreEntity(ds.Entity entity) { - if (entity == null) return null; - + T? fromDatastoreEntity(ds.Entity? entity) { + if (entity == null) { + return null; + } var key = fromDatastoreKey(entity.key); var kind = entity.key.elements.last.kind; var modelDescription = _kind2ModelDesc[kind]; @@ -160,7 +160,7 @@ class ModelDBImpl implements ModelDB { /// Returns the name of the property corresponding to the kind [kind] and /// [fieldName]. @override - String fieldNameToPropertyName(String kind, String fieldName) { + String? fieldNameToPropertyName(String kind, String fieldName) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { throw ArgumentError('The kind "$kind" is unknown.'); @@ -170,7 +170,7 @@ class ModelDBImpl implements ModelDB { /// Converts [value] according to the [Property] named [name] in [type]. @override - Object toDatastoreValue(String kind, String fieldName, Object value, + Object? toDatastoreValue(String kind, String fieldName, Object? value, {bool forComparison = false}) { var modelDescription = _kind2ModelDesc[kind]; if (modelDescription == null) { @@ -186,14 +186,14 @@ class ModelDBImpl implements ModelDB { Map _propertiesForModel( _ModelDescription modelDescription) { - return _modelDesc2Properties[modelDescription]; + return _modelDesc2Properties[modelDescription]!; } - _ModelDescription _modelDescriptionForType(Type type) { - return _modelDesc2Type[type]; + _ModelDescription? _modelDescriptionForType(Type? type) { + return _modelDesc2Type[type!]; } - mirrors.ClassMirror _modelClass(_ModelDescription md) { + mirrors.ClassMirror? _modelClass(_ModelDescription md) { return _modelDesc2ClassMirror[md]; } @@ -226,7 +226,7 @@ class ModelDBImpl implements ModelDB { } void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) { - Kind kindAnnotation; + Kind? 
kindAnnotation; for (var instance in classMirror.metadata) { if (instance.reflectee.runtimeType == Kind) { if (kindAnnotation != null) { @@ -234,7 +234,7 @@ class ModelDBImpl implements ModelDB { 'Cannot have more than one ModelMetadata() annotation ' 'on a Model class'); } - kindAnnotation = instance.reflectee as Kind; + kindAnnotation = instance.reflectee as Kind?; } } @@ -324,8 +324,7 @@ class ModelDBImpl implements ModelDB { .forEach((Symbol fieldSymbol, mirrors.DeclarationMirror decl) { // Look if the symbol is a getter and we have metadata attached to it. if (memberMap.containsKey(fieldSymbol) && - memberMap[fieldSymbol].isGetter && - decl.metadata != null) { + memberMap[fieldSymbol]!.isGetter) { final propertyAnnotations = decl.metadata .map((mirrors.InstanceMirror mirror) => mirror.reflectee) .whereType() @@ -361,7 +360,7 @@ class ModelDBImpl implements ModelDB { } } }); - modelClassMirror = modelClassMirror.superclass; + modelClassMirror = modelClassMirror.superclass!; } return properties; @@ -370,7 +369,7 @@ class ModelDBImpl implements ModelDB { final _originalExpandoModelClass = mirrors.reflectClass(ExpandoModel); final _originalModelClass = mirrors.reflectClass(Model); - bool _isExpandoClass(mirrors.ClassMirror modelClass) { + bool _isExpandoClass(mirrors.ClassMirror? modelClass) { while (modelClass != null && modelClass.superclass != modelClass) { if (modelClass.originalDeclaration == _originalExpandoModelClass) { return true; @@ -425,7 +424,7 @@ class _ModelDescription { ds.Entity encodeModel(ModelDBImpl db, T model) { var key = db.toDatastoreKey(model.key); - var properties = {}; + var properties = {}; var mirror = mirrors.reflect(model); db._propertiesForModel(this).forEach((String fieldName, Property prop) { @@ -457,10 +456,8 @@ class _ModelDescription { } H decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { - if (entity == null) return null; - // NOTE: this assumes a default constructor for the model classes! - var classMirror = db._modelClass(this); + var classMirror = db._modelClass(this)!; var mirror = classMirror.newInstance(const Symbol(''), []); // Set the id and the parent key @@ -477,7 +474,7 @@ class _ModelDescription { mirrors.InstanceMirror mirror, String fieldName, Property prop) { var propertyName = fieldNameToPropertyName(fieldName); - var rawValue = entity.properties[propertyName]; + var rawValue = entity.properties[propertyName!]; var value = prop.decodePrimitiveValue(db, rawValue); if (!prop.validate(db, value)) { @@ -494,15 +491,15 @@ class _ModelDescription { } } - String fieldNameToPropertyName(String fieldName) { + String? fieldNameToPropertyName(String fieldName) { return _field2PropertyName[fieldName]; } - String propertyNameToFieldName(ModelDBImpl db, String propertySearchName) { + String? propertyNameToFieldName(ModelDBImpl db, String propertySearchName) { return _property2FieldName[propertySearchName]; } - Object encodeField(ModelDBImpl db, String fieldName, Object value, + Object? encodeField(ModelDBImpl db, String fieldName, Object? 
value, {bool enforceFieldExists = true, bool forComparison = false}) { var property = db._propertiesForModel(this)[fieldName]; if (property != null) { @@ -525,9 +522,9 @@ class _ModelDescription { // - we may end up dropping added properties in a write // ([usedNames] := [realFieldNames] + [realPropertyNames]) class _ExpandoModelDescription extends _ModelDescription { - Set realFieldNames; - Set realPropertyNames; - Set usedNames; + late Set realFieldNames; + late Set realPropertyNames; + late Set usedNames; _ExpandoModelDescription(String kind, bool useIntegerId) : super(kind, useIntegerId); @@ -545,7 +542,7 @@ class _ExpandoModelDescription extends _ModelDescription { ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) { var entity = super.encodeModel(db, model); var properties = entity.properties; - model.additionalProperties.forEach((String key, Object value) { + model.additionalProperties.forEach((String key, Object? value) { // NOTE: All expanded properties will be indexed. if (!usedNames.contains(key)) { properties[key] = value; @@ -556,11 +553,9 @@ class _ExpandoModelDescription extends _ModelDescription { @override T decodeEntity(ModelDBImpl db, Key key, ds.Entity entity) { - if (entity == null) return null; - var model = super.decodeEntity(db, key, entity) as ExpandoModel; var properties = entity.properties; - properties.forEach((String key, Object value) { + properties.forEach((String key, Object? value) { if (!usedNames.contains(key)) { model.additionalProperties[key] = value; } @@ -588,7 +583,7 @@ class _ExpandoModelDescription extends _ModelDescription { } @override - Object encodeField(ModelDBImpl db, String fieldName, Object value, + Object encodeField(ModelDBImpl db, String fieldName, Object? value, {bool enforceFieldExists = true, bool forComparison = false}) { // The [enforceFieldExists] argument is intentionally ignored. @@ -600,6 +595,6 @@ class _ExpandoModelDescription extends _ModelDescription { // If value != null then superclass will return != null. // TODO: Ensure [value] is primitive in this case. primitiveValue ??= value; - return primitiveValue; + return primitiveValue!; } } diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 539776ee..96a1b032 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 part of gcloud.db; @@ -13,8 +12,8 @@ class Key { // Either KeyImpl or PartitionImpl final Object _parent; - final Type type; - final T id; + final Type? type; + final T? id; Key(Key parent, this.type, this.id) : _parent = parent { if (type == null) { @@ -31,7 +30,7 @@ class Key { id = null; /// Parent of this [Key]. - Key get parent { + Key? get parent { if (_parent is Key) { return _parent as Key; } @@ -44,10 +43,10 @@ class Key { while (obj is! Partition) { obj = (obj as Key)._parent; } - return obj as Partition; + return obj; } - Key append(Type modelType, {U id}) { + Key append(Type modelType, {U? id}) { return Key(this, modelType, id); } @@ -65,7 +64,7 @@ class Key { int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode; /// Converts `Key` to `Key`. - Key cast() => Key(parent, type, id as U); + Key cast() => Key(parent!, type, id as U?); } /// Represents a datastore partition. 
@@ -73,7 +72,7 @@ class Key { /// A datastore is partitioned into namespaces. The default namespace is /// `null`. class Partition { - final String namespace; + final String? namespace; Partition(this.namespace) { if (namespace == '') { @@ -101,10 +100,10 @@ class Partition { /// Every model class has a [id] of type [T] which must be `int` or `String`, and /// a [parentKey]. The [key] getter is returning the key for the model object. abstract class Model { - T id; - Key parentKey; + T? id; + Key? parentKey; - Key get key => parentKey.append(runtimeType, id: id); + Key get key => parentKey!.append(runtimeType, id: id); } /// Superclass for all expanded model classes. @@ -113,10 +112,10 @@ abstract class Model { /// set arbitrary fields on these models. The expanded values must be values /// accepted by the [RawDatastore] implementation. abstract class ExpandoModel extends Model { - final Map additionalProperties = {}; + final Map additionalProperties = {}; @override - Object noSuchMethod(Invocation invocation) { + Object? noSuchMethod(Invocation invocation) { var name = mirrors.MirrorSystem.getName(invocation.memberName); if (name.endsWith('=')) name = name.substring(0, name.length - 1); if (invocation.isGetter) { From 623548de88eecfcf949fd9d1309e5b0477336464 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Mon, 31 May 2021 17:11:17 +0200 Subject: [PATCH 172/239] Prepare 0.8.0 release --- pkgs/gcloud/CHANGELOG.md | 7 ++----- pkgs/gcloud/pubspec.yaml | 3 +-- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 5ddbd021..ae898e71 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,10 +1,7 @@ -## 0.8.0-dev.0 +## 0.8.0 * Require Dart 2.12 or later - * Partial migration to null safety: - * `package:gcloud/common.dart` - * `package:gcloud/http.dart` - * `package:gcloud/service_scope.dart` + * Migration to null safety. ## 0.7.3 * Fixed issue in reflection code affecting `Model` and `Model`, diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 13e70664..71c03c6c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,9 +1,8 @@ name: gcloud -version: 0.8.0-dev.0 +version: 0.8.0 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud - environment: sdk: '>=2.12.0 <3.0.0' From 9896260841b447920dd3668d354dfbf53bdba998 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Sat, 5 Jun 2021 13:25:23 -0700 Subject: [PATCH 173/239] Add dependabot --- pkgs/gcloud/.github/dependabot.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 pkgs/gcloud/.github/dependabot.yml diff --git a/pkgs/gcloud/.github/dependabot.yml b/pkgs/gcloud/.github/dependabot.yml new file mode 100644 index 00000000..430a85e7 --- /dev/null +++ b/pkgs/gcloud/.github/dependabot.yml @@ -0,0 +1,11 @@ +# Set update schedule for GitHub Actions +# See https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/keeping-your-actions-up-to-date-with-dependabot + +version: 2 +updates: + +- package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" From 7afc1f5f94bc7fa26eefc010011f611a80438185 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 5 Jun 2021 13:38:46 -0700 Subject: [PATCH 174/239] Bump dart-lang/setup-dart from 0.3 to 1 (dart-lang/gcloud#118) Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 0.3 to 1. - [Release notes](https://github.com/dart-lang/setup-dart/releases) - [Changelog](https://github.com/dart-lang/setup-dart/blob/main/CHANGELOG.md) - [Commits](https://github.com/dart-lang/setup-dart/compare/v0.3...v1) --- updated-dependencies: - dependency-name: dart-lang/setup-dart dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Kevin Moore --- .../gcloud/.github/workflows/test-package.yml | 30 ++----------------- 1 file changed, 3 insertions(+), 27 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 9794c1a8..b790ff82 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@v2 - - uses: dart-lang/setup-dart@v0.3 + - uses: dart-lang/setup-dart@v1 with: sdk: ${{ matrix.sdk }} - id: install @@ -47,10 +47,10 @@ jobs: matrix: # Add macos-latest and/or windows-latest if relevant for this package. os: [ubuntu-latest] - sdk: [dev] + sdk: [2.12.0, dev] steps: - uses: actions/checkout@v2 - - uses: dart-lang/setup-dart@v0.3 + - uses: dart-lang/setup-dart@v1 with: sdk: ${{ matrix.sdk }} - id: install @@ -59,27 +59,3 @@ jobs: - name: Run VM tests run: dart test --platform vm -P ci if: always() && steps.install.outcome == 'success' - - # Run tests on a matrix consisting of two dimensions: - # 1. OS: ubuntu-latest, (macos-latest, windows-latest) - # 2. release: 2.12.0 - test-legacy-sdk: - needs: analyze - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - # Add macos-latest and/or windows-latest if relevant for this package. 
- os: [ubuntu-latest] - sdk: [2.12.0] - steps: - - uses: actions/checkout@v2 - - uses: dart-lang/setup-dart@v0.3 - with: - sdk: ${{ matrix.sdk }} - - id: install - name: Install dependencies - run: pub get - - name: Run VM tests - run: pub run test --platform vm -P ci - if: always() && steps.install.outcome == 'success' From f47c3e30a0a9b17ce2d002c5d24f2089d6dd6462 Mon Sep 17 00:00:00 2001 From: Istvan Soos Date: Fri, 18 Jun 2021 13:18:28 +0200 Subject: [PATCH 175/239] lookupOrNull in DatastoreBD and Transaction --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/lib/src/db/db.dart | 20 ++++++++++++++++++++ pkgs/gcloud/pubspec.yaml | 2 +- 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index ae898e71..516a0be6 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.1 + + * `lookupOrNull` method in `DatastoreDB` and `Transaction`. + ## 0.8.0 * Require Dart 2.12 or later diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 8cb3a4b5..c86f85cf 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -64,6 +64,16 @@ class Transaction { return value; } + /// Looks up a single [key] in the datastore, and returns the associated + /// [Model] object. + /// + /// If the [key] is not found in the datastore, null will be returned. + Future lookupOrNull(Key key) async { + final values = await lookup([key]); + assert(values.length == 1); + return values.single; + } + /// Enqueues [inserts] and [deletes] which should be committed at commit time. void queueMutations({List? inserts, List? deletes}) { _checkSealed(); @@ -374,6 +384,16 @@ class DatastoreDB { return value; } + /// Looks up a single [key] in the datastore, and returns the associated + /// [Model] object. + /// + /// If the [key] is not found in the datastore, null will be returned. + Future lookupOrNull(Key key) async { + final values = await lookup([key]); + assert(values.length == 1); + return values.single; + } + /// Add [inserts] to the datastore and remove [deletes] from it. /// /// The order of inserts and deletes is not specified. When the commit is done diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 71c03c6c..5bff209b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.0 +version: 0.8.1 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud From ba377c9476256122e37492ad3e3c58d40cbe46a7 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 15 Jul 2021 16:39:30 +0200 Subject: [PATCH 176/239] Fix typo in error message --- pkgs/gcloud/lib/service_scope.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 8ed21730..97b5d5d3 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -174,7 +174,7 @@ class _ServiceScope { var isParentCopy = _parentCopies.contains(serviceScopeKey); if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) { throw ArgumentError( - 'Servie scope already contains key $serviceScopeKey.'); + 'Service scope already contains key $serviceScopeKey.'); } var entry = _RegisteredEntry(serviceScopeKey, value, onScopeExit); From faabf94d8da532501662a17fb5431f2764e0fef6 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Fri, 16 Jul 2021 15:16:31 +0200 Subject: [PATCH 177/239] Fix `Page.next()` when `Page.isLast` is `true`. Prior to null-safety migration `Page.next()` would return `Future.value(null)` when `Page.isLast` is `true`. This was not correctly migrated to null-safety as a result code-bases that are fully migrated to null-safety would get a runtime error. Code-bases that were not migrated to null-safety or only partially migrated to null-safety would get `null` in a result that is not nullable. This affects Pub-Sub and GCS clients users, if and only if they call `Page.next()` when `Page.isLast` is `true`, and their code-bases are not migrated to null-safety. The Cloud Datastore client already throws an `Error`, so no-one is broken by changing which `Error` is thrown as we generally do not make promises about which `Error` is thrown. --- pkgs/gcloud/CHANGELOG.md | 8 ++ pkgs/gcloud/lib/common.dart | 3 +- pkgs/gcloud/lib/src/datastore_impl.dart | 4 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 4 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 10 +- pkgs/gcloud/lib/src/storage_impl.dart | 12 +- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/common.dart | 2 +- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 23 ++-- pkgs/gcloud/test/pubsub/pubsub_test.dart | 109 ++++++++++--------- 10 files changed, 100 insertions(+), 77 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 516a0be6..a68b1c9e 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,11 @@ +## 0.8.2 + + * **BREAKING CHANGE:** `Page.next()` throws if `Page.isLast`, this change only + affects code not migrated to null-safety, when paging through results in + pub-sub and storage without checking `Page.isLast`. + Code fully migrated to null-safety will have experienced a runtime null check + error, and paging code for datastore already throw an `Error`. + ## 0.8.1 * `lookupOrNull` method in `DatastoreDB` and `Transaction`. diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index 6f311607..f8e00c5c 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -21,8 +21,7 @@ abstract class Page { /// /// The future returned completes with the next page or results. /// - /// If [next] is called on the last page the returned future completes - /// with `null`. + /// Throws if [next] is called on the last page. 
Future> next({int pageSize}); } diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 92631170..5d86fc8e 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -619,7 +619,9 @@ class QueryPageImpl implements Page { // the query. if (isLast) { return Future.sync(() { - throw ArgumentError('Cannot call next() on last page.'); + throw StateError( + 'Page.next() cannot be called when Page.isLast == true', + ); }); } diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index c018db30..fc4749a7 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -535,7 +535,9 @@ class _ExpandoModelDescription extends _ModelDescription { realFieldNames = Set.from(_field2PropertyName.keys); realPropertyNames = Set.from(_property2FieldName.keys); - usedNames = {}..addAll(realFieldNames)..addAll(realPropertyNames); + usedNames = {} + ..addAll(realFieldNames) + ..addAll(realPropertyNames); } @override diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 82f35d3a..3ba28f67 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -477,8 +477,10 @@ class _TopicPageImpl implements Page { bool get isLast => _nextPageToken == null; @override - Future> next({int? pageSize}) { - if (isLast) return Future.value(null); + Future> next({int? pageSize}) async { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } final pageSize_ = pageSize ?? _pageSize; return _api._listTopics(pageSize_, _nextPageToken).then((response) { @@ -509,7 +511,9 @@ class _SubscriptionPageImpl implements Page { @override Future> next({int? pageSize}) { - if (_nextPageToken == null) return Future.value(null); + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } final pageSize_ = pageSize ?? _pageSize; return _api diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 5bf4e775..6370280d 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -321,8 +321,10 @@ class _BucketPageImpl implements Page { bool get isLast => _nextPageToken == null; @override - Future> next({int? pageSize}) { - if (isLast) return Future.value(null); + Future> next({int? pageSize}) async { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } pageSize ??= _pageSize; return _storage._listBuckets(pageSize!, _nextPageToken).then((response) { @@ -354,8 +356,10 @@ class _ObjectPageImpl implements Page { bool get isLast => _nextPageToken == null; @override - Future> next({int? pageSize}) { - if (isLast) return Future.value(null); + Future> next({int? pageSize}) async { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } pageSize ??= _pageSize; return _bucket diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 5bff209b..3bfb96ec 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.1 +version: 0.8.2 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 5a3f77f9..0e1e5c14 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -95,7 +95,7 @@ class MockClient extends http.BaseClient { } Future respondEmpty() { - return Future.value(http.Response('', 200, headers: RESPONSE_HEADERS)); + return Future.value(http.Response('{}', 200, headers: RESPONSE_HEADERS)); } Future respondInitiateResumableUpload(project) { diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index d47a7b34..cb051307 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -1,7 +1,6 @@ // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 @Tags(['e2e']) @Timeout(Duration(seconds: 120)) @@ -13,16 +12,19 @@ import 'package:test/test.dart'; import '../common_e2e.dart'; void main() { - PubSub pubsub; - String project; - String prefix; - Client client; + late PubSub pubsub; + late String project; + late String prefix; + late Client client; - setUpAll(() { + setUpAll(() async { // Generate a unique prefix for all names generated by the tests. var id = DateTime.now().millisecondsSinceEpoch; prefix = 'dart-e2e-test-$id'; - return withAuthClient(PubSub.SCOPES, (String _project, httpClient) async { + await withAuthClient(PubSub.SCOPES, ( + String _project, + httpClient, + ) async { // Share the same pubsub connection for all tests. pubsub = PubSub(httpClient, _project); project = _project; @@ -34,11 +36,6 @@ void main() { var leftovers = false; var cleanupErrors = false; - // Don't cleanup if setup failed - if (pubsub == null) { - return; - } - print('checking for leftover subscriptions'); try { // Try to delete any leftover subscriptions from the tests. @@ -189,7 +186,7 @@ void main() { expect(await topic.publishString('Hello, world!'), isNull); var pullEvent = await subscription.pull(); expect(pullEvent, isNotNull); - expect(pullEvent.message.asString, 'Hello, world!'); + expect(pullEvent!.message.asString, 'Hello, world!'); expect(await pullEvent.acknowledge(), isNull); await pubsub.deleteSubscription(subscriptionName); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 4b759323..5a9d54ef 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 import 'dart:async'; import 'dart:convert'; @@ -144,7 +143,7 @@ void main() { pubsub.ListTopicsResponse response, int first, int count) { response.topics = []; for (var i = 0; i < count; i++) { - response.topics.add(pubsub.Topic()..name = 'topic-${first + i}'); + response.topics!.add(pubsub.Topic()..name = 'topic-${first + i}'); } } @@ -154,7 +153,7 @@ void main() { MockClient mock, int n, int pageSize, [ - int totalCalls, + int? totalCalls, ]) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. 
@@ -341,14 +340,14 @@ void main() { return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); mock.clear(); registerQueryMock(mock, 0, 20); return api.pageTopics(pageSize: 20).then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); })); })); }); @@ -361,14 +360,14 @@ void main() { return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); mock.clear(); registerQueryMock(mock, 20, 20); return api.pageTopics(pageSize: 20).then(expectAsync1((page) { expect(page.items.length, 20); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); })); })); }); @@ -387,14 +386,15 @@ void main() { expect(page.isLast, pageCount == totalPages); expect(page.items.length, page.isLast ? n - (totalPages - 1) * pageSize : pageSize); - page.next().then(expectAsync1((page) { - if (page != null) { + if (!page.isLast) { + page.next().then(expectAsync1((page) { handlePage(page); - } else { - expect(pageCount, totalPages); - completer.complete(); - } - })); + })); + } else { + expect(() => page.next(), throwsStateError); + expect(pageCount, totalPages); + completer.complete(); + } } var api = PubSub(mock, PROJECT); @@ -536,7 +536,7 @@ void main() { pubsub.ListSubscriptionsResponse response, int first, int count) { response.subscriptions = []; for (var i = 0; i < count; i++) { - response.subscriptions + response.subscriptions! .add(pubsub.Subscription()..name = 'subscription-${first + i}'); } } @@ -544,7 +544,7 @@ void main() { // Mock that expect/generates [n] subscriptions in pages of page size // [pageSize]. void registerQueryMock(MockClient mock, int n, int pageSize, - {String topic, int totalCalls}) { + {String? topic, int? totalCalls}) { var totalPages = (n + pageSize - 1) ~/ pageSize; // No items still generate one request. if (totalPages == 0) totalPages = 1; @@ -577,13 +577,14 @@ void main() { } group('list', () { - Future q(String topic, int count) { + Future q(String? topic, int count) { var mock = mockClient(); registerQueryMock(mock, count, 50, topic: topic); var api = PubSub(mock, PROJECT); - return api - .listSubscriptions(topic) + return (topic == null + ? api.listSubscriptions() + : api.listSubscriptions(topic)) .listen(expectAsync1((_) => null, count: count)) .asFuture(); } @@ -735,26 +736,28 @@ void main() { }); group('page', () { - Future emptyTest(String topic) { + Future emptyTest(String? topic) { var mock = mockClient(); registerQueryMock(mock, 0, 50, topic: topic); var api = PubSub(mock, PROJECT); - return api - .pageSubscriptions(topic: topic) + return (topic == null + ? api.pageSubscriptions() + : api.pageSubscriptions(topic: topic)) .then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); mock.clear(); registerQueryMock(mock, 0, 20, topic: topic); - return api - .pageSubscriptions(topic: topic, pageSize: 20) + return (topic == null + ? 
api.pageSubscriptions(pageSize: 20) + : api.pageSubscriptions(topic: topic, pageSize: 20)) .then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); })); })); } @@ -764,26 +767,28 @@ void main() { emptyTest('topic'); }); - Future singleTest(String topic) { + Future singleTest(String? topic) { var mock = mockClient(); registerQueryMock(mock, 10, 50, topic: topic); var api = PubSub(mock, PROJECT); - return api - .pageSubscriptions(topic: topic) + return (topic == null + ? api.pageSubscriptions() + : api.pageSubscriptions(topic: topic)) .then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); mock.clear(); registerQueryMock(mock, 20, 20, topic: topic); - return api - .pageSubscriptions(topic: topic, pageSize: 20) + return (topic == null + ? api.pageSubscriptions(pageSize: 20) + : api.pageSubscriptions(topic: topic, pageSize: 20)) .then(expectAsync1((page) { expect(page.items.length, 20); expect(page.isLast, isTrue); - expect(page.next(), completion(isNull)); + expect(() => page.next(), throwsStateError); })); })); } @@ -793,7 +798,7 @@ void main() { singleTest('topic'); }); - Future multipleTest(int n, int pageSize, String topic) { + Future multipleTest(int n, int pageSize, String? topic) { var totalPages = (n + pageSize - 1) ~/ pageSize; var pageCount = 0; @@ -806,19 +811,21 @@ void main() { expect(page.isLast, pageCount == totalPages); expect(page.items.length, page.isLast ? n - (totalPages - 1) * pageSize : pageSize); - page.next().then((page) { - if (page != null) { + if (!page.isLast) { + page.next().then((page) { handlingPage(page); - } else { - expect(pageCount, totalPages); - completer.complete(); - } - }); + }); + } else { + expect(() => page.next(), throwsStateError); + expect(pageCount, totalPages); + completer.complete(); + } } var api = PubSub(mock, PROJECT); - api - .pageSubscriptions(topic: topic, pageSize: pageSize) + (topic == null + ? 
api.pageSubscriptions(pageSize: pageSize) + : api.pageSubscriptions(topic: topic, pageSize: pageSize)) .then(handlingPage); return completer.future; @@ -889,9 +896,9 @@ void main() { return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { - expect(request.messages.length, 1); - expect(request.messages[0].data, messageBase64); - expect(request.messages[0].attributes, isNull); + expect(request.messages!.length, 1); + expect(request.messages![0].data, messageBase64); + expect(request.messages![0].attributes, isNull); return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); })); @@ -922,11 +929,11 @@ void main() { return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { - expect(request.messages.length, 1); - expect(request.messages[0].data, messageBase64); - expect(request.messages[0].attributes, isNotNull); - expect(request.messages[0].attributes.length, attributes.length); - expect(request.messages[0].attributes, attributes); + expect(request.messages!.length, 1); + expect(request.messages![0].data, messageBase64); + expect(request.messages![0].attributes, isNotNull); + expect(request.messages![0].attributes!.length, attributes.length); + expect(request.messages![0].attributes, attributes); return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); })); From 6ff639dd049407bc01b66fa20feba939e8cc8987 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Sat, 17 Jul 2021 13:08:55 +0200 Subject: [PATCH 178/239] Added a constructor to Datastore --- pkgs/gcloud/lib/datastore.dart | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index b8aba812..c26f155d 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -11,8 +11,10 @@ library gcloud.datastore; import 'dart:async'; +import 'package:http/http.dart' as http; import 'common.dart' show Page; import 'service_scope.dart' as ss; +import 'src/datastore_impl.dart' show DatastoreImpl; const Symbol _datastoreKey = #gcloud.datastore; @@ -362,6 +364,23 @@ abstract class Transaction {} /// It can be used to insert/update/delete [Entity]s, lookup/query [Entity]s /// and allocate IDs from the auto ID allocation policy. abstract class Datastore { + /// List of required OAuth2 scopes for Datastore operation. + static const Scopes = DatastoreImpl.SCOPES; + + /// Access Datastore using an authenticated client. + /// + /// The [client] is an authenticated HTTP client. This client must + /// provide access to at least the scopes in `Datastore.Scopes`. + /// + /// The [project] is the name of the Google Cloud project. + /// + /// Returs an object providing access to Datastore. The passed-in [client] + /// will not be closed automatically. The caller is responsible for closing + /// it. + factory Datastore(http.Client client, String project) { + return DatastoreImpl(client, project); + } + /// Allocate integer IDs for the partially populated [keys] given as argument. /// /// The returned [Key]s will be fully populated with the allocated IDs. 
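
The factory added in this patch simply wires an authenticated HTTP client to `DatastoreImpl`, keeping the implementation class out of the public API surface. A minimal usage sketch is shown below; it assumes the caller has already obtained an `http.Client` authorized for the scopes in `Datastore.Scopes` (for example via `package:googleapis_auth`), and the project id `'my-project'` and the `Greeting` kind are placeholder names, not part of the patch.

import 'package:gcloud/datastore.dart';
import 'package:http/http.dart' as http;

Future<void> datastoreExample(http.Client authClient) async {
  // `authClient` must already be authorized for the scopes in
  // `Datastore.Scopes`; the factory does not take ownership of it.
  final datastore = Datastore(authClient, 'my-project');

  // Look up a single entity by key ('Greeting' and the id 1 are placeholders).
  final key = Key([KeyElement('Greeting', 1)], partition: Partition.DEFAULT);
  final entities = await datastore.lookup([key]);
  print(entities.single?.properties);

  // The caller remains responsible for closing the client when done.
  authClient.close();
}

Since the returned object is just a `Datastore`, code written against the abstract interface (including the `DatastoreDB` layer in `db.dart`, whose constructor takes a `Datastore`) can be handed this instance without importing `src/datastore_impl.dart` directly.
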
From cd30a3909d7cc6ba0b1c1733731853ee1ca1e3d4 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Sat, 17 Jul 2021 13:09:02 +0200 Subject: [PATCH 179/239] Migrated remaining tests --- .../datastore/e2e/datastore_test_impl.dart | 123 ++++++----- pkgs/gcloud/test/datastore/e2e/utils.dart | 10 +- .../gcloud/test/datastore/error_matchers.dart | 2 +- pkgs/gcloud/test/db/db_test.dart | 25 ++- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 63 +++--- .../test/db/e2e/metamodel_test_impl.dart | 26 +-- pkgs/gcloud/test/db/model_db_test.dart | 2 +- .../db/model_dbs/duplicate_fieldname.dart | 6 +- .../test/db/model_dbs/duplicate_kind.dart | 2 +- .../test/db/model_dbs/duplicate_property.dart | 6 +- .../db/model_dbs/multiple_annotations.dart | 2 +- .../db/model_dbs/no_default_constructor.dart | 2 +- pkgs/gcloud/test/db/properties_test.dart | 208 +++++++++--------- 13 files changed, 252 insertions(+), 225 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 99ca833b..b92ce5a1 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library datastore_test; @@ -48,7 +47,7 @@ Future> consumePages(FirstPageProvider provider) { return StreamFromPages(provider).stream.toList(); } -void runTests(Datastore datastore, String namespace) { +void runTests(Datastore datastore, String? namespace) { final partition = Partition(namespace); Future withTransaction(FutureOr Function(Transaction t) f, @@ -97,7 +96,7 @@ void runTests(Datastore datastore, String namespace) { } } - Future> lookup(List keys, {bool transactional = true}) { + Future> lookup(List keys, {bool transactional = true}) { if (transactional) { return withTransaction((Transaction transaction) { return datastore.lookup(keys, transaction: transaction); @@ -168,9 +167,11 @@ void runTests(Datastore datastore, String namespace) { group('insert', () { Future> testInsert(List entities, {bool transactional = false, bool xg = false, bool unnamed = true}) { - Future> test(Transaction transaction) { - return datastore - .commit(autoIdInserts: entities, transaction: transaction) + Future> test(Transaction? transaction) { + return (transaction == null + ? datastore.commit(autoIdInserts: entities) + : datastore.commit( + autoIdInserts: entities, transaction: transaction)) .then((CommitResult result) { expect(result.autoIdInsertKeys.length, equals(entities.length)); @@ -196,10 +197,12 @@ void runTests(Datastore datastore, String namespace) { FutureOr testInsertNegative(List entities, {bool transactional = false, bool xg = false}) { - void test(Transaction transaction) { + void test(Transaction? transaction) { expect( - datastore.commit( - autoIdInserts: entities, transaction: transaction), + transaction == null + ? 
datastore.commit(autoIdInserts: entities) + : datastore.commit( + autoIdInserts: entities, transaction: transaction), throwsA(isApplicationError)); } @@ -218,8 +221,8 @@ void runTests(Datastore datastore, String namespace) { test('insert', () { return testInsert(unnamedEntities5, transactional: false).then((keys) { return delete(keys).then((_) { - return lookup(keys).then((List entities) { - entities.forEach((Entity e) => expect(e, isNull)); + return lookup(keys).then((List entities) { + entities.forEach((Entity? e) => expect(e, isNull)); }); }); }); @@ -228,8 +231,8 @@ void runTests(Datastore datastore, String namespace) { test('insert_transactional', () { return testInsert(unnamedEntities1, transactional: true).then((keys) { return delete(keys).then((_) { - return lookup(keys).then((List entities) { - entities.forEach((Entity e) => expect(e, isNull)); + return lookup(keys).then((List entities) { + entities.forEach((Entity? e) => expect(e, isNull)); }); }); }); @@ -239,8 +242,8 @@ void runTests(Datastore datastore, String namespace) { return testInsert(unnamedEntities5, transactional: true, xg: true) .then((keys) { return delete(keys).then((_) { - return lookup(keys).then((List entities) { - entities.forEach((Entity e) => expect(e, isNull)); + return lookup(keys).then((List entities) { + entities.forEach((Entity? e) => expect(e, isNull)); }); }); }); @@ -315,8 +318,8 @@ void runTests(Datastore datastore, String namespace) { isTrue); } - Future test(Transaction transaction) { - return datastore.lookup(keysToLookup).then((List entities) { + Future test(Transaction? transaction) { + return datastore.lookup(keysToLookup).then((List entities) { expect(entities.length, equals(keysToLookup.length)); if (negative) { for (var i = 0; i < entities.length; i++) { @@ -324,9 +327,9 @@ void runTests(Datastore datastore, String namespace) { } } else { for (var i = 0; i < entities.length; i++) { - expect(compareKey(entities[i].key, keysToLookup[i]), isTrue); + expect(compareKey(entities[i]!.key, keysToLookup[i]), isTrue); expect( - compareEntity(entities[i], entitiesToLookup[i], + compareEntity(entities[i]!, entitiesToLookup[i], ignoreIds: !named), isTrue); } @@ -396,7 +399,7 @@ void runTests(Datastore datastore, String namespace) { group('delete', () { Future testDelete(List keys, {bool transactional = false, bool xg = false}) { - Future test(Transaction transaction) { + Future test(Transaction? transaction) { return datastore.commit(deletes: keys).then((_) { if (transaction != null) { return datastore.commit(transaction: transaction); @@ -467,7 +470,7 @@ void runTests(Datastore datastore, String namespace) { return withTransaction((Transaction transaction) { return datastore .lookup(keys, transaction: transaction) - .then((List entities) { + .then((List entities) { return datastore.rollback(transaction); }); }, xg: xg); @@ -493,10 +496,12 @@ void runTests(Datastore datastore, String namespace) { group('empty_commit', () { Future testEmptyCommit(List keys, {bool transactional = false, bool xg = false}) { - Future test(Transaction transaction) { - return datastore - .lookup(keys, transaction: transaction) - .then((List entities) { + Future test(Transaction? transaction) { + return (transaction == null + ? 
datastore.lookup(keys) + : datastore.lookup(keys, transaction: transaction)) + .then((List entities) { + if (transaction == null) return datastore.commit(); return datastore.commit(transaction: transaction); }); } @@ -544,11 +549,11 @@ void runTests(Datastore datastore, String namespace) { group('conflicting_transaction', () { Future testConflictingTransaction(List entities, {bool xg = false}) { - Future test(List entities, Transaction transaction, value) { + Future test(List entities, Transaction transaction, value) { // Change entities: - var changedEntities = List.filled(entities.length, null); + var changedEntities = List.filled(entities.length, null); for (var i = 0; i < entities.length; i++) { - var entity = entities[i]; + var entity = entities[i]!; var newProperties = Map.from(entity.properties); for (var prop in newProperties.keys) { newProperties[prop] = '${newProperties[prop]}conflict$value'; @@ -556,7 +561,8 @@ void runTests(Datastore datastore, String namespace) { changedEntities[i] = Entity(entity.key, newProperties); } return datastore.commit( - inserts: changedEntities, transaction: transaction); + inserts: changedEntities as List, + transaction: transaction); } // Insert first @@ -573,11 +579,11 @@ void runTests(Datastore datastore, String namespace) { return Future.wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction - var lookups = >>[]; + List>> lookups = >>[]; for (var transaction in transactions) { lookups.add(datastore.lookup(keys, transaction: transaction)); } - return Future.wait(lookups).then((List> results) { + return Future.wait(lookups).then((List> results) { // Do a conflicting commit in every transaction. var commits = []; for (var i = 0; i < transactions.length; i++) { @@ -608,13 +614,13 @@ void runTests(Datastore datastore, String namespace) { group('query', () { Future> testQuery(String kind, - {List filters, - List orders, + {List? filters, + List? orders, bool transactional = false, bool xg = false, - int offset, - int limit}) { - Future> test(Transaction transaction) { + int? offset, + int? limit}) { + Future> test(Transaction? transaction) { var query = Query( kind: kind, filters: filters, @@ -640,13 +646,13 @@ void runTests(Datastore datastore, String namespace) { } Future testQueryAndCompare(String kind, List expectedEntities, - {List filters, - List orders, + {List? filters, + List? orders, bool transactional = false, bool xg = false, bool correctOrder = true, - int offset, - int limit}) { + int? offset, + int? limit}) { return testQuery(kind, filters: filters, orders: orders, @@ -676,7 +682,7 @@ void runTests(Datastore datastore, String namespace) { } Future testOffsetLimitQuery(String kind, List expectedEntities, - {List orders, bool transactional = false, bool xg = false}) { + {List? orders, bool transactional = false, bool xg = false}) { // We query for all subsets of expectedEntities // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. 
var queryTests = []; @@ -705,7 +711,7 @@ void runTests(Datastore datastore, String namespace) { limit: expectedEntities.length * 10); }); - return Future.forEach(queryTests, (f) => f()); + return Future.forEach(queryTests, (dynamic f) => f()); } const TEST_QUERY_KIND = 'TestQueryKind'; @@ -862,7 +868,7 @@ void runTests(Datastore datastore, String namespace) { () => testQueryAndCompare(TEST_QUERY_KIND, [], transactional: false, filters: filters, orders: orders), ]; - return Future.forEach(tests, (f) => f()); + return Future.forEach(tests, (dynamic f) => f()); }); }); @@ -900,14 +906,14 @@ void runTests(Datastore datastore, String namespace) { () { return datastore .lookup([rootKey, subKey, subSubKey, subSubKey2]).then( - (List entities) { + (List entities) { expect(entities.length, 4); expect(entities[0], isNull); expect(entities[1], isNull); expect(entities[2], isNotNull); expect(entities[3], isNotNull); - expect(compareEntity(entity, entities[2]), isTrue); - expect(compareEntity(entity2, entities[3]), isTrue); + expect(compareEntity(entity, entities[2]!), isTrue); + expect(compareEntity(entity2, entities[3]!), isTrue); }); }, @@ -1020,28 +1026,29 @@ void runTests(Datastore datastore, String namespace) { return datastore.commit(deletes: [subSubKey, subSubKey2]); } ]; - return Future.forEach(futures, (f) => f()).then(expectAsync1((_) {})); + return Future.forEach(futures, (dynamic f) => f()) + .then(expectAsync1((_) {})); }); }); }); }); } -Future cleanupDB(Datastore db, String namespace) { - Future> getKinds(String namespace) { +Future cleanupDB(Datastore db, String? namespace) { + Future> getKinds(String? namespace) { var partition = Partition(namespace); var q = Query(kind: '__kind__'); return consumePages((_) => db.query(q, partition: partition)) .then((List entities) { return entities - .map((Entity e) => e.key.elements.last.id as String) - .where((String kind) => !kind.contains('__')) + .map((Entity e) => e.key.elements.last.id as String?) + .where((String? kind) => !kind!.contains('__')) .toList(); }); } // cleanup() will call itself again as long as the DB is not clean. - Future cleanup(String namespace, String kind) { + Future cleanup(String? namespace, String? kind) { var partition = Partition(namespace); var q = Query(kind: kind, limit: 500); return consumePages((_) => db.query(q, partition: partition)) @@ -1054,8 +1061,8 @@ Future cleanupDB(Datastore db, String namespace) { }); } - return getKinds(namespace).then((List kinds) { - return Future.forEach(kinds, (String kind) { + return getKinds(namespace).then((List kinds) { + return Future.forEach(kinds, (String? kind) { return cleanup(namespace, kind); }); }); @@ -1076,10 +1083,10 @@ Future waitUntilEntitiesHelper( keysByKind.putIfAbsent(key.elements.last.kind, () => []).add(key); } - Future waitForKeys(String kind, List keys) { + Future waitForKeys(String kind, List? keys) { var q = Query(kind: kind); return consumePages((_) => db.query(q, partition: p)).then((entities) { - for (var key in keys) { + for (var key in keys!) 
{ var found = false; for (var entity in entities) { if (key == entity.key) found = true; @@ -1100,8 +1107,8 @@ Future waitUntilEntitiesHelper( } Future main() async { - Datastore datastore; - Client client; + late Datastore datastore; + late Client client; var scopes = datastore_impl.DatastoreImpl.SCOPES; await withAuthClient(scopes, (String project, Client httpClient) { diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index c26650b9..7f508d92 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library raw_datastore_test_utils; @@ -20,7 +20,7 @@ const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; final TEST_BLOB_INDEXED_VALUE = BlobValue([0xaa, 0xaa, 0xff, 0xff]); Key buildKey(int i, - {Function idFunction, String kind = TEST_KIND, Partition p}) { + {Function? idFunction, String kind = TEST_KIND, Partition? p}) { var path = [KeyElement(kind, idFunction == null ? null : idFunction(i))]; return Key(path, partition: p ?? Partition.DEFAULT); } @@ -41,7 +41,7 @@ Map buildProperties(int i) { } List buildKeys(int from, int to, - {Function idFunction, String kind = TEST_KIND, Partition partition}) { + {Function? idFunction, String kind = TEST_KIND, Partition? partition}) { var keys = []; for (var i = from; i < to; i++) { keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); @@ -50,7 +50,7 @@ List buildKeys(int from, int to, } List buildEntities(int from, int to, - {Function idFunction, String kind = TEST_KIND, Partition partition}) { + {Function? idFunction, String kind = TEST_KIND, Partition? partition}) { var entities = []; var unIndexedProperties = {}; for (var i = from; i < to; i++) { @@ -64,7 +64,7 @@ List buildEntities(int from, int to, } List buildEntityWithAllProperties(int from, int to, - {String kind = TEST_KIND, Partition partition}) { + {String kind = TEST_KIND, Partition? partition}) { var us42 = const Duration(microseconds: 42); var unIndexed = {'blobProperty'}; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 9ee1c4e1..65ebce5e 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library error_matchers; diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index 464ee352..6240eb57 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -1,23 +1,31 @@ // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-// @dart=2.9 library gcloud.db_test; import 'dart:mirrors' show reflectClass; +import 'package:gcloud/datastore.dart' as datastore; import 'package:gcloud/db.dart'; -import 'package:meta/meta.dart'; +import 'package:http/http.dart' as http; import 'package:test/test.dart'; @Kind() class Foobar extends Model {} +class _FakeHttpClient extends http.BaseClient { + @override + Future send(http.BaseRequest request) { + throw UnimplementedError('FakeHttpClient cannot make requests'); + } +} + void main() { group('db', () { + final ds = datastore.Datastore(_FakeHttpClient(), ''); test('default-partition', () { - var db = DatastoreDB(null); + var db = DatastoreDB(ds); // Test defaultPartition expect(db.defaultPartition.namespace, isNull); @@ -35,7 +43,7 @@ void main() { test('non-default-partition', () { var nsDb = - DatastoreDB(null, defaultPartition: Partition('foobar-namespace')); + DatastoreDB(ds, defaultPartition: Partition('foobar-namespace')); // Test defaultPartition expect(nsDb.defaultPartition.namespace, 'foobar-namespace'); @@ -58,7 +66,8 @@ void main() { expect(hasDefaultConstructor(RequiredArguments), isFalse); expect(hasDefaultConstructor(OnlyPositionalArguments), isTrue); expect(hasDefaultConstructor(OnlyNamedArguments), isTrue); - expect(hasDefaultConstructor(RequiredNamedArguments), isFalse); + // TODO: Figure out how mirrors can detect 'required' named parameters. + // expect(hasDefaultConstructor(RequiredNamedArguments), isFalse); expect(hasDefaultConstructor(DefaultArgumentValues), isTrue); }); }); @@ -85,15 +94,15 @@ class RequiredArguments { } class OnlyPositionalArguments { - const OnlyPositionalArguments([int arg, int arg2]); + const OnlyPositionalArguments([int? arg, int? arg2]); } class OnlyNamedArguments { - const OnlyNamedArguments({int arg, int arg2}); + const OnlyNamedArguments({int? arg, int? arg2}); } class RequiredNamedArguments { - const RequiredNamedArguments({int arg1, @required int arg2}); + const RequiredNamedArguments({int? arg1, required int arg2}); } class DefaultArgumentValues { diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 73edec97..2a48ec02 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 library db_test; @@ -58,13 +57,13 @@ import '../../datastore/e2e/datastore_test_impl.dart' as datastore_test; @db.Kind() class Person extends db.Model { @db.StringProperty() - String name; + String? name; @db.IntProperty() - int age; + int? age; @db.ModelKeyProperty(propertyName: 'mangledWife') - db.Key wife; + db.Key? wife; @override bool operator ==(Object other) => sameAs(other); @@ -84,13 +83,13 @@ class Person extends db.Model { @db.Kind(idType: db.IdType.String) class PersonStringId extends db.Model { - String get name => id; + String? get name => id; @db.IntProperty() - int age; + int? age; @db.ModelKeyProperty(propertyName: 'mangledWife') - db.Key wife; + db.Key? wife; @override bool operator ==(Object other) => sameAs(other); @@ -110,7 +109,7 @@ class PersonStringId extends db.Model { @db.Kind() class User extends Person { @db.StringProperty() - String nickname; + String? 
nickname; @db.StringListProperty(propertyName: 'language') List languages = const []; @@ -121,7 +120,7 @@ class User extends Person { return false; } - var user = other as User; + var user = other; if (languages == null) { if (user.languages == null) return true; return false; @@ -146,10 +145,10 @@ class User extends Person { @db.Kind() class ExpandoPerson extends db.ExpandoModel { @db.StringProperty() - String name; + String? name; @db.StringProperty(propertyName: 'NN') - String nickname; + String? nickname; @override bool operator ==(Object other) { @@ -170,10 +169,12 @@ class ExpandoPerson extends db.ExpandoModel { Future sleep(Duration duration) => Future.delayed(duration); -void runTests(db.DatastoreDB store, String namespace) { - var partition = store.newPartition(namespace); +void runTests(db.DatastoreDB store, String? namespace) { + var partition = namespace != null + ? store.newPartition(namespace) + : store.defaultPartition; - void compareModels(List expectedModels, List models, + void compareModels(List expectedModels, List models, {bool anyOrder = false}) { expect(models.length, equals(expectedModels.length)); if (anyOrder) { @@ -205,7 +206,7 @@ void runTests(db.DatastoreDB store, String namespace) { return commitTransaction.commit(); }).then((_) { return store.withTransaction((db.Transaction deleteTransaction) { - return deleteTransaction.lookup(keys).then((List models) { + return deleteTransaction.lookup(keys).then((List models) { compareModels(objects, models); deleteTransaction.queueMutations(deletes: keys); return deleteTransaction.commit(); @@ -214,12 +215,12 @@ void runTests(db.DatastoreDB store, String namespace) { }); } else { return store.commit(inserts: objects).then(expectAsync1((_) { - return store.lookup(keys).then(expectAsync1((List models) { + return store.lookup(keys).then(expectAsync1((List models) { compareModels(objects, models); return store.commit(deletes: keys).then(expectAsync1((_) { return store .lookup(keys) - .then(expectAsync1((List models) { + .then(expectAsync1((List models) { for (var i = 0; i < models.length; i++) { expect(models[i], isNull); } @@ -233,9 +234,7 @@ void runTests(db.DatastoreDB store, String namespace) { group('key', () { test('equal_and_hashcode', () { var k1 = store.emptyKey.append(User, id: 10).append(Person, id: 12); - var k2 = store - .newPartition(null) - .emptyKey + var k2 = store.defaultPartition.emptyKey .append(User, id: 10) .append(Person, id: 12); expect(k1, equals(k2)); @@ -407,7 +406,7 @@ void runTests(db.DatastoreDB store, String namespace) { // because an id doesn't need to be globally unique, only under // entities with the same parent. - return store.lookup(keys).then(expectAsync1((List models) { + return store.lookup(keys).then(expectAsync1((List models) { // Since the id/parentKey fields are set after commit and a lookup // returns new model instances, we can do full model comparison // here. 
@@ -462,26 +461,26 @@ void runTests(db.DatastoreDB store, String namespace) { var usersSortedNameDescNicknameAsc = List.from(users); usersSortedNameDescNicknameAsc.sort((User a, User b) { - var result = b.name.compareTo(a.name); - if (result == 0) return a.nickname.compareTo(b.nickname); + var result = b.name!.compareTo(a.name!); + if (result == 0) return a.nickname!.compareTo(b.nickname!); return result; }); var usersSortedNameDescNicknameDesc = List.from(users); usersSortedNameDescNicknameDesc.sort((User a, User b) { - var result = b.name.compareTo(a.name); - if (result == 0) return b.nickname.compareTo(a.nickname); + var result = b.name!.compareTo(a.name!); + if (result == 0) return b.nickname!.compareTo(a.nickname!); return result; }); var usersSortedAndFilteredNameDescNicknameAsc = usersSortedNameDescNicknameAsc.where((User u) { - return LOWER_BOUND.compareTo(u.name) <= 0; + return LOWER_BOUND.compareTo(u.name!) <= 0; }).toList(); var usersSortedAndFilteredNameDescNicknameDesc = usersSortedNameDescNicknameDesc.where((User u) { - return LOWER_BOUND.compareTo(u.name) <= 0; + return LOWER_BOUND.compareTo(u.name!) <= 0; }).toList(); var fooUsers = @@ -587,7 +586,7 @@ void runTests(db.DatastoreDB store, String namespace) { // Filter equals () async { - var wifeKey = root.append(User, id: usersWithWife.first.wife.id); + var wifeKey = root.append(User, id: usersWithWife.first.wife!.id); var query = store.query(partition: partition) ..filter('wife =', wifeKey) ..run(); @@ -656,14 +655,14 @@ void runTests(db.DatastoreDB store, String namespace) { ]), // Make sure queries don't return results - () => store.lookup(allKeys).then((List models) { + () => store.lookup(allKeys).then((List models) { expect(models.length, equals(allKeys.length)); for (var model in models) { expect(model, isNull); } }), ]; - return Future.forEach(tests, (f) => f()); + return Future.forEach(tests, (dynamic f) => f()); }); }); }); @@ -732,8 +731,8 @@ Future waitUntilEntitiesHelper( } Future main() async { - db.DatastoreDB store; - BaseClient client; + late db.DatastoreDB store; + BaseClient? client; var scopes = datastore_impl.DatastoreImpl.SCOPES; await withAuthClient(scopes, (String project, httpClient) { diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 4368f3e0..2a34d991 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library metamodel_test; @@ -14,12 +14,12 @@ import 'package:gcloud/db/metamodel.dart'; import 'package:test/test.dart'; List buildEntitiesWithDifferentNamespaces() { - Key newKey(String namespace, String kind, int id) { + Key newKey(String? namespace, String kind, int id) { var partition = Partition(namespace); return Key([KeyElement(kind, id)], partition: partition); } - Entity newEntity(String namespace, String kind, {int id = 1}) { + Entity newEntity(String? 
namespace, String kind, {int id = 1}) { return Entity(newKey(namespace, kind, id), {'ping': 'pong'}); } @@ -61,11 +61,11 @@ void runTests(datastore, db.DatastoreDB store) { var namespaceQuery = store.query(); return namespaceQuery.run().toList().then((namespaces) { expect(namespaces.length, greaterThanOrEqualTo(3)); - expect(namespaces, contains(cond((ns) => ns.name == null))); + expect(namespaces, contains(cond((dynamic ns) => ns.name == null))); expect( - namespaces, contains(cond((ns) => ns.name == 'FooNamespace'))); + namespaces, contains(cond((dynamic ns) => ns.name == 'FooNamespace'))); expect( - namespaces, contains(cond((ns) => ns.name == 'BarNamespace'))); + namespaces, contains(cond((dynamic ns) => ns.name == 'BarNamespace'))); var futures = []; for (var namespace in namespaces) { @@ -74,19 +74,19 @@ void runTests(datastore, db.DatastoreDB store) { namespace.name == 'BarNamespace')) { continue; } - var partition = store.newPartition(namespace.name); + var partition = store.newPartition(namespace.name!); var kindQuery = store.query(partition: partition); futures.add(kindQuery.run().toList().then((List kinds) { expect(kinds.length, greaterThanOrEqualTo(2)); if (namespace.name == null) { - expect(kinds, contains(cond((k) => k.name == 'NullKind'))); - expect(kinds, contains(cond((k) => k.name == 'NullKind2'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'NullKind'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'NullKind2'))); } else if (namespace.name == 'FooNamespace') { - expect(kinds, contains(cond((k) => k.name == 'FooKind'))); - expect(kinds, contains(cond((k) => k.name == 'FooKind2'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'FooKind'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'FooKind2'))); } else if (namespace.name == 'BarNamespace') { - expect(kinds, contains(cond((k) => k.name == 'BarKind'))); - expect(kinds, contains(cond((k) => k.name == 'BarKind2'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'BarKind'))); + expect(kinds, contains(cond((dynamic k) => k.name == 'BarKind2'))); } })); } diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index cd3941be..e749ce4f 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db_impl_test; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 3fc11b31..1f2db4ec 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db.model_test.duplicate_fieldname; @@ -10,12 +10,12 @@ import 'package:gcloud/db.dart' as db; @db.Kind() class A extends db.Model { @db.IntProperty() - int foo; + int? foo; } @db.Kind() class B extends A { @override @db.IntProperty(propertyName: 'bar') - int foo; + int? 
foo; } diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index dfd9cc68..d039b5a6 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db.model_test.duplicate_kind; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart index b1756cea..f05c7f55 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db.model_test.duplicate_property; @@ -10,8 +10,8 @@ import 'package:gcloud/db.dart' as db; @db.Kind() class A extends db.Model { @db.IntProperty() - int foo; + int? foo; @db.IntProperty(propertyName: 'foo') - int bar; + int? bar; } diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 858ce1e1..59fdde11 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db.model_test.multiple_annotations; diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart index a80c74d6..e8c29bb6 100644 --- a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -1,7 +1,7 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// @dart=2.9 + library gcloud.db.model_test.no_default_constructor; diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 36fcf689..15be265b 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-// @dart=2.9 library gcloud.db.properties_test; @@ -13,86 +12,94 @@ import 'package:test/test.dart'; void main() { group('properties', () { + var datastoreKey = datastore.Key([datastore.KeyElement('MyKind', 42)], + partition: datastore.Partition('foonamespace')); + var dbKey = KeyMock(datastoreKey); + var modelDBMock = ModelDBMock(datastoreKey, dbKey); + test('bool_property', () { var prop = const BoolProperty(required: true); - expect(prop.validate(null, null), isFalse); + expect(prop.validate(modelDBMock, null), isFalse); prop = const BoolProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, true), isTrue); - expect(prop.validate(null, false), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, true), equals(true)); - expect(prop.encodeValue(null, false), equals(false)); - expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, true), equals(true)); - expect(prop.decodePrimitiveValue(null, false), equals(false)); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, true), isTrue); + expect(prop.validate(modelDBMock, false), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, true), equals(true)); + expect(prop.encodeValue(modelDBMock, false), equals(false)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, true), equals(true)); + expect(prop.decodePrimitiveValue(modelDBMock, false), equals(false)); }); test('int_property', () { var prop = const IntProperty(required: true); - expect(prop.validate(null, null), isFalse); + expect(prop.validate(modelDBMock, null), isFalse); prop = const IntProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, 33), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, 42), equals(42)); - expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, 99), equals(99)); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 33), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 42), equals(42)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 99), equals(99)); }); test('double_property', () { var prop = const DoubleProperty(required: true); - expect(prop.validate(null, null), isFalse); + expect(prop.validate(modelDBMock, null), isFalse); prop = const DoubleProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, 33.0), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, 42.3), equals(42.3)); - expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, 99.1), equals(99.1)); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 33.0), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 42.3), equals(42.3)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 99.1), equals(99.1)); }); test('string_property', () { var prop = const StringProperty(required: true); - expect(prop.validate(null, null), isFalse); + 
expect(prop.validate(modelDBMock, null), isFalse); prop = const StringProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, 'foobar'), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, 'foo'), equals('foo')); - expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, 'bar'), equals('bar')); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, 'foobar'), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, 'foo'), equals('foo')); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, 'bar'), equals('bar')); }); test('blob_property', () { var prop = const BlobProperty(required: true); - expect(prop.validate(null, null), isFalse); + expect(prop.validate(modelDBMock, null), isFalse); prop = const BlobProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, [1, 2]), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect((prop.encodeValue(null, []) as datastore.BlobValue).bytes, + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, [1, 2]), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect( + (prop.encodeValue(modelDBMock, []) as datastore.BlobValue).bytes, equals([])); - expect((prop.encodeValue(null, [1, 2]) as datastore.BlobValue).bytes, + expect( + (prop.encodeValue(modelDBMock, [1, 2]) as datastore.BlobValue).bytes, equals([1, 2])); expect( - (prop.encodeValue(null, Uint8List.fromList([1, 2])) + (prop.encodeValue(modelDBMock, Uint8List.fromList([1, 2])) as datastore.BlobValue) .bytes, equals([1, 2])); - expect(prop.decodePrimitiveValue(null, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, datastore.BlobValue([])), + equals([])); expect( - prop.decodePrimitiveValue(null, datastore.BlobValue([])), equals([])); - expect(prop.decodePrimitiveValue(null, datastore.BlobValue([5, 6])), + prop.decodePrimitiveValue(modelDBMock, datastore.BlobValue([5, 6])), equals([5, 6])); expect( prop.decodePrimitiveValue( - null, datastore.BlobValue(Uint8List.fromList([5, 6]))), + modelDBMock, datastore.BlobValue(Uint8List.fromList([5, 6]))), equals([5, 6])); }); @@ -100,37 +107,42 @@ void main() { var utc99 = DateTime.fromMillisecondsSinceEpoch(99, isUtc: true); var prop = const DateTimeProperty(required: true); - expect(prop.validate(null, null), isFalse); + expect(prop.validate(modelDBMock, null), isFalse); prop = const DateTimeProperty(required: false); - expect(prop.validate(null, null), isTrue); - expect(prop.validate(null, utc99), isTrue); - expect(prop.encodeValue(null, null), equals(null)); - expect(prop.encodeValue(null, utc99), equals(utc99)); - expect(prop.decodePrimitiveValue(null, null), equals(null)); - expect(prop.decodePrimitiveValue(null, 99 * 1000), equals(utc99)); - expect(prop.decodePrimitiveValue(null, 99 * 1000 + 1), equals(utc99)); - expect(prop.decodePrimitiveValue(null, utc99), equals(utc99)); + expect(prop.validate(modelDBMock, null), isTrue); + expect(prop.validate(modelDBMock, utc99), isTrue); + expect(prop.encodeValue(modelDBMock, null), equals(null)); + expect(prop.encodeValue(modelDBMock, utc99), equals(utc99)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals(null)); + 
expect(prop.decodePrimitiveValue(modelDBMock, 99 * 1000), equals(utc99)); + expect( + prop.decodePrimitiveValue(modelDBMock, 99 * 1000 + 1), equals(utc99)); + expect(prop.decodePrimitiveValue(modelDBMock, utc99), equals(utc99)); }); test('list_property', () { var prop = const ListProperty(BoolProperty()); - expect(prop.validate(null, null), isFalse); - expect(prop.validate(null, []), isTrue); - expect(prop.validate(null, [true]), isTrue); - expect(prop.validate(null, [true, false]), isTrue); - expect(prop.validate(null, [true, false, 1]), isFalse); - expect(prop.encodeValue(null, []), equals(null)); - expect(prop.encodeValue(null, [true]), equals(true)); - expect(prop.encodeValue(null, [true, false]), equals([true, false])); - expect(prop.encodeValue(null, true, forComparison: true), equals(true)); - expect(prop.encodeValue(null, false, forComparison: true), equals(false)); - expect(prop.encodeValue(null, null, forComparison: true), equals(null)); - expect(prop.decodePrimitiveValue(null, null), equals([])); - expect(prop.decodePrimitiveValue(null, []), equals([])); - expect(prop.decodePrimitiveValue(null, true), equals([true])); - expect(prop.decodePrimitiveValue(null, [true, false]), + expect(prop.validate(modelDBMock, null), isFalse); + expect(prop.validate(modelDBMock, []), isTrue); + expect(prop.validate(modelDBMock, [true]), isTrue); + expect(prop.validate(modelDBMock, [true, false]), isTrue); + expect(prop.validate(modelDBMock, [true, false, 1]), isFalse); + expect(prop.encodeValue(modelDBMock, []), equals(null)); + expect(prop.encodeValue(modelDBMock, [true]), equals(true)); + expect( + prop.encodeValue(modelDBMock, [true, false]), equals([true, false])); + expect(prop.encodeValue(modelDBMock, true, forComparison: true), + equals(true)); + expect(prop.encodeValue(modelDBMock, false, forComparison: true), + equals(false)); + expect(prop.encodeValue(modelDBMock, null, forComparison: true), + equals(null)); + expect(prop.decodePrimitiveValue(modelDBMock, null), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, []), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, true), equals([true])); + expect(prop.decodePrimitiveValue(modelDBMock, [true, false]), equals([true, false])); }); @@ -140,28 +152,26 @@ void main() { var c1 = Custom()..customValue = 'c1'; var c2 = Custom()..customValue = 'c2'; - expect(prop.validate(null, null), isFalse); - expect(prop.validate(null, []), isTrue); - expect(prop.validate(null, [c1]), isTrue); - expect(prop.validate(null, [c1, c2]), isTrue); - expect(prop.validate(null, [c1, c2, 1]), isFalse); - expect(prop.encodeValue(null, []), equals(null)); - expect(prop.encodeValue(null, [c1]), equals(c1.customValue)); - expect(prop.encodeValue(null, [c1, c2]), + expect(prop.validate(modelDBMock, null), isFalse); + expect(prop.validate(modelDBMock, []), isTrue); + expect(prop.validate(modelDBMock, [c1]), isTrue); + expect(prop.validate(modelDBMock, [c1, c2]), isTrue); + expect(prop.validate(modelDBMock, [c1, c2, 1]), isFalse); + expect(prop.encodeValue(modelDBMock, []), equals(null)); + expect(prop.encodeValue(modelDBMock, [c1]), equals(c1.customValue)); + expect(prop.encodeValue(modelDBMock, [c1, c2]), equals([c1.customValue, c2.customValue])); - expect(prop.decodePrimitiveValue(null, null), equals([])); - expect(prop.decodePrimitiveValue(null, []), equals([])); - expect(prop.decodePrimitiveValue(null, c1.customValue), equals([c1])); - expect(prop.decodePrimitiveValue(null, [c1.customValue, c2.customValue]), + 
expect(prop.decodePrimitiveValue(modelDBMock, null), equals([])); + expect(prop.decodePrimitiveValue(modelDBMock, []), equals([])); + expect( + prop.decodePrimitiveValue(modelDBMock, c1.customValue), equals([c1])); + expect( + prop.decodePrimitiveValue( + modelDBMock, [c1.customValue, c2.customValue]), equals([c1, c2])); }); test('modelkey_property', () { - var datastoreKey = datastore.Key([datastore.KeyElement('MyKind', 42)], - partition: datastore.Partition('foonamespace')); - var dbKey = KeyMock(datastoreKey); - var modelDBMock = ModelDBMock(datastoreKey, dbKey); - var prop = const ModelKeyProperty(required: true); expect(prop.validate(modelDBMock, null), isFalse); @@ -179,7 +189,7 @@ void main() { } class Custom { - String customValue; + String? customValue; @override int get hashCode => customValue.hashCode; @@ -192,22 +202,22 @@ class Custom { class CustomProperty extends StringProperty { const CustomProperty( - {String propertyName, bool required = false, bool indexed = true}); + {String? propertyName, bool required = false, bool indexed = true}); @override - bool validate(ModelDB db, Object value) { + bool validate(ModelDB db, Object? value) { if (required && value == null) return false; return value == null || value is Custom; } @override - Object decodePrimitiveValue(ModelDB db, Object value) { + Object? decodePrimitiveValue(ModelDB db, Object? value) { if (value == null) return null; return Custom()..customValue = value as String; } @override - Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) { + Object? encodeValue(ModelDB db, Object? value, {bool forComparison = false}) { if (value == null) return null; return (value as Custom).customValue; } @@ -221,16 +231,17 @@ class KeyMock implements Key { @override Object id = 1; @override - Type type; + Type? type; @override Key get parent => this; @override bool get isEmpty => false; @override - Partition get partition => null; + Partition get partition => throw UnimplementedError('not mocked'); datastore.Key get datastoreKey => _datastoreKey; @override - Key append(Type modelType, {T id}) => null; + Key append(Type modelType, {T? id}) => + throw UnimplementedError('not mocked'); @override Key cast() => Key(parent, type, id as U); @override @@ -258,17 +269,18 @@ class ModelDBMock implements ModelDB { return _datastoreKey; } - Map propertiesForModel(modelDescription) => null; + Map? propertiesForModel(modelDescription) => null; @override - T fromDatastoreEntity(datastore.Entity entity) => null; + T? fromDatastoreEntity(datastore.Entity? entity) => null; @override - datastore.Entity toDatastoreEntity(Model model) => null; + datastore.Entity toDatastoreEntity(Model model) => + throw UnimplementedError('not mocked'); @override - String fieldNameToPropertyName(String kind, String fieldName) => null; + String? fieldNameToPropertyName(String kind, String fieldName) => null; @override - String kindName(Type type) => null; + String kindName(Type type) => throw UnimplementedError('not mocked'); @override - Object toDatastoreValue(String kind, String fieldName, Object value, + Object? toDatastoreValue(String kind, String fieldName, Object? 
value, {bool forComparison = false}) => null; } From d5ed5c60d9882aeeae933a930dfdcbac6c298f80 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Sat, 17 Jul 2021 20:25:51 +0200 Subject: [PATCH 180/239] Fix review comments --- pkgs/gcloud/lib/pubsub.dart | 1 + pkgs/gcloud/lib/src/common_utils.dart | 13 +++++++++++++ pkgs/gcloud/lib/src/datastore_impl.dart | 11 +++-------- pkgs/gcloud/lib/src/pubsub_impl.dart | 8 ++------ 4 files changed, 19 insertions(+), 14 deletions(-) create mode 100644 pkgs/gcloud/lib/src/common_utils.dart diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 468e89dc..a2416802 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -9,6 +9,7 @@ import 'dart:collection'; import 'dart:convert'; import 'dart:io'; +import 'package:gcloud/src/common_utils.dart'; import 'package:googleapis/pubsub/v1.dart' as pubsub; import 'package:http/http.dart' as http; diff --git a/pkgs/gcloud/lib/src/common_utils.dart b/pkgs/gcloud/lib/src/common_utils.dart new file mode 100644 index 00000000..d8380109 --- /dev/null +++ b/pkgs/gcloud/lib/src/common_utils.dart @@ -0,0 +1,13 @@ +// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import '../common.dart'; + +extension PageUtils on Page { + void throwIfIsLast() { + if (isLast) { + throw StateError('Page.next() cannot be called when Page.isLast == true'); + } + } +} diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 5d86fc8e..b76aef8b 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -6,6 +6,7 @@ library gcloud.datastore_impl; import 'dart:async'; +import 'package:gcloud/src/common_utils.dart'; import 'package:googleapis/datastore/v1.dart' as api; import 'package:http/http.dart' as http; @@ -613,17 +614,11 @@ class QueryPageImpl implements Page { List get items => _entities; @override - Future> next({int? pageSize}) { + Future> next({int? pageSize}) async { // NOTE: We do not respect [pageSize] here, the only mechanism we can // really use is `query.limit`, but this is user-specified when making // the query. - if (isLast) { - return Future.sync(() { - throw StateError( - 'Page.next() cannot be called when Page.isLast == true', - ); - }); - } + throwIfIsLast(); return QueryPageImpl.runQuery( _api, _project, _nextRequest, _remainingNumberOfEntities) diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index 3ba28f67..ea202a5a 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -478,9 +478,7 @@ class _TopicPageImpl implements Page { @override Future> next({int? pageSize}) async { - if (isLast) { - throw StateError('Page.next() cannot be called when Page.isLast == true'); - } + throwIfIsLast(); final pageSize_ = pageSize ?? _pageSize; return _api._listTopics(pageSize_, _nextPageToken).then((response) { @@ -511,9 +509,7 @@ class _SubscriptionPageImpl implements Page { @override Future> next({int? pageSize}) { - if (isLast) { - throw StateError('Page.next() cannot be called when Page.isLast == true'); - } + throwIfIsLast(); final pageSize_ = pageSize ?? 
_pageSize; return _api From dbfaba9a111cc3c972cf3aaf712aea6fe6b79629 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Sat, 17 Jul 2021 20:34:20 +0200 Subject: [PATCH 181/239] Fix lints --- .../datastore/e2e/datastore_test_impl.dart | 6 ++-- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 12 ++++---- .../test/db/e2e/metamodel_test_impl.dart | 30 +++++++++++-------- pkgs/gcloud/test/db/model_db_test.dart | 3 +- 4 files changed, 28 insertions(+), 23 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index b92ce5a1..c31c884b 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -65,7 +65,7 @@ void runTests(Datastore datastore, String? namespace) { autoIdInserts: autoIdEntities, transaction: transaction) .then((result) { - if (autoIdEntities != null && autoIdEntities.isNotEmpty) { + if (autoIdEntities.isNotEmpty) { expect( result.autoIdInsertKeys.length, equals(autoIdEntities.length)); } @@ -76,7 +76,7 @@ void runTests(Datastore datastore, String? namespace) { return datastore .commit(inserts: entities, autoIdInserts: autoIdEntities) .then((result) { - if (autoIdEntities != null && autoIdEntities.isNotEmpty) { + if (autoIdEntities.isNotEmpty) { expect(result.autoIdInsertKeys.length, equals(autoIdEntities.length)); } return result.autoIdInsertKeys; @@ -110,7 +110,7 @@ void runTests(Datastore datastore, String? namespace) { if (key.elements.isEmpty) return false; for (var element in key.elements) { - if (element.kind == null || element.kind is! String) return false; + if (element.kind is! String) return false; if (!ignoreIds) { if (element.id == null || (element.id is! String && element.id is! int)) { diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 2a48ec02..84839066 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -112,7 +112,7 @@ class User extends Person { String? nickname; @db.StringListProperty(propertyName: 'language') - List languages = const []; + List? languages = const []; @override bool sameAs(Object other) { @@ -125,12 +125,12 @@ class User extends Person { if (user.languages == null) return true; return false; } - if (languages.length != user.languages.length) { + if (languages!.length != user.languages?.length) { return false; } - for (var i = 0; i < languages.length; i++) { - if (languages[i] != user.languages[i]) { + for (var i = 0; i < languages!.length; i++) { + if (languages![i] != user.languages![i]) { return false; } } @@ -484,9 +484,9 @@ void runTests(db.DatastoreDB store, String? namespace) { }).toList(); var fooUsers = - users.where((User u) => u.languages.contains('foo')).toList(); + users.where((User u) => u.languages!.contains('foo')).toList(); var barUsers = - users.where((User u) => u.languages.contains('bar')).toList(); + users.where((User u) => u.languages!.contains('bar')).toList(); var usersWithWife = users .where((User u) => u.wife == root.append(User, id: 42 + 3)) .toList(); diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 2a34d991..a97061c2 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
- library metamodel_test; import 'dart:async'; @@ -62,15 +61,14 @@ void runTests(datastore, db.DatastoreDB store) { return namespaceQuery.run().toList().then((namespaces) { expect(namespaces.length, greaterThanOrEqualTo(3)); expect(namespaces, contains(cond((dynamic ns) => ns.name == null))); - expect( - namespaces, contains(cond((dynamic ns) => ns.name == 'FooNamespace'))); - expect( - namespaces, contains(cond((dynamic ns) => ns.name == 'BarNamespace'))); + expect(namespaces, + contains(cond((dynamic ns) => ns.name == 'FooNamespace'))); + expect(namespaces, + contains(cond((dynamic ns) => ns.name == 'BarNamespace'))); var futures = []; for (var namespace in namespaces) { - if (!(namespace == null || - namespace.name == 'FooNamespace' || + if (!(namespace.name == 'FooNamespace' || namespace.name == 'BarNamespace')) { continue; } @@ -79,14 +77,20 @@ void runTests(datastore, db.DatastoreDB store) { futures.add(kindQuery.run().toList().then((List kinds) { expect(kinds.length, greaterThanOrEqualTo(2)); if (namespace.name == null) { - expect(kinds, contains(cond((dynamic k) => k.name == 'NullKind'))); - expect(kinds, contains(cond((dynamic k) => k.name == 'NullKind2'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'NullKind'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'NullKind2'))); } else if (namespace.name == 'FooNamespace') { - expect(kinds, contains(cond((dynamic k) => k.name == 'FooKind'))); - expect(kinds, contains(cond((dynamic k) => k.name == 'FooKind2'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'FooKind'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'FooKind2'))); } else if (namespace.name == 'BarNamespace') { - expect(kinds, contains(cond((dynamic k) => k.name == 'BarKind'))); - expect(kinds, contains(cond((dynamic k) => k.name == 'BarKind2'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'BarKind'))); + expect(kinds, + contains(cond((dynamic k) => k.name == 'BarKind2'))); } })); } diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index e749ce4f..2ffdd8f5 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library gcloud.db_impl_test; import 'dart:async'; @@ -19,6 +18,8 @@ import 'model_dbs/no_default_constructor.dart' as test5; void main() { // These unused imports make sure that [ModelDBImpl.fromLibrary()] will find // all the Model/ModelDescription classes. 
+ // + // ignore: unnecessary_null_comparison assert([test1.A, test2.A, test3.A, test4.A, test5.A] != null); ModelDBImpl newModelDB(Symbol symbol) => ModelDBImpl.fromLibrary(symbol); From a3e11e27ad658540d0cc73f5e02774b7a93a8dfe Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Sat, 17 Jul 2021 20:55:54 +0200 Subject: [PATCH 182/239] Fix formatting --- pkgs/gcloud/test/datastore/e2e/utils.dart | 1 - pkgs/gcloud/test/datastore/error_matchers.dart | 1 - pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart | 1 - pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart | 1 - pkgs/gcloud/test/db/model_dbs/duplicate_property.dart | 1 - pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart | 1 - pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart | 1 - 7 files changed, 7 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 7f508d92..1f693402 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library raw_datastore_test_utils; import 'package:gcloud/datastore.dart'; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 65ebce5e..44bdfb32 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library error_matchers; import 'dart:io'; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 1f2db4ec..98690ea7 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library gcloud.db.model_test.duplicate_fieldname; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index d039b5a6..1859fdf9 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library gcloud.db.model_test.duplicate_kind; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart index f05c7f55..de550898 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
- library gcloud.db.model_test.duplicate_property; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 59fdde11..3ffd27ca 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library gcloud.db.model_test.multiple_annotations; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart index e8c29bb6..1c3b3d5e 100644 --- a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. - library gcloud.db.model_test.no_default_constructor; import 'package:gcloud/db.dart' as db; From 770b26702412959ff333801fc2efd99de2b77b0c Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 20 Jul 2021 13:47:57 +0200 Subject: [PATCH 183/239] clean up idFunction --- pkgs/gcloud/test/datastore/e2e/utils.dart | 26 +++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 1f693402..6f722c15 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -18,8 +18,12 @@ const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; final TEST_BLOB_INDEXED_VALUE = BlobValue([0xaa, 0xaa, 0xff, 0xff]); -Key buildKey(int i, - {Function? idFunction, String kind = TEST_KIND, Partition? p}) { +Key buildKey( + int i, { + Object Function(int)? idFunction, + String kind = TEST_KIND, + Partition? p, +}) { var path = [KeyElement(kind, idFunction == null ? null : idFunction(i))]; return Key(path, partition: p ?? Partition.DEFAULT); } @@ -39,8 +43,13 @@ Map buildProperties(int i) { }; } -List buildKeys(int from, int to, - {Function? idFunction, String kind = TEST_KIND, Partition? partition}) { +List buildKeys( + int from, + int to, { + Object Function(int)? idFunction, + String kind = TEST_KIND, + Partition? partition, +}) { var keys = []; for (var i = from; i < to; i++) { keys.add(buildKey(i, idFunction: idFunction, kind: kind, p: partition)); @@ -48,8 +57,13 @@ List buildKeys(int from, int to, return keys; } -List buildEntities(int from, int to, - {Function? idFunction, String kind = TEST_KIND, Partition? partition}) { +List buildEntities( + int from, + int to, { + Object Function(int)? idFunction, + String kind = TEST_KIND, + Partition? 
partition, +}) { var entities = []; var unIndexedProperties = {}; for (var i = from; i < to; i++) { From 17468bee2cc39efa0d1462def5e74557b7e458f1 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 20 Jul 2021 13:51:56 +0200 Subject: [PATCH 184/239] Fix README.md to reference new factory constructor --- pkgs/gcloud/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index fee281e7..cf7b4106 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -29,7 +29,7 @@ import 'package:gcloud/db.dart'; import 'package:gcloud/storage.dart'; import 'package:gcloud/pubsub.dart'; import 'package:gcloud/service_scope.dart' as ss; -import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; +import 'package:gcloud/datastore.dart' as datastore; ``` ### Getting access to the APIs @@ -47,7 +47,7 @@ var credentials = new auth.ServiceAccountCredentials.fromJson(jsonCredentials); // Get an HTTP authenticated client using the service account credentials. var scopes = [] - ..addAll(datastore_impl.DatastoreImpl.SCOPES) + ..addAll(datastore.Datastore.Scopes) ..addAll(Storage.SCOPES) ..addAll(PubSub.SCOPES); var client = await auth.clientViaServiceAccount(credentials, scopes); @@ -55,7 +55,7 @@ var client = await auth.clientViaServiceAccount(credentials, scopes); // Instantiate objects to access Cloud Datastore, Cloud Storage // and Cloud Pub/Sub APIs. var db = new DatastoreDB( - new datastore_impl.DatastoreImpl(client, 's~my-project')); + new datastore.Datastore(client, 's~my-project')); var storage = new Storage(client, 'my-project'); var pubsub = new PubSub(client, 'my-project'); ``` From 6ed0917abe35a2a431ea640cd35be32121a64299 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Wed, 18 Aug 2021 16:50:27 +0200 Subject: [PATCH 185/239] Support googleapis v4 --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index a68b1c9e..74822933 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.3 + +- Support the latest version of the `googleapis` package. + ## 0.8.2 * **BREAKING CHANGE:** `Page.next()` throws if `Page.isLast`, this change only diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 3bfb96ec..5f980c23 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.2 +version: 0.8.3 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud @@ -8,7 +8,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: ^3.0.0 + googleapis: ^4.0.0 http: ^0.13.0 meta: ^1.3.0 From 69e1d107dfd3d02c87a14866ec58cbea5cf4b614 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Wed, 15 Sep 2021 18:57:37 +0200 Subject: [PATCH 186/239] Support 5.0.0 as well --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 5f980c23..de0facee 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: ^4.0.0 + googleapis: ^5.0.0 http: ^0.13.0 meta: ^1.3.0 From 87a5fa325b44ac174d2a1ac6fc0a5a3544a1a502 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Wed, 15 Sep 2021 20:11:30 +0200 Subject: [PATCH 187/239] Support v3-v5 --- pkgs/gcloud/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index de0facee..ac5a565f 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -8,7 +8,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: ^5.0.0 + googleapis: '>=3.0.0 <6.0.0' http: ^0.13.0 meta: ^1.3.0 From c927de4cebabe80173a4048eae2d57b49bfa5f18 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 27 Sep 2021 09:04:00 -0700 Subject: [PATCH 188/239] Drop unneeded directives and redundant type checks (dart-lang/gcloud#126) Fixes CI --- pkgs/gcloud/lib/db.dart | 3 +-- pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart | 1 - pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart | 1 - pkgs/gcloud/test/storage/e2e_test.dart | 3 --- pkgs/gcloud/test/storage/storage_test.dart | 3 --- 5 files changed, 1 insertion(+), 10 deletions(-) diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 42e15514..11509e02 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -4,7 +4,6 @@ library gcloud.db; -import 'dart:async'; import 'dart:collection'; // dart:core is imported explicitly so it is available at top-level without // the `core` prefix defined below. @@ -23,9 +22,9 @@ import 'service_scope.dart' as ss; part 'src/db/annotations.dart'; part 'src/db/db.dart'; part 'src/db/exceptions.dart'; -part 'src/db/models.dart'; part 'src/db/model_db.dart'; part 'src/db/model_db_impl.dart'; +part 'src/db/models.dart'; const Symbol _dbKey = #gcloud.db; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index c31c884b..de134b01 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -110,7 +110,6 @@ void runTests(Datastore datastore, String? namespace) { if (key.elements.isEmpty) return false; for (var element in key.elements) { - if (element.kind is! String) return false; if (!ignoreIds) { if (element.id == null || (element.id is! String && element.id is! 
int)) { diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index a97061c2..027b1d29 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -7,7 +7,6 @@ library metamodel_test; import 'dart:async'; import 'package:gcloud/datastore.dart'; -import 'package:gcloud/datastore.dart' show Key, Partition; import 'package:gcloud/db.dart' as db; import 'package:gcloud/db/metamodel.dart'; import 'package:test/test.dart'; diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index a7b1026a..ef1cbec3 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -67,7 +67,6 @@ void main() { return storage.bucketInfo(bucketName).then(expectAsync1((info) { expect(info.bucketName, bucketName); expect(info.etag, isNotNull); - expect(info.created is DateTime, isTrue); expect(info.id, isNotNull); return storage.deleteBucket(bucketName).then(expectAsync1((result) { expect(result, isNull); @@ -233,10 +232,8 @@ void main() { return bucket.info(objectName).then(expectAsync1((info) { expect(info.name, objectName); expect(info.length, bytes.length); - expect(info.updated is DateTime, isTrue); expect(info.md5Hash, isNotNull); expect(info.crc32CChecksum, isNotNull); - expect(info.downloadLink is Uri, isTrue); expect(info.generation.objectGeneration, isNotNull); expect(info.generation.metaGeneration, 1); expect(info.metadata.contentType, metadata.contentType); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 4360f07c..ad497ccf 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -1013,11 +1013,8 @@ void main() { bucket.info(objectName).then(expectAsync1((ObjectInfo info) { expect(info.name, objectName); expect(info.metadata.acl!.entries.length, 3); - expect(info.metadata.acl!.entries[0] is AclEntry, isTrue); expect(info.metadata.acl!.entries[0].scope is StorageIdScope, isTrue); - expect(info.metadata.acl!.entries[1] is AclEntry, isTrue); expect(info.metadata.acl!.entries[1].scope is AccountScope, isTrue); - expect(info.metadata.acl!.entries[2] is AclEntry, isTrue); expect(info.metadata.acl!.entries[2].scope is OpaqueScope, isTrue); })); }); From acb50d960fc1d7285c66833aa03daea804715d30 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 13 Oct 2021 12:03:35 -0700 Subject: [PATCH 189/239] Switch to pkg:lints for lints Also enable and fix prefer_relative_imports --- pkgs/gcloud/analysis_options.yaml | 3 +- pkgs/gcloud/lib/common.dart | 4 +- pkgs/gcloud/lib/datastore.dart | 14 +- pkgs/gcloud/lib/db/metamodel.dart | 1 + pkgs/gcloud/lib/pubsub.dart | 3 +- pkgs/gcloud/lib/service_scope.dart | 8 +- pkgs/gcloud/lib/src/datastore_impl.dart | 8 +- pkgs/gcloud/lib/src/db/annotations.dart | 2 + pkgs/gcloud/lib/src/db/db.dart | 24 +- pkgs/gcloud/lib/src/db/model_db_impl.dart | 4 +- pkgs/gcloud/lib/src/pubsub_impl.dart | 9 +- pkgs/gcloud/lib/src/storage_impl.dart | 52 +-- pkgs/gcloud/lib/storage.dart | 47 +-- pkgs/gcloud/pubspec.yaml | 4 +- pkgs/gcloud/test/common.dart | 22 +- pkgs/gcloud/test/common_e2e.dart | 10 +- .../datastore/e2e/datastore_test_impl.dart | 189 ++++++----- pkgs/gcloud/test/datastore/e2e/utils.dart | 42 +-- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 17 +- .../db/model_dbs/duplicate_fieldname.dart | 1 + pkgs/gcloud/test/db/properties_test.dart | 1 + pkgs/gcloud/test/db_all_e2e_test.dart | 2 +- 
pkgs/gcloud/test/pubsub/pubsub_test.dart | 295 +++++++++--------- pkgs/gcloud/test/service_scope_test.dart | 6 +- pkgs/gcloud/test/storage/e2e_test.dart | 6 +- pkgs/gcloud/test/storage/storage_test.dart | 62 ++-- 26 files changed, 449 insertions(+), 387 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 43cc915b..89810860 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -1,4 +1,4 @@ -include: package:pedantic/analysis_options.yaml +include: package:lints/recommended.yaml analyzer: strong-mode: @@ -17,6 +17,7 @@ linter: - package_api_docs - package_names - package_prefixed_library_names + - prefer_relative_imports - test_types_in_equals - throw_in_finally - unnecessary_brace_in_string_interps diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index f8e00c5c..09e3bcd5 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -29,7 +29,7 @@ typedef FirstPageProvider = Future> Function(int pageSize); /// Helper class to turn a series of pages into a stream. class StreamFromPages { - static const int _PAGE_SIZE = 50; + static const int _pageSize = 50; final FirstPageProvider _firstPageProvider; bool _pendingRequest = false; bool _paused = false; @@ -66,7 +66,7 @@ class StreamFromPages { } void _onListen() { - var pageSize = _PAGE_SIZE; + var pageSize = _pageSize; _pendingRequest = true; _firstPageProvider(pageSize).then(_handlePage, onError: _handleError); } diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index c26f155d..b1c857bc 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -12,6 +12,7 @@ library gcloud.datastore; import 'dart:async'; import 'package:http/http.dart' as http; + import 'common.dart' show Page; import 'service_scope.dart' as ss; import 'src/datastore_impl.dart' show DatastoreImpl; @@ -54,7 +55,7 @@ class DatastoreError implements Exception { : message = (message ?? 'DatastoreError: An unknown error occured'); @override - String toString() => '$message'; + String toString() => message; } class UnknownDatastoreError extends DatastoreError { @@ -187,6 +188,7 @@ class Key { /// // TODO(Issue #6): Add dataset-id here. class Partition { + // ignore: constant_identifier_names static const Partition DEFAULT = Partition._default(); /// The namespace of this partition. @@ -243,10 +245,15 @@ class KeyElement { /// A relation used in query filters. class FilterRelation { + // ignore: constant_identifier_names static const FilterRelation LessThan = FilterRelation._('<'); + // ignore: constant_identifier_names static const FilterRelation LessThanOrEqual = FilterRelation._('<='); + // ignore: constant_identifier_names static const FilterRelation GreatherThan = FilterRelation._('>'); + // ignore: constant_identifier_names static const FilterRelation GreatherThanOrEqual = FilterRelation._('>='); + // ignore: constant_identifier_names static const FilterRelation Equal = FilterRelation._('=='); final String name; @@ -277,7 +284,9 @@ class Filter { /// 'Order' class. /// [i.e. so one can write Order.Ascending, Order.Descending]. class OrderDirection { + // ignore: constant_identifier_names static const OrderDirection Ascending = OrderDirection._('Ascending'); + // ignore: constant_identifier_names static const OrderDirection Decending = OrderDirection._('Decending'); final String name; @@ -365,7 +374,8 @@ abstract class Transaction {} /// and allocate IDs from the auto ID allocation policy. 
abstract class Datastore { /// List of required OAuth2 scopes for Datastore operation. - static const Scopes = DatastoreImpl.SCOPES; + // ignore: constant_identifier_names + static const Scopes = DatastoreImpl.scopes; /// Access Datastore using an authenticated client. /// diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index 09616f8c..c019fd24 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -8,6 +8,7 @@ import '../db.dart' as db; @db.Kind(name: '__namespace__') class Namespace extends db.ExpandoModel { + // ignore: constant_identifier_names static const int EmptyNamespaceId = 1; String? get name { diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index a2416802..a2951153 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -9,12 +9,12 @@ import 'dart:collection'; import 'dart:convert'; import 'dart:io'; -import 'package:gcloud/src/common_utils.dart'; import 'package:googleapis/pubsub/v1.dart' as pubsub; import 'package:http/http.dart' as http; import 'common.dart'; import 'service_scope.dart' as ss; +import 'src/common_utils.dart'; export 'common.dart'; @@ -113,6 +113,7 @@ void registerPubSubService(PubSub pubsub) { /// abstract class PubSub { /// List of required OAuth2 scopes for Pub/Sub operation. + // ignore: constant_identifier_names static const SCOPES = [pubsub.PubsubApi.pubsubScope]; /// Access Pub/Sub using an authenticated client. diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 97b5d5d3..89768704 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -77,7 +77,7 @@ library gcloud.service_scope; import 'dart:async'; /// The Symbol used as index in the zone map for the service scope object. -const Symbol _ServiceScopeKey = #gcloud.service_scope; +const Symbol _serviceScopeKey = #gcloud.service_scope; /// An empty service scope. /// @@ -87,7 +87,7 @@ final _ServiceScope _emptyServiceScope = _ServiceScope(); /// Returns the current [_ServiceScope] object. _ServiceScope? get _serviceScope => - Zone.current[_ServiceScopeKey] as _ServiceScope?; + Zone.current[_serviceScopeKey] as _ServiceScope?; /// Start a new zone with a new service scope and run [func] inside it. /// @@ -160,7 +160,7 @@ class _ServiceScope { Object? lookup(Object serviceScope) { _ensureNotInDestroyingState(); var entry = _key2Values[serviceScope]; - return entry != null ? entry.value : null; + return entry?.value; } /// Inserts a new item to the service scope using [serviceScopeKey]. 
@@ -200,7 +200,7 @@ class _ServiceScope { _ensureNotInDestroyingState(); var serviceScope = _copy(); - var map = {_ServiceScopeKey: serviceScope}; + var map = {_serviceScopeKey: serviceScope}; return runZoned(() { var f = func(); return f.whenComplete(serviceScope._runScopeExitHandlers); diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index b76aef8b..fbe7c038 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -6,12 +6,12 @@ library gcloud.datastore_impl; import 'dart:async'; -import 'package:gcloud/src/common_utils.dart'; import 'package:googleapis/datastore/v1.dart' as api; import 'package:http/http.dart' as http; import '../common.dart' show Page; import '../datastore.dart' as datastore; +import 'common_utils.dart'; class TransactionImpl implements datastore.Transaction { final String data; @@ -20,7 +20,7 @@ class TransactionImpl implements datastore.Transaction { } class DatastoreImpl implements datastore.Datastore { - static const List SCOPES = [ + static const List scopes = [ api.DatastoreApi.datastoreScope, api.DatastoreApi.cloudPlatformScope, ]; @@ -489,7 +489,7 @@ class DatastoreImpl implements datastore.Datastore { } class QueryPageImpl implements Page { - static const int MAX_ENTITIES_PER_RESPONSE = 2000; + static const int _maxEntitiesPerResponse = 2000; final api.DatastoreApi _api; final String _project; @@ -505,7 +505,7 @@ class QueryPageImpl implements Page { static Future runQuery(api.DatastoreApi api, String project, api.RunQueryRequest request, int? limit, - {int batchSize = MAX_ENTITIES_PER_RESPONSE}) { + {int batchSize = _maxEntitiesPerResponse}) { if (limit != null && limit < batchSize) { batchSize = limit; } diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 38ad145d..241d5797 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -49,9 +49,11 @@ class Kind { /// The type used for id's of an entity. class IdType { /// Use integer ids for identifying entities. + // ignore: constant_identifier_names static const IdType Integer = IdType('Integer'); /// Use string ids for identifying entities. + // ignore: constant_identifier_names static const IdType String = IdType('String'); final core.String _type; diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index c86f85cf..1d7eda8a 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -16,10 +16,10 @@ typedef TransactionHandler = Future Function(Transaction transaction); /// (inserts/updates/deletes). Finally the transaction can be either committed /// or rolled back. class Transaction { - static const int _TRANSACTION_STARTED = 0; - static const int _TRANSACTION_ROLLED_BACK = 1; - static const int _TRANSACTION_COMMITTED = 2; - static const int _TRANSACTION_COMMIT_FAILED = 3; + static const int _transactionStarted = 0; + static const int _transactionRolledBack = 1; + static const int _transactionCommitted = 2; + static const int _transactionCommitFailed = 3; final DatastoreDB db; final ds.Transaction _datastoreTransaction; @@ -27,7 +27,7 @@ class Transaction { final List _inserts = []; final List _deletes = []; - int _state = _TRANSACTION_STARTED; + int _state = _transactionStarted; Transaction(this.db, this._datastoreTransaction); @@ -107,30 +107,30 @@ class Transaction { /// Rolls this transaction back. 
Future rollback() { - _checkSealed(changeState: _TRANSACTION_ROLLED_BACK, allowFailed: true); + _checkSealed(changeState: _transactionRolledBack, allowFailed: true); return db.datastore.rollback(_datastoreTransaction); } /// Commits this transaction including all of the queued mutations. Future commit() { - _checkSealed(changeState: _TRANSACTION_COMMITTED); + _checkSealed(changeState: _transactionCommitted); try { return _commitHelper(db, inserts: _inserts, deletes: _deletes, datastoreTransaction: _datastoreTransaction); } catch (error) { - _state = _TRANSACTION_COMMIT_FAILED; + _state = _transactionCommitFailed; rethrow; } } void _checkSealed({int? changeState, bool allowFailed = false}) { - if (_state == _TRANSACTION_COMMITTED) { + if (_state == _transactionCommitted) { throw StateError('The transaction has already been committed.'); - } else if (_state == _TRANSACTION_ROLLED_BACK) { + } else if (_state == _transactionRolledBack) { throw StateError('The transaction has already been rolled back.'); - } else if (_state == _TRANSACTION_COMMIT_FAILED && !allowFailed) { + } else if (_state == _transactionCommitFailed && !allowFailed) { throw StateError('The transaction has attempted commit and failed.'); } if (changeState != null) { @@ -413,7 +413,7 @@ Future _commitHelper(DatastoreDB db, ds.Transaction? datastoreTransaction}) { List? entityInserts, entityAutoIdInserts; List? entityDeletes; - late var autoIdModelInserts; + late List autoIdModelInserts; if (inserts != null) { entityInserts = []; entityAutoIdInserts = []; diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index fc4749a7..e17ddcff 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -198,14 +198,14 @@ class ModelDBImpl implements ModelDB { } void _initialize(Iterable libraries) { - libraries.forEach((mirrors.LibraryMirror lm) { + for (var lm in libraries) { lm.declarations.values .whereType() .where((d) => d.hasReflectedType) .forEach((declaration) { _tryLoadNewModelClass(declaration); }); - }); + } // Ask every [ModelDescription] to compute whatever global state it wants // to have. diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index ea202a5a..a546cfc8 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -76,8 +76,7 @@ class _PubSubImpl implements PubSub { } Future _modifyPushConfig(String subscription, Uri? endpoint) { - var pushConfig = pubsub.PushConfig() - ..pushEndpoint = endpoint != null ? endpoint.toString() : null; + var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint?.toString(); var request = pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig; return _api.projects.subscriptions.modifyPushConfig(request, subscription); } @@ -324,7 +323,7 @@ class _PullEventImpl implements PullEvent { /// /// decoded from JSON encoded push HTTP request body. class _PushEventImpl implements PushEvent { - static const PREFIX = '/subscriptions/'; + static const _prefix = '/subscriptions/'; final Message _message; final String _subscriptionName; @@ -349,8 +348,8 @@ class _PushEventImpl implements PushEvent { var subscription = body['subscription'] as String; // TODO(#1): Remove this when the push event subscription name is prefixed // with '/subscriptions/'. 
- if (!subscription.startsWith(PREFIX)) { - subscription = PREFIX + subscription; + if (!subscription.startsWith(_prefix)) { + subscription = _prefix + subscription; } return _PushEventImpl(_PushMessage(data, labels), subscription); } diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 6370280d..4f02224f 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -4,8 +4,8 @@ part of gcloud.storage; -const String _ABSOLUTE_PREFIX = 'gs://'; -const String _DIRECTORY_DELIMITER = '/'; +const String _absolutePrefix = 'gs://'; +const String _directoryDelimiter = '/'; /// Representation of an absolute name consisting of bucket name and object /// name. @@ -16,12 +16,12 @@ class _AbsoluteName { _AbsoluteName._(this.bucketName, this.objectName); factory _AbsoluteName.parse(String absoluteName) { - if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) { + if (!absoluteName.startsWith(_absolutePrefix)) { throw FormatException("Absolute name '$absoluteName' does not start " - "with '$_ABSOLUTE_PREFIX'"); + "with '$_absolutePrefix'"); } - var index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length); - if (index == -1 || index == _ABSOLUTE_PREFIX.length) { + var index = absoluteName.indexOf('/', _absolutePrefix.length); + if (index == -1 || index == _absolutePrefix.length) { throw FormatException("Absolute name '$absoluteName' does not have " 'a bucket name'); } @@ -29,7 +29,7 @@ class _AbsoluteName { throw FormatException("Absolute name '$absoluteName' does not have " 'an object name'); } - final bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index); + final bucketName = absoluteName.substring(_absolutePrefix.length, index); final objectName = absoluteName.substring(index + 1); return _AbsoluteName._(bucketName, objectName); @@ -48,7 +48,7 @@ class _StorageImpl implements Storage { Future createBucket(String bucketName, {PredefinedAcl? predefinedAcl, Acl? acl}) { var bucket = storage_api.Bucket()..name = bucketName; - var predefinedName = predefinedAcl != null ? predefinedAcl._name : null; + var predefinedName = predefinedAcl?._name; if (acl != null) { bucket.acl = acl._toBucketAccessControlList(); } @@ -157,7 +157,7 @@ class _BucketImpl implements Bucket { @override String absoluteObjectName(String objectName) { - return '$_ABSOLUTE_PREFIX$bucketName/$objectName'; + return '$_absolutePrefix$bucketName/$objectName'; } @override @@ -259,7 +259,7 @@ class _BucketImpl implements Bucket { @override Stream list({String? prefix, String? delimiter}) { - delimiter ??= _DIRECTORY_DELIMITER; + delimiter ??= _directoryDelimiter; Future<_ObjectPageImpl> firstPage(int pageSize) async { final response = await _listObjects(bucketName, prefix, delimiter, 50, null); @@ -272,7 +272,7 @@ class _BucketImpl implements Bucket { @override Future> page( {String? prefix, String? delimiter, int pageSize = 50}) async { - delimiter ??= _DIRECTORY_DELIMITER; + delimiter ??= _directoryDelimiter; final response = await _listObjects(bucketName, prefix, delimiter, pageSize, null); return _ObjectPageImpl(this, prefix, delimiter, pageSize, response); @@ -442,7 +442,7 @@ class _ObjectMetadata implements ObjectMetadata { String? contentLanguage, Map? custom}) : _object = storage_api.Object() { - _object.acl = acl != null ? 
acl._toObjectAccessControlList() : null; + _object.acl = acl?._toObjectAccessControlList(); _object.contentType = contentType; _object.contentEncoding = contentEncoding; _object.cacheControl = cacheControl; @@ -511,7 +511,7 @@ class _ObjectMetadata implements ObjectMetadata { /// It provides a StreamSink and logic which selects whether to use normal /// media upload (multipart mime) or resumable media upload. class _MediaUploadStreamSink implements StreamSink> { - static const int _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH = 1024 * 1024; + static const int _defaultMaxNormalUploadLength = 1024 * 1024; final storage_api.StorageApi _api; final String _bucketName; final String _objectName; @@ -526,25 +526,25 @@ class _MediaUploadStreamSink implements StreamSink> { late StreamController> _resumableController; final _doneCompleter = Completer(); - static const int _STATE_LENGTH_KNOWN = 0; - static const int _STATE_PROBING_LENGTH = 1; - static const int _STATE_DECIDED_RESUMABLE = 2; + static const int _stateLengthKnown = 0; + static const int _stateProbingLength = 1; + static const int _stateDecidedResumable = 2; int? _state; _MediaUploadStreamSink(this._api, this._bucketName, this._objectName, this._object, this._predefinedAcl, this._length, - [this._maxNormalUploadLength = _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH]) { + [this._maxNormalUploadLength = _defaultMaxNormalUploadLength]) { if (_length != null) { // If the length is known in advance decide on the upload strategy // immediately - _state = _STATE_LENGTH_KNOWN; + _state = _stateLengthKnown; if (_length! <= _maxNormalUploadLength) { _startNormalUpload(_controller.stream, _length); } else { _startResumableUpload(_controller.stream, _length); } } else { - _state = _STATE_PROBING_LENGTH; + _state = _stateProbingLength; // If the length is not known in advance decide on the upload strategy // later. Start buffering until enough data has been read to decide. _subscription = _controller.stream @@ -577,8 +577,8 @@ class _MediaUploadStreamSink implements StreamSink> { Future get done => _doneCompleter.future; void _onData(List data) { - assert(_state != _STATE_LENGTH_KNOWN); - if (_state == _STATE_PROBING_LENGTH) { + assert(_state != _stateLengthKnown); + if (_state == _stateProbingLength) { buffer.add(data); _bufferLength += data.length; if (_bufferLength > _maxNormalUploadLength) { @@ -587,16 +587,16 @@ class _MediaUploadStreamSink implements StreamSink> { _resumableController = StreamController>(sync: true); buffer.forEach(_resumableController.add); _startResumableUpload(_resumableController.stream, _length); - _state = _STATE_DECIDED_RESUMABLE; + _state = _stateDecidedResumable; } } else { - assert(_state == _STATE_DECIDED_RESUMABLE); + assert(_state == _stateDecidedResumable); _resumableController.add(data); } } void _onDone() { - if (_state == _STATE_PROBING_LENGTH) { + if (_state == _stateProbingLength) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. _startNormalUpload(Stream>.fromIterable(buffer), _bufferLength); @@ -608,7 +608,7 @@ class _MediaUploadStreamSink implements StreamSink> { void _onError(Object e, StackTrace s) { // If still deciding on the strategy complete with error. Otherwise // forward the error for default processing. 
- if (_state == _STATE_PROBING_LENGTH) { + if (_state == _stateProbingLength) { _completeError(e, s); } else { _resumableController.addError(e, s); @@ -616,7 +616,7 @@ class _MediaUploadStreamSink implements StreamSink> { } void _completeError(Object e, StackTrace s) { - if (_state != _STATE_LENGTH_KNOWN) { + if (_state != _stateLengthKnown) { // Always cancel subscription on error. _subscription.cancel(); } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index caa00cfc..4f6e4461 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -87,17 +87,17 @@ void registerStorageService(Storage storage) { } int _jenkinsHash(List e) { - const _HASH_MASK = 0x3fffffff; + const _hashMask = 0x3fffffff; var hash = 0; for (var i = 0; i < e.length; i++) { var c = e[i].hashCode; - hash = (hash + c) & _HASH_MASK; - hash = (hash + (hash << 10)) & _HASH_MASK; + hash = (hash + c) & _hashMask; + hash = (hash + (hash << 10)) & _hashMask; hash ^= (hash >> 6); } - hash = (hash + (hash << 3)) & _HASH_MASK; + hash = (hash + (hash << 3)) & _hashMask; hash ^= (hash >> 11); - hash = (hash + (hash << 15)) & _HASH_MASK; + hash = (hash + (hash << 15)) & _hashMask; return hash; } @@ -247,28 +247,28 @@ class AclEntry { /// See https://cloud.google.com/storage/docs/accesscontrol for more details. abstract class AclScope { /// ACL type for scope representing a Google Storage id. - static const int _TYPE_STORAGE_ID = 0; + static const int _typeStorageId = 0; /// ACL type for scope representing a project entity. - static const int _TYPE_PROJECT = 1; + static const int _typeProject = 1; /// ACL type for scope representing an account holder. - static const int _TYPE_ACCOUNT = 2; + static const int _typeAccount = 2; /// ACL type for scope representing a group. - static const int _TYPE_GROUP = 3; + static const int _typeGroup = 3; /// ACL type for scope representing a domain. - static const int _TYPE_DOMAIN = 4; + static const int _typeDomain = 4; /// ACL type for scope representing all authenticated users. - static const int _TYPE_ALL_AUTHENTICATED = 5; + static const int _typeAllAuthenticated = 5; /// ACL type for scope representing all users. - static const int _TYPE_ALL_USERS = 6; + static const int _typeAllUsers = 6; /// ACL type for scope representing an unsupported scope. - static const int _TYPE_OPAQUE = 7; + static const int _typeOpaque = 7; /// The id of the actual entity this ACL scope represents. The actual values /// are set in the different subclasses. @@ -305,7 +305,7 @@ abstract class AclScope { /// specific Google account holder or a specific Google group. class StorageIdScope extends AclScope { StorageIdScope(String storageId) - : super._(AclScope._TYPE_STORAGE_ID, storageId); + : super._(AclScope._typeStorageId, storageId); /// Google Storage ID. String get storageId => _id; @@ -316,7 +316,7 @@ class StorageIdScope extends AclScope { /// An ACL scope for an entity identified by an individual email address. class AccountScope extends AclScope { - AccountScope(String email) : super._(AclScope._TYPE_ACCOUNT, email); + AccountScope(String email) : super._(AclScope._typeAccount, email); /// Email address. String get email => _id; @@ -327,7 +327,7 @@ class AccountScope extends AclScope { /// An ACL scope for an entity identified by an Google Groups email. class GroupScope extends AclScope { - GroupScope(String group) : super._(AclScope._TYPE_GROUP, group); + GroupScope(String group) : super._(AclScope._typeGroup, group); /// Group name. 
String get group => _id; @@ -338,7 +338,7 @@ class GroupScope extends AclScope { /// An ACL scope for an entity identified by a domain name. class DomainScope extends AclScope { - DomainScope(String domain) : super._(AclScope._TYPE_DOMAIN, domain); + DomainScope(String domain) : super._(AclScope._typeDomain, domain); /// Domain name. String get domain => _id; @@ -355,7 +355,7 @@ class ProjectScope extends AclScope { final String role; ProjectScope(String project, this.role) - : super._(AclScope._TYPE_PROJECT, project); + : super._(AclScope._typeProject, project); /// Project ID. String get project => _id; @@ -366,7 +366,7 @@ class ProjectScope extends AclScope { /// An ACL scope for an unsupported scope. class OpaqueScope extends AclScope { - OpaqueScope(String id) : super._(AclScope._TYPE_OPAQUE, id); + OpaqueScope(String id) : super._(AclScope._typeOpaque, id); @override String get _storageEntity => _id; @@ -374,8 +374,7 @@ class OpaqueScope extends AclScope { /// ACL scope for a all authenticated users. class AllAuthenticatedScope extends AclScope { - AllAuthenticatedScope() - : super._(AclScope._TYPE_ALL_AUTHENTICATED, 'invalid'); + AllAuthenticatedScope() : super._(AclScope._typeAllAuthenticated, 'invalid'); @override String get _storageEntity => 'allAuthenticatedUsers'; @@ -383,7 +382,7 @@ class AllAuthenticatedScope extends AclScope { /// ACL scope for a all users. class AllUsersScope extends AclScope { - AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, 'invalid'); + AllUsersScope() : super._(AclScope._typeAllUsers, 'invalid'); @override String get _storageEntity => 'allUsers'; @@ -392,16 +391,19 @@ class AllUsersScope extends AclScope { /// Permissions for individual scopes in an ACL. class AclPermission { /// Provide read access. + // ignore: constant_identifier_names static const READ = AclPermission._('READER'); /// Provide write access. /// /// For objects this permission is the same as [FULL_CONTROL]. + // ignore: constant_identifier_names static const WRITE = AclPermission._('WRITER'); /// Provide full control. /// /// For objects this permission is the same as [WRITE]. + // ignore: constant_identifier_names static const FULL_CONTROL = AclPermission._('OWNER'); final String _id; @@ -493,6 +495,7 @@ abstract class BucketInfo { /// Access to Cloud Storage abstract class Storage { /// List of required OAuth2 scopes for Cloud Storage operation. + // ignore: constant_identifier_names static const List SCOPES = [ storage_api.StorageApi.devstorageFullControlScope ]; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index ac5a565f..b6f62b64 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.3 +version: 0.8.4-dev description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud @@ -15,6 +15,6 @@ dependencies: dev_dependencies: googleapis_auth: ^1.1.0 http_parser: ^4.0.0 + lints: ^1.0.0 mime: ^1.0.0 - pedantic: ^1.11.0 test: ^1.17.5 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index 0e1e5c14..c4af9317 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -11,9 +11,9 @@ import 'package:http_parser/http_parser.dart' as http_parser; import 'package:mime/mime.dart' as mime; import 'package:test/test.dart'; -const CONTENT_TYPE_JSON_UTF8 = 'application/json; charset=utf-8'; +const _contentTypeJsonUtf8 = 'application/json; charset=utf-8'; -const RESPONSE_HEADERS = {'content-type': CONTENT_TYPE_JSON_UTF8}; +const _responseHeaders = {'content-type': _contentTypeJsonUtf8}; class MockClient extends http.BaseClient { static const bytes = [1, 2, 3, 4, 5]; @@ -27,10 +27,8 @@ class MockClient extends http.BaseClient { Map> mocks = {}; late http_testing.MockClient client; - MockClient(String hostname, String rootPath) - : hostname = hostname, - rootPath = rootPath, - rootUri = Uri.parse('https://$hostname$rootPath') { + MockClient(this.hostname, this.rootPath) + : rootUri = Uri.parse('https://$hostname$rootPath') { client = http_testing.MockClient(handler); } @@ -91,15 +89,15 @@ class MockClient extends http.BaseClient { Future respond(response) { return Future.value(http.Response(jsonEncode(response.toJson()), 200, - headers: RESPONSE_HEADERS)); + headers: _responseHeaders)); } Future respondEmpty() { - return Future.value(http.Response('{}', 200, headers: RESPONSE_HEADERS)); + return Future.value(http.Response('{}', 200, headers: _responseHeaders)); } Future respondInitiateResumableUpload(project) { - final headers = Map.from(RESPONSE_HEADERS); + final headers = Map.from(_responseHeaders); headers['location'] = 'https://$hostname/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' 'upload_id=AEnB2UqucpaWy7d5cr5iVQzmbQcQlLDIKiClrm0SAX3rJ7UN' @@ -108,14 +106,14 @@ class MockClient extends http.BaseClient { } Future respondContinueResumableUpload() { - return Future.value(http.Response('', 308, headers: RESPONSE_HEADERS)); + return Future.value(http.Response('', 308, headers: _responseHeaders)); } Future respondBytes(http.Request request) async { expect(request.url.queryParameters['alt'], 'media'); var myBytes = bytes; - var headers = Map.from(RESPONSE_HEADERS); + var headers = Map.from(_responseHeaders); var range = request.headers['range']; if (range != null) { @@ -137,7 +135,7 @@ class MockClient extends http.BaseClient { 'error': {'code': statusCode, 'message': 'error'} }; return Future.value(http.Response(jsonEncode(error), statusCode, - headers: RESPONSE_HEADERS)); + headers: _responseHeaders)); } Future processNormalMediaUpload(http.Request request) { diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index 586b1fce..ed35e482 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -12,11 +12,11 @@ import 'package:http/http.dart' as http; import 'common.dart'; -const PROJECT = 'test-project'; +const testProject = 'test-project'; // Environment variables for specifying the cloud project to use and the // location of the service account key for that project. 
-const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; +const projectEnv = 'GCLOUD_E2E_TEST_PROJECT'; // Used for storage e2e tests: // @@ -26,17 +26,17 @@ const String PROJECT_ENV = 'GCLOUD_E2E_TEST_PROJECT'; // // So this can make tests flaky. The following delay is introduced as an // attempt to account for that. -const STORAGE_LIST_DELAY = Duration(seconds: 5); +const storageListDelay = Duration(seconds: 5); Future withAuthClient( List scopes, Future Function(String project, http.Client client) callback, { bool trace = false, }) async { - var project = Platform.environment[PROJECT_ENV]; + var project = Platform.environment[projectEnv]; if (project == null) { - throw StateError('Environment variables $PROJECT_ENV '); + throw StateError('Environment variables $projectEnv '); } http.Client client = await auth.clientViaApplicationDefaultCredentials( diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index de134b01..0bc983e2 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -221,7 +221,9 @@ void runTests(Datastore datastore, String? namespace) { return testInsert(unnamedEntities5, transactional: false).then((keys) { return delete(keys).then((_) { return lookup(keys).then((List entities) { - entities.forEach((Entity? e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -231,7 +233,9 @@ void runTests(Datastore datastore, String? namespace) { return testInsert(unnamedEntities1, transactional: true).then((keys) { return delete(keys).then((_) { return lookup(keys).then((List entities) { - entities.forEach((Entity? e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -242,7 +246,9 @@ void runTests(Datastore datastore, String? namespace) { .then((keys) { return delete(keys).then((_) { return lookup(keys).then((List entities) { - entities.forEach((Entity? e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -356,7 +362,9 @@ void runTests(Datastore datastore, String? namespace) { test('lookup', () { return insert([], unnamedEntities20, transactional: false).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return testLookup(keys, unnamedEntities20).then((_) { return delete(keys, transactional: false); }); @@ -375,7 +383,9 @@ void runTests(Datastore datastore, String? namespace) { test('lookup_transactional', () { return insert([], unnamedEntities1).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return testLookup(keys, unnamedEntities1, transactional: true) .then((_) => delete(keys)); }); @@ -383,7 +393,9 @@ void runTests(Datastore datastore, String? namespace) { test('lookup_transactional_xg', () { return insert([], unnamedEntities5).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return testLookup(keys, unnamedEntities5, transactional: true, xg: true) .then((_) { @@ -417,12 +429,18 @@ void runTests(Datastore datastore, String? 
namespace) { test('delete', () { return insert([], unnamedEntities99, transactional: false).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return lookup(keys, transactional: false).then((entities) { - entities.forEach((e) => expect(e, isNotNull)); + for (var e in entities) { + expect(e, isNotNull); + } return testDelete(keys).then((_) { return lookup(keys, transactional: false).then((entities) { - entities.forEach((e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -433,12 +451,18 @@ void runTests(Datastore datastore, String? namespace) { // FIXME TODO FIXME : look into this. test('delete_transactional', () { return insert([], unnamedEntities99, transactional: false).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return lookup(keys, transactional: false).then((entities) { - entities.forEach((e) => expect(e, isNotNull)); + for (var e in entities) { + expect(e, isNotNull); + } return testDelete(keys, transactional: true).then((_) { return lookup(keys, transactional: false).then((entities) { - entities.forEach((e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -447,14 +471,20 @@ void runTests(Datastore datastore, String? namespace) { test('delete_transactional_xg', () { return insert([], unnamedEntities99, transactional: false).then((keys) { - keys.forEach((key) => expect(isValidKey(key), isTrue)); + for (var key in keys) { + expect(isValidKey(key), isTrue); + } return lookup(keys, transactional: false).then((entities) { expect(entities.length, equals(unnamedEntities99.length)); - entities.forEach((e) => expect(e, isNotNull)); + for (var e in entities) { + expect(e, isNotNull); + } return testDelete(keys, transactional: true, xg: true).then((_) { return lookup(keys, transactional: false).then((entities) { expect(entities.length, equals(unnamedEntities99.length)); - entities.forEach((e) => expect(e, isNull)); + for (var e in entities) { + expect(e, isNull); + } }); }); }); @@ -568,11 +598,11 @@ void runTests(Datastore datastore, String? namespace) { return insert(entities, [], transactional: true).then((_) { var keys = entities.map((e) => e.key).toList(); - var NUM_TRANSACTIONS = 10; + var numTransactions = 10; // Start transactions var transactions = >[]; - for (var i = 0; i < NUM_TRANSACTIONS; i++) { + for (var i = 0; i < numTransactions; i++) { transactions.add(datastore.beginTransaction(crossEntityGroup: xg)); } return Future.wait(transactions) @@ -713,38 +743,40 @@ void runTests(Datastore datastore, String? 
namespace) { return Future.forEach(queryTests, (dynamic f) => f()); } - const TEST_QUERY_KIND = 'TestQueryKind'; + const testQueryKind = 'TestQueryKind'; var stringNamedEntities = buildEntities(1, 6, idFunction: (i) => 'str$i', - kind: TEST_QUERY_KIND, + kind: testQueryKind, partition: partition); var stringNamedKeys = stringNamedEntities.map((e) => e.key).toList(); - var QUERY_KEY = TEST_PROPERTY_KEY_PREFIX; - var QUERY_UPPER_BOUND = '${TEST_PROPERTY_VALUE_PREFIX}4'; - var QUERY_LOWER_BOUND = '${TEST_PROPERTY_VALUE_PREFIX}1'; - var QUERY_LIST_ENTRY = '${TEST_LIST_VALUE}2'; - var QUERY_INDEX_VALUE = '${TEST_INDEXED_PROPERTY_VALUE_PREFIX}1'; + var queryKey = testPropertyKeyPrefix; + var queryUpperbound = '${testPropertyValuePrefix}4'; + var queryLowerBound = '${testPropertyValuePrefix}1'; + var queryListEntry = '${testListValue}2'; + var queryIndexValue = '${testIndexedPropertyValuePrefix}1'; - var reverseOrderFunction = (Entity a, Entity b) { + reverseOrderFunction(Entity a, Entity b) { // Reverse the order return -1 * - (a.properties[QUERY_KEY] as String) - .compareTo(b.properties[QUERY_KEY].toString()); - }; - - var filterFunction = (Entity entity) { - var value = entity.properties[QUERY_KEY] as String; - return value.compareTo(QUERY_UPPER_BOUND) == -1 && - value.compareTo(QUERY_LOWER_BOUND) == 1; - }; - var listFilterFunction = (Entity entity) { - var values = entity.properties[TEST_LIST_PROPERTY] as List; - return values.contains(QUERY_LIST_ENTRY); - }; - var indexFilterMatches = (Entity entity) { - return entity.properties[TEST_INDEXED_PROPERTY] == QUERY_INDEX_VALUE; - }; + (a.properties[queryKey] as String) + .compareTo(b.properties[queryKey].toString()); + } + + filterFunction(Entity entity) { + var value = entity.properties[queryKey] as String; + return value.compareTo(queryUpperbound) == -1 && + value.compareTo(queryLowerBound) == 1; + } + + listFilterFunction(Entity entity) { + var values = entity.properties[testListProperty] as List; + return values.contains(queryListEntry); + } + + indexFilterMatches(Entity entity) { + return entity.properties[testIndexedProperty] == queryIndexValue; + } var sorted = stringNamedEntities.toList()..sort(reverseOrderFunction); var filtered = stringNamedEntities.where(filterFunction).toList(); @@ -754,22 +786,22 @@ void runTests(Datastore datastore, String? 
namespace) { assert(indexedEntity.length == 1); var filters = [ - Filter(FilterRelation.GreatherThan, QUERY_KEY, QUERY_LOWER_BOUND), - Filter(FilterRelation.LessThan, QUERY_KEY, QUERY_UPPER_BOUND), + Filter(FilterRelation.GreatherThan, queryKey, queryLowerBound), + Filter(FilterRelation.LessThan, queryKey, queryUpperbound), ]; var listFilters = [ - Filter(FilterRelation.Equal, TEST_LIST_PROPERTY, QUERY_LIST_ENTRY) + Filter(FilterRelation.Equal, testListProperty, queryListEntry) ]; var indexedPropertyFilter = [ - Filter(FilterRelation.Equal, TEST_INDEXED_PROPERTY, QUERY_INDEX_VALUE), - Filter(FilterRelation.Equal, TEST_BLOB_INDEXED_PROPERTY, - TEST_BLOB_INDEXED_VALUE) + Filter(FilterRelation.Equal, testIndexedProperty, queryIndexValue), + Filter( + FilterRelation.Equal, testBlobIndexedProperty, testBlobIndexedValue) ]; var unIndexedPropertyFilter = [ - Filter(FilterRelation.Equal, TEST_UNINDEXED_PROPERTY, QUERY_INDEX_VALUE) + Filter(FilterRelation.Equal, testUnindexedProperty, queryIndexValue) ]; - var orders = [Order(OrderDirection.Decending, QUERY_KEY)]; + var orders = [Order(OrderDirection.Decending, queryKey)]; test('query', () { return insert(stringNamedEntities, []).then((keys) { @@ -777,75 +809,75 @@ void runTests(Datastore datastore, String? namespace) { .then((_) { var tests = [ // EntityKind query - () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + () => testQueryAndCompare(testQueryKind, stringNamedEntities, transactional: false, correctOrder: false), - () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + () => testQueryAndCompare(testQueryKind, stringNamedEntities, transactional: true, correctOrder: false), - () => testQueryAndCompare(TEST_QUERY_KIND, stringNamedEntities, + () => testQueryAndCompare(testQueryKind, stringNamedEntities, transactional: true, correctOrder: false, xg: true), // EntityKind query with order - () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + () => testQueryAndCompare(testQueryKind, sorted, transactional: false, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + () => testQueryAndCompare(testQueryKind, sorted, transactional: true, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sorted, + () => testQueryAndCompare(testQueryKind, sorted, transactional: false, xg: true, orders: orders), // EntityKind query with filter - () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + () => testQueryAndCompare(testQueryKind, filtered, transactional: false, filters: filters), - () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + () => testQueryAndCompare(testQueryKind, filtered, transactional: true, filters: filters), - () => testQueryAndCompare(TEST_QUERY_KIND, filtered, + () => testQueryAndCompare(testQueryKind, filtered, transactional: false, xg: true, filters: filters), // EntityKind query with filter + order - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + () => testQueryAndCompare(testQueryKind, sortedAndFiltered, transactional: false, filters: filters, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + () => testQueryAndCompare(testQueryKind, sortedAndFiltered, transactional: true, filters: filters, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndFiltered, + () => testQueryAndCompare(testQueryKind, sortedAndFiltered, transactional: false, xg: true, filters: filters, orders: orders), // EntityKind query with IN filter + order - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + () => 
testQueryAndCompare(testQueryKind, sortedAndListFiltered, transactional: false, filters: listFilters, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + () => testQueryAndCompare(testQueryKind, sortedAndListFiltered, transactional: true, filters: listFilters, orders: orders), - () => testQueryAndCompare(TEST_QUERY_KIND, sortedAndListFiltered, + () => testQueryAndCompare(testQueryKind, sortedAndListFiltered, transactional: false, xg: true, filters: listFilters, orders: orders), // Limit & Offset test - () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + () => testOffsetLimitQuery(testQueryKind, sorted, transactional: false, orders: orders), - () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + () => testOffsetLimitQuery(testQueryKind, sorted, transactional: true, orders: orders), - () => testOffsetLimitQuery(TEST_QUERY_KIND, sorted, + () => testOffsetLimitQuery(testQueryKind, sorted, transactional: false, xg: true, orders: orders), // Query for indexed property - () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + () => testQueryAndCompare(testQueryKind, indexedEntity, transactional: false, filters: indexedPropertyFilter), - () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + () => testQueryAndCompare(testQueryKind, indexedEntity, transactional: true, filters: indexedPropertyFilter), - () => testQueryAndCompare(TEST_QUERY_KIND, indexedEntity, + () => testQueryAndCompare(testQueryKind, indexedEntity, transactional: false, xg: true, filters: indexedPropertyFilter), // Query for un-indexed property - () => testQueryAndCompare(TEST_QUERY_KIND, [], + () => testQueryAndCompare(testQueryKind, [], transactional: false, filters: unIndexedPropertyFilter), - () => testQueryAndCompare(TEST_QUERY_KIND, [], + () => testQueryAndCompare(testQueryKind, [], transactional: true, filters: unIndexedPropertyFilter), - () => testQueryAndCompare(TEST_QUERY_KIND, [], + () => testQueryAndCompare(testQueryKind, [], transactional: false, xg: true, filters: unIndexedPropertyFilter), @@ -858,13 +890,12 @@ void runTests(Datastore datastore, String? 
namespace) { waitUntilEntitiesGone(datastore, stringNamedKeys, partition), // Make sure queries don't return results - () => testQueryAndCompare(TEST_QUERY_KIND, [], - transactional: false), () => - testQueryAndCompare(TEST_QUERY_KIND, [], transactional: true), - () => testQueryAndCompare(TEST_QUERY_KIND, [], + testQueryAndCompare(testQueryKind, [], transactional: false), + () => testQueryAndCompare(testQueryKind, [], transactional: true), + () => testQueryAndCompare(testQueryKind, [], transactional: true, xg: true), - () => testQueryAndCompare(TEST_QUERY_KIND, [], + () => testQueryAndCompare(testQueryKind, [], transactional: false, filters: filters, orders: orders), ]; return Future.forEach(tests, (dynamic f) => f()); @@ -1109,7 +1140,7 @@ Future main() async { late Datastore datastore; late Client client; - var scopes = datastore_impl.DatastoreImpl.SCOPES; + var scopes = datastore_impl.DatastoreImpl.scopes; await withAuthClient(scopes, (String project, Client httpClient) { datastore = datastore_impl.DatastoreImpl(httpClient, project); client = httpClient; diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index 6f722c15..f5dfb106 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -6,22 +6,22 @@ library raw_datastore_test_utils; import 'package:gcloud/datastore.dart'; -const TEST_KIND = 'TestKind'; -const TEST_PROPERTY_KEY_PREFIX = 'test_property'; -const TEST_LIST_PROPERTY = 'listproperty'; -const TEST_LIST_VALUE = 'listvalue'; -const TEST_PROPERTY_VALUE_PREFIX = 'test_property'; +const _testKind = 'TestKind'; +const testPropertyKeyPrefix = 'test_property'; +const testListProperty = 'listproperty'; +const testListValue = 'listvalue'; +const testPropertyValuePrefix = 'test_property'; -const TEST_INDEXED_PROPERTY = 'indexedProp'; -const TEST_INDEXED_PROPERTY_VALUE_PREFIX = 'indexedValue'; -const TEST_UNINDEXED_PROPERTY = 'unindexedProp'; -const TEST_BLOB_INDEXED_PROPERTY = 'blobPropertyIndexed'; -final TEST_BLOB_INDEXED_VALUE = BlobValue([0xaa, 0xaa, 0xff, 0xff]); +const testIndexedProperty = 'indexedProp'; +const testIndexedPropertyValuePrefix = 'indexedValue'; +const testUnindexedProperty = 'unindexedProp'; +const testBlobIndexedProperty = 'blobPropertyIndexed'; +final testBlobIndexedValue = BlobValue([0xaa, 0xaa, 0xff, 0xff]); Key buildKey( int i, { Object Function(int)? idFunction, - String kind = TEST_KIND, + String kind = _testKind, Partition? p, }) { var path = [KeyElement(kind, idFunction == null ? null : idFunction(i))]; @@ -31,15 +31,15 @@ Key buildKey( Map buildProperties(int i) { var listValues = [ 'foo', - '$TEST_LIST_VALUE$i', + '$testListValue$i', ]; return { - TEST_PROPERTY_KEY_PREFIX: '$TEST_PROPERTY_VALUE_PREFIX$i', - TEST_LIST_PROPERTY: listValues, - TEST_INDEXED_PROPERTY: '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', - TEST_UNINDEXED_PROPERTY: '$TEST_INDEXED_PROPERTY_VALUE_PREFIX$i', - TEST_BLOB_INDEXED_PROPERTY: TEST_BLOB_INDEXED_VALUE, + testPropertyKeyPrefix: '$testPropertyValuePrefix$i', + testListProperty: listValues, + testIndexedProperty: '$testIndexedPropertyValuePrefix$i', + testUnindexedProperty: '$testIndexedPropertyValuePrefix$i', + testBlobIndexedProperty: testBlobIndexedValue, }; } @@ -47,7 +47,7 @@ List buildKeys( int from, int to, { Object Function(int)? idFunction, - String kind = TEST_KIND, + String kind = _testKind, Partition? partition, }) { var keys = []; @@ -61,7 +61,7 @@ List buildEntities( int from, int to, { Object Function(int)? 
idFunction, - String kind = TEST_KIND, + String kind = _testKind, Partition? partition, }) { var entities = []; @@ -69,7 +69,7 @@ List buildEntities( for (var i = from; i < to; i++) { var key = buildKey(i, idFunction: idFunction, kind: kind, p: partition); var properties = buildProperties(i); - unIndexedProperties.add(TEST_UNINDEXED_PROPERTY); + unIndexedProperties.add(testUnindexedProperty); entities .add(Entity(key, properties, unIndexedProperties: unIndexedProperties)); } @@ -77,7 +77,7 @@ List buildEntities( } List buildEntityWithAllProperties(int from, int to, - {String kind = TEST_KIND, Partition? partition}) { + {String kind = _testKind, Partition? partition}) { var us42 = const Duration(microseconds: 42); var unIndexed = {'blobProperty'}; diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 84839066..9e55c54e 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -43,7 +43,6 @@ library db_test; /// $ gcloud datastore create-indexes index.yaml /// /// Now, wait for indexing done - import 'dart:async'; import 'package:gcloud/db.dart' as db; @@ -66,6 +65,7 @@ class Person extends db.Model { db.Key? wife; @override + // ignore: hash_and_equals bool operator ==(Object other) => sameAs(other); bool sameAs(Object other) { @@ -92,6 +92,7 @@ class PersonStringId extends db.Model { db.Key? wife; @override + // ignore: hash_and_equals bool operator ==(Object other) => sameAs(other); bool sameAs(Object other) { @@ -151,6 +152,7 @@ class ExpandoPerson extends db.ExpandoModel { String? nickname; @override + // ignore: hash_and_equals bool operator ==(Object other) { if (other is ExpandoPerson && id == other.id && name == other.name) { if (additionalProperties.length != other.additionalProperties.length) { @@ -457,7 +459,7 @@ void runTests(db.DatastoreDB store, String? namespace) { expandoPersons.add(expandoPerson as ExpandoPerson); } - var LOWER_BOUND = 'user2'; + var lowerBound = 'user2'; var usersSortedNameDescNicknameAsc = List.from(users); usersSortedNameDescNicknameAsc.sort((User a, User b) { @@ -475,12 +477,12 @@ void runTests(db.DatastoreDB store, String? namespace) { var usersSortedAndFilteredNameDescNicknameAsc = usersSortedNameDescNicknameAsc.where((User u) { - return LOWER_BOUND.compareTo(u.name!) <= 0; + return lowerBound.compareTo(u.name!) <= 0; }).toList(); var usersSortedAndFilteredNameDescNicknameDesc = usersSortedNameDescNicknameDesc.where((User u) { - return LOWER_BOUND.compareTo(u.name!) <= 0; + return lowerBound.compareTo(u.name!) <= 0; }).toList(); var fooUsers = @@ -546,7 +548,7 @@ void runTests(db.DatastoreDB store, String? namespace) { // Sorted query with filter () async { var query = store.query(partition: partition) - ..filter('name >=', LOWER_BOUND) + ..filter('name >=', lowerBound) ..order('-name') ..order('nickname'); var models = await runQueryWithExponentialBackoff( @@ -555,7 +557,7 @@ void runTests(db.DatastoreDB store, String? namespace) { }, () async { var query = store.query(partition: partition) - ..filter('name >=', LOWER_BOUND) + ..filter('name >=', lowerBound) ..order('-name') ..order('-nickname') ..run(); @@ -726,7 +728,6 @@ Future waitUntilEntitiesHelper( } } } - return null; } } @@ -734,7 +735,7 @@ Future main() async { late db.DatastoreDB store; BaseClient? 
client; - var scopes = datastore_impl.DatastoreImpl.SCOPES; + var scopes = datastore_impl.DatastoreImpl.scopes; await withAuthClient(scopes, (String project, httpClient) { var datastore = datastore_impl.DatastoreImpl(httpClient, project); return datastore_test.cleanupDB(datastore, null).then((_) { diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index 98690ea7..d028614c 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -16,5 +16,6 @@ class A extends db.Model { class B extends A { @override @db.IntProperty(propertyName: 'bar') + // ignore: overridden_fields int? foo; } diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 15be265b..61547876 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -245,6 +245,7 @@ class KeyMock implements Key { @override Key cast() => Key(parent, type, id as U); @override + // ignore: hash_and_equals int get hashCode => 1; } diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 720cc8eb..239689f8 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -22,7 +22,7 @@ import 'db/e2e/db_test_impl.dart' as db_test; import 'db/e2e/metamodel_test_impl.dart' as db_metamodel_test; Future main() async { - var scopes = datastore_impl.DatastoreImpl.SCOPES; + var scopes = datastore_impl.DatastoreImpl.scopes; var now = DateTime.now().millisecondsSinceEpoch; var namespace = '${Platform.operatingSystem}$now'; diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 5a9d54ef..2f73b98c 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -13,49 +13,49 @@ import 'package:test/test.dart'; import '../common.dart'; import '../common_e2e.dart'; -const String HOSTNAME = 'pubsub.googleapis.com'; -const String ROOT_PATH = '/v1/'; +const _hostName = 'pubsub.googleapis.com'; +const _rootPath = '/v1/'; -MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => MockClient(_hostName, _rootPath); void main() { group('api', () { var badTopicNames = [ 'projects/', 'projects/topics', - 'projects/$PROJECT', - 'projects/$PROJECT/', - 'projects/$PROJECT/topics', - 'projects/$PROJECT/topics/' + 'projects/$testProject', + 'projects/$testProject/', + 'projects/$testProject/topics', + 'projects/$testProject/topics/' ]; var badSubscriptionNames = [ 'projects/', 'projects/subscriptions', - 'projects/$PROJECT', - 'projects/$PROJECT/', - 'projects/$PROJECT/subscriptions', - 'projects/$PROJECT/subscriptions/' + 'projects/$testProject', + 'projects/$testProject/', + 'projects/$testProject/subscriptions', + 'projects/$testProject/subscriptions/' ]; group('topic', () { var name = 'test-topic'; - var absoluteName = 'projects/$PROJECT/topics/test-topic'; + var absoluteName = 'projects/$testProject/topics/test-topic'; test('create', () { var mock = mockClient(); mock.register( 'PUT', - 'projects/$PROJECT/topics/test-topic', + 'projects/$testProject/topics/test-topic', expectAsync1((http.Request request) { expect(request.body, '{}'); return mock.respond(pubsub.Topic()..name = absoluteName); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.createTopic(name).then(expectAsync1((topic) { expect(topic.name, 
name); - expect(topic.project, PROJECT); + expect(topic.project, testProject); expect(topic.absoluteName, absoluteName); return api.createTopic(absoluteName).then(expectAsync1((topic) { expect(topic.name, name); @@ -66,13 +66,13 @@ void main() { test('create-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badTopicNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { expect(() => api.createTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { + } + for (var name in badSubscriptionNames) { expect(() => api.createTopic(name), throwsArgumentError); - }); + } }); test('delete', () { @@ -85,7 +85,7 @@ void main() { return mock.respondEmpty(); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.deleteTopic(name).then(expectAsync1((result) { expect(result, isNull); return api.deleteTopic(absoluteName).then(expectAsync1((topic) { @@ -96,13 +96,13 @@ void main() { test('delete-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badTopicNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { expect(() => api.deleteTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { + } + for (var name in badSubscriptionNames) { expect(() => api.deleteTopic(name), throwsArgumentError); - }); + } }); test('lookup', () { @@ -115,10 +115,10 @@ void main() { return mock.respond(pubsub.Topic()..name = absoluteName); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); - expect(topic.project, PROJECT); + expect(topic.project, testProject); expect(topic.absoluteName, absoluteName); return api.lookupTopic(absoluteName).then(expectAsync1((topic) { expect(topic.name, name); @@ -129,13 +129,13 @@ void main() { test('lookup-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badTopicNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badTopicNames) { expect(() => api.lookupTopic(name), throwsArgumentError); - }); - badSubscriptionNames.forEach((name) { + } + for (var name in badSubscriptionNames) { expect(() => api.lookupTopic(name), throwsArgumentError); - }); + } }); group('query', () { @@ -164,7 +164,7 @@ void main() { var pageCount = 0; mock.register( 'GET', - 'projects/$PROJECT/topics', + 'projects/$testProject/topics', expectAsync1((request) { pageCount++; expect(request.url.queryParameters['pageSize'], '$pageSize'); @@ -190,10 +190,10 @@ void main() { var mock = mockClient(); registerQueryMock(mock, count, 50); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api .listTopics() - .listen(expectAsync1((_) => null, count: count)) + .listen(expectAsync1((_) {}, count: count)) .asFuture(); } @@ -214,9 +214,9 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = PubSub(mock, PROJECT); - api.listTopics().listen(expectAsync1(((_) => null), count: 70), - onDone: expectAsync0(() => null)) + var api = PubSub(mock, testProject); + api.listTopics().listen(expectAsync1(((_) {}), count: 70), + onDone: expectAsync0(() {})) ..pause() ..resume() ..pause() @@ -227,9 +227,9 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); var count = 0; - var subscription; + late 
StreamSubscription subscription; subscription = api.listTopics().listen( expectAsync1(((_) { subscription @@ -241,18 +241,19 @@ void main() { } else { scheduleMicrotask(() => subscription.resume()); } - return null; + return; }), count: 70), - onDone: expectAsync0(() => null)) + onDone: expectAsync0(() {})) ..pause(); scheduleMicrotask(() => subscription.resume()); + addTearDown(() => subscription.cancel()); }); test('immediate-cancel', () { var mock = mockClient(); registerQueryMock(mock, 70, 50, 1); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); api .listTopics() .listen((_) => throw 'Unexpected', @@ -264,8 +265,8 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 170, 50, 1); - var api = PubSub(mock, PROJECT); - var subscription; + var api = PubSub(mock, testProject); + late StreamSubscription subscription; subscription = api.listTopics().listen( expectAsync1((_) => subscription.cancel()), onDone: () => throw 'Unexpected'); @@ -275,20 +276,21 @@ void main() { void runTest(bool withPause) { // Test error on first GET request. var mock = mockClient(); - mock.register('GET', 'projects/$PROJECT/topics', + mock.register('GET', 'projects/$testProject/topics', expectAsync1((request) { return mock.respondError(500); })); - var api = PubSub(mock, PROJECT); - var subscription; + var api = PubSub(mock, testProject); + StreamSubscription subscription; subscription = api.listTopics().listen((_) => throw 'Unexpected', - onDone: expectAsync0(() => null), + onDone: expectAsync0(() {}), onError: expectAsync1((e) => e is pubsub.DetailedApiRequestError)); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); } + addTearDown(() => subscription.cancel()); } runTest(false); @@ -301,29 +303,31 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 51, 50, 1); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); var count = 0; - var subscription; + late StreamSubscription subscription; subscription = api.listTopics().listen( - expectAsync1(((_) { - count++; - if (count == 50) { - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); + expectAsync1(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register('GET', 'projects/$testProject/topics', + expectAsync1((request) { + return mock.respondError(500); + })); } - mock.clear(); - mock.register('GET', 'projects/$PROJECT/topics', - expectAsync1((request) { - return mock.respondError(500); - })); - } - return null; - }), count: 50), - onDone: expectAsync0(() => null), - onError: - expectAsync1((e) => e is pubsub.DetailedApiRequestError)); + return; + }), count: 50), + onDone: expectAsync0(() {}), + onError: expectAsync1( + (e) => e is pubsub.DetailedApiRequestError), + ); + addTearDown(() => subscription.cancel()); } runTest(false); @@ -336,7 +340,7 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 0, 50); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 0); expect(page.isLast, isTrue); @@ -356,7 +360,7 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 10, 50); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.pageTopics().then(expectAsync1((page) { expect(page.items.length, 10); expect(page.isLast, isTrue); @@ -397,7 +401,7 @@ void main() { } } - 
var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); api.pageTopics(pageSize: pageSize).then(expectAsync1(handlePage)); return completer.future; @@ -420,22 +424,23 @@ void main() { group('subscription', () { var name = 'test-subscription'; - var absoluteName = 'projects/$PROJECT/subscriptions/test-subscription'; + var absoluteName = + 'projects/$testProject/subscriptions/test-subscription'; var topicName = 'test-topic'; - var absoluteTopicName = 'projects/$PROJECT/topics/test-topic'; + var absoluteTopicName = 'projects/$testProject/topics/test-topic'; test('create', () { var mock = mockClient(); mock.register( 'PUT', - 'projects/$PROJECT/subscriptions', + 'projects/$testProject/subscriptions', expectAsync1((request) { var requestSubscription = jsonDecode(request.body) as Map; expect(requestSubscription['topic'], absoluteTopicName); return mock.respond(pubsub.Subscription()..name = absoluteName); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api .createSubscription(name, topicName) .then(expectAsync1((subscription) { @@ -445,7 +450,7 @@ void main() { .createSubscription(absoluteName, absoluteTopicName) .then(expectAsync1((subscription) { expect(subscription.name, name); - expect(subscription.project, PROJECT); + expect(subscription.project, testProject); expect(subscription.absoluteName, absoluteName); })); })); @@ -453,28 +458,28 @@ void main() { test('create-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { expect(() => api.createSubscription(name, 'test-topic'), throwsArgumentError); - }); - badTopicNames.forEach((name) { + } + for (var name in badTopicNames) { expect(() => api.createSubscription('test-subscription', name), throwsArgumentError); - }); + } }); test('delete', () { var mock = mockClient(); mock.register( 'DELETE', - 'projects/$PROJECT/subscriptions', + 'projects/$testProject/subscriptions', expectAsync1((request) { expect(request.body.length, 0); return mock.respondEmpty(); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.deleteSubscription(name).then(expectAsync1((result) { expect(result, isNull); return api @@ -487,26 +492,26 @@ void main() { test('delete-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { expect(() => api.deleteSubscription(name), throwsArgumentError); - }); - badTopicNames.forEach((name) { + } + for (var name in badTopicNames) { expect(() => api.deleteSubscription(name), throwsArgumentError); - }); + } }); test('lookup', () { var mock = mockClient(); mock.register( 'GET', - RegExp('projects/$PROJECT/subscriptions'), + RegExp('projects/$testProject/subscriptions'), expectAsync1((request) { expect(request.body.length, 0); return mock.respond(pubsub.Subscription()..name = absoluteName); }, count: 2)); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); @@ -514,7 +519,7 @@ void main() { .lookupSubscription(absoluteName) .then(expectAsync1((subscription) { expect(subscription.name, name); - expect(subscription.project, PROJECT); + expect(subscription.project, testProject); 
expect(subscription.absoluteName, absoluteName); })); })); @@ -522,13 +527,13 @@ void main() { test('lookup-error', () { var mock = mockClient(); - var api = PubSub(mock, PROJECT); - badSubscriptionNames.forEach((name) { + var api = PubSub(mock, testProject); + for (var name in badSubscriptionNames) { expect(() => api.lookupSubscription(name), throwsArgumentError); - }); - badTopicNames.forEach((name) { + } + for (var name in badTopicNames) { expect(() => api.lookupSubscription(name), throwsArgumentError); - }); + } }); group('query', () { @@ -554,7 +559,7 @@ void main() { var pageCount = 0; mock.register( 'GET', - 'projects/$PROJECT/subscriptions', + 'projects/$testProject/subscriptions', expectAsync1((request) { pageCount++; expect(request.url.queryParameters['pageSize'], '$pageSize'); @@ -581,11 +586,11 @@ void main() { var mock = mockClient(); registerQueryMock(mock, count, 50, topic: topic); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return (topic == null ? api.listSubscriptions() : api.listSubscriptions(topic)) - .listen(expectAsync1((_) => null, count: count)) + .listen(expectAsync1((_) {}, count: count)) .asFuture(); } @@ -616,10 +621,9 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = PubSub(mock, PROJECT); - api.listSubscriptions().listen( - expectAsync1(((_) => null), count: 70), - onDone: expectAsync0(() => null)) + var api = PubSub(mock, testProject); + api.listSubscriptions().listen(expectAsync1(((_) {}), count: 70), + onDone: expectAsync0(() {})) ..pause() ..resume() ..pause() @@ -630,9 +634,9 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 70, 50); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); var count = 0; - var subscription; + late StreamSubscription subscription; subscription = api.listSubscriptions().listen( expectAsync1(((_) { subscription @@ -644,18 +648,19 @@ void main() { } else { scheduleMicrotask(() => subscription.resume()); } - return null; + return; }), count: 70), - onDone: expectAsync0(() => null)) + onDone: expectAsync0(() {})) ..pause(); scheduleMicrotask(() => subscription.resume()); + addTearDown(() => subscription.cancel()); }); test('immediate-cancel', () { var mock = mockClient(); registerQueryMock(mock, 70, 50, totalCalls: 1); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); api .listSubscriptions() .listen((_) => throw 'Unexpected', @@ -667,8 +672,8 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 170, 50, totalCalls: 1); - var api = PubSub(mock, PROJECT); - var subscription; + var api = PubSub(mock, testProject); + late StreamSubscription subscription; subscription = api.listSubscriptions().listen( expectAsync1((_) => subscription.cancel()), onDone: () => throw 'Unexpected'); @@ -678,17 +683,18 @@ void main() { void runTest(bool withPause) { // Test error on first GET request. 
var mock = mockClient(); - mock.register('GET', 'projects/$PROJECT/subscriptions', + mock.register('GET', 'projects/$testProject/subscriptions', expectAsync1((request) { return mock.respondError(500); })); - var api = PubSub(mock, PROJECT); - var subscription; + var api = PubSub(mock, testProject); + StreamSubscription subscription; subscription = api.listSubscriptions().listen( (_) => throw 'Unexpected', - onDone: expectAsync0(() => null), + onDone: expectAsync0(() {}), onError: expectAsync1((e) => e is pubsub.DetailedApiRequestError)); + addTearDown(() => subscription.cancel()); if (withPause) { subscription.pause(); scheduleMicrotask(() => subscription.resume()); @@ -705,29 +711,32 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 51, 50, totalCalls: 1); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); var count = 0; - var subscription; + late StreamSubscription subscription; subscription = api.listSubscriptions().listen( - expectAsync1(((_) { - count++; - if (count == 50) { - if (withPause) { - subscription.pause(); - scheduleMicrotask(() => subscription.resume()); + expectAsync1(((_) { + count++; + if (count == 50) { + if (withPause) { + subscription.pause(); + scheduleMicrotask(() => subscription.resume()); + } + mock.clear(); + mock.register( + 'GET', 'projects/$testProject/subscriptions', + expectAsync1((request) { + return mock.respondError(500); + })); } - mock.clear(); - mock.register('GET', 'projects/$PROJECT/subscriptions', - expectAsync1((request) { - return mock.respondError(500); - })); - } - return null; - }), count: 50), - onDone: expectAsync0(() => null), - onError: - expectAsync1((e) => e is pubsub.DetailedApiRequestError)); + return; + }), count: 50), + onDone: expectAsync0(() {}), + onError: expectAsync1( + (e) => e is pubsub.DetailedApiRequestError), + ); + addTearDown(() => subscription.cancel()); } runTest(false); @@ -740,7 +749,7 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 0, 50, topic: topic); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return (topic == null ? api.pageSubscriptions() : api.pageSubscriptions(topic: topic)) @@ -771,7 +780,7 @@ void main() { var mock = mockClient(); registerQueryMock(mock, 10, 50, topic: topic); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return (topic == null ? api.pageSubscriptions() : api.pageSubscriptions(topic: topic)) @@ -822,7 +831,7 @@ void main() { } } - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); (topic == null ? 
api.pageSubscriptions(pageSize: pageSize) : api.pageSubscriptions(topic: topic, pageSize: pageSize)) @@ -860,7 +869,7 @@ void main() { group('topic', () { var name = 'test-topic'; - var absoluteName = 'projects/$PROJECT/topics/test-topic'; + var absoluteName = 'projects/$testProject/topics/test-topic'; var message = 'Hello, world!'; var messageBytes = utf8.encode(message); var messageBase64 = base64.encode(messageBytes); @@ -892,7 +901,7 @@ void main() { var mock = mockClient(); registerLookup(mock); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { @@ -925,7 +934,7 @@ void main() { var mock = mockClient(); registerLookup(mock); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); registerPublish(mock, 4, ((request) { @@ -968,7 +977,7 @@ void main() { return mock.respond(pubsub.Topic()..name = absoluteName); })); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { expect(topic.name, name); expect(topic.absoluteName, absoluteName); @@ -987,7 +996,7 @@ void main() { group('subscription', () { var name = 'test-subscription'; - var absoluteName = 'projects/$PROJECT/subscriptions/test-subscription'; + var absoluteName = 'projects/$testProject/subscriptions/test-subscription'; test('delete', () { var mock = mockClient(); @@ -996,7 +1005,7 @@ void main() { return mock.respond(pubsub.Topic()..name = absoluteName); })); - var api = PubSub(mock, PROJECT); + var api = PubSub(mock, testProject); return api.lookupSubscription(name).then(expectAsync1((subscription) { expect(subscription.name, name); expect(subscription.absoluteName, absoluteName); diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index dc90d510..bb6874b6 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -58,9 +58,9 @@ void main() { // their entries. var insertions = 0; return ss.fork(expectAsync0(() => Future.value(() { - var NUM = 10; + var num = 10; - for (var i = 0; i < NUM; i++) { + for (var i = 0; i < num; i++) { var key = i; insertions++; @@ -71,7 +71,7 @@ void main() { return null; })); - for (var j = 0; j <= NUM; j++) { + for (var j = 0; j <= num; j++) { if (j <= i) { expect(ss.lookup(key), 'value$i'); } else { diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index ef1cbec3..7da0deb1 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -21,8 +21,8 @@ String generateBucketName() { bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; // Generate a list just above the limit when changing to resumable upload. -const int MB = 1024 * 1024; -const int maxNormalUpload = 1 * MB; +const int mb = 1024 * 1024; +const int maxNormalUpload = 1 * mb; const int minResumableUpload = maxNormalUpload + 1; final bytesResumableUpload = List.generate(minResumableUpload, (e) => e & 255); @@ -55,7 +55,7 @@ void main() { } // Deleting a bucket relies on eventually consistent behaviour, hence // the delay in attempt to prevent test flakiness. 
- await Future.delayed(STORAGE_LIST_DELAY); + await Future.delayed(storageListDelay); await storage.deleteBucket(testBucketName); }); diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index ad497ccf..d2ba5ab7 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -15,20 +15,20 @@ import 'package:test/test.dart'; import '../common.dart'; import '../common_e2e.dart'; -const String HOSTNAME = 'storage.googleapis.com'; -const String ROOT_PATH = '/storage/v1/'; +const _hostName = 'storage.googleapis.com'; +const _rootPath = '/storage/v1/'; -MockClient mockClient() => MockClient(HOSTNAME, ROOT_PATH); +MockClient mockClient() => MockClient(_hostName, _rootPath); void withMockClient(Function(MockClient client, Storage storage) function) { var mock = mockClient(); - function(mock, Storage(mock, PROJECT)); + function(mock, Storage(mock, testProject)); } Future withMockClientAsync( Future Function(MockClient client, Storage storage) function) async { var mock = mockClient(); - await function(mock, Storage(mock, PROJECT)); + await function(mock, Storage(mock, testProject)); } void main() { @@ -202,7 +202,7 @@ void main() { test('delete', () { withMockClient((mock, api) { mock.register('DELETE', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { - expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.url.path, '${_rootPath}b/$bucketName'); expect(request.body.length, 0); return mock.respond(storage.Bucket()..name = bucketName); })); @@ -219,7 +219,7 @@ void main() { 'GET', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { - expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.url.path, '${_rootPath}b/$bucketName'); expect(request.body.length, 0); if (exists) { return mock.respond(storage.Bucket()..name = bucketName); @@ -239,7 +239,7 @@ void main() { test('stat', () { withMockClient((mock, api) { mock.register('GET', RegExp(r'b/[a-z/-]*$'), expectAsync1((request) { - expect(request.url.path, '${ROOT_PATH}b/$bucketName'); + expect(request.url.path, '${_rootPath}b/$bucketName'); expect(request.body.length, 0); return mock.respond(storage.Bucket() ..name = bucketName @@ -261,8 +261,9 @@ void main() { return mock.respond(storage.Buckets()); })); - api.listBucketNames().listen((_) => throw 'Unexpected', - onDone: expectAsync0(() => null)); + api + .listBucketNames() + .listen((_) => throw 'Unexpected', onDone: expectAsync0(() {})); }); }); @@ -318,8 +319,8 @@ void main() { var bytesNormalUpload = [1, 2, 3]; // Generate a list just above the limit when changing to resumable upload. 
- const MB = 1024 * 1024; - const maxNormalUpload = 1 * MB; + const mb = 1024 * 1024; + const maxNormalUpload = 1 * mb; const minResumableUpload = maxNormalUpload + 1; var bytesResumableUpload = List.generate(minResumableUpload, (e) => e & 255); @@ -351,15 +352,15 @@ void main() { var requestObject = storage.Object.fromJson(jsonDecode(request.body) as Map); expect(requestObject.name, objectName); - return mock.respondInitiateResumableUpload(PROJECT); + return mock.respondInitiateResumableUpload(testProject); })); mock.registerResumableUpload( 'PUT', - 'b/$PROJECT/o', + 'b/$testProject/o', expectAsync1((request) { count++; if (count == 1) { - expect(request.bodyBytes.length, MB); + expect(request.bodyBytes.length, mb); return mock.respondContinueResumableUpload(); } else { expect(request.bodyBytes.length, 1); @@ -394,7 +395,9 @@ void main() { Future addToSink(StreamSink> sink, List> data) { sink.done.then(expectAsync1(checkResult)); sink.done.catchError((e) => throw 'Unexpected $e'); - data.forEach((bytes) => sink.add(bytes)); + for (var bytes in data) { + sink.add(bytes); + } return sink .close() .then(expectAsync1(checkResult)) @@ -481,11 +484,11 @@ void main() { mock.clear(); mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { - return mock.respondInitiateResumableUpload(PROJECT); + return mock.respondInitiateResumableUpload(testProject); })); mock.registerResumableUpload( 'PUT', - 'b/$PROJECT/o', + 'b/$testProject/o', expectAsync1((request) { return mock.respondError(502); }, count: 3)); // Default 3 retries in googleapis library. @@ -509,9 +512,9 @@ void main() { mock.clear(); mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { - return mock.respondInitiateResumableUpload(PROJECT); + return mock.respondInitiateResumableUpload(testProject); })); - mock.registerResumableUpload('PUT', 'b/$PROJECT/o', + mock.registerResumableUpload('PUT', 'b/$testProject/o', expectAsync1((request) { return mock.respondContinueResumableUpload(); })); // Default 3 retries in googleapis library. @@ -551,12 +554,12 @@ void main() { withMockClient((mock, api) { mock.registerResumableUpload('POST', 'b/$bucketName/o', expectAsync1((request) { - return mock.respondInitiateResumableUpload(PROJECT); + return mock.respondInitiateResumableUpload(testProject); })); // The resumable upload will buffer until either close or a full chunk, // so when we add an error the last byte is never sent. Therefore this // PUT is only called once. - mock.registerResumableUpload('PUT', 'b/$PROJECT/o', + mock.registerResumableUpload('PUT', 'b/$testProject/o', expectAsync1((request) { expect(request.bodyBytes.length, 1024 * 1024); return mock.respondContinueResumableUpload(); @@ -659,11 +662,11 @@ void main() { expect(object.contentLanguage, m.contentLanguage); expect(object.metadata, m.custom); countInitial++; - return mock.respondInitiateResumableUpload(PROJECT); + return mock.respondInitiateResumableUpload(testProject); }, count: metadata.length)); mock.registerResumableUpload( 'PUT', - 'b/$PROJECT/o', + 'b/$testProject/o', expectAsync1((request) { var m = metadata[countData % metadata.length]; var contentType = m.contentType ?? 
'application/octet-stream'; @@ -671,7 +674,7 @@ void main() { var firstPart = countData < metadata.length; countData++; if (firstPart) { - expect(request.bodyBytes.length, MB); + expect(request.bodyBytes.length, mb); return mock.respondContinueResumableUpload(); } else { expect(request.bodyBytes.length, 1); @@ -979,7 +982,7 @@ void main() { ..contentType = 'mime/type'); })); - var api = Storage(mock, PROJECT); + var api = Storage(mock, testProject); var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((stat) { expect(stat.name, objectName); @@ -1008,7 +1011,7 @@ void main() { ..acl = [acl1, acl2, acl3]); })); - var api = Storage(mock, PROJECT); + var api = Storage(mock, testProject); var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((ObjectInfo info) { expect(info.name, objectName); @@ -1029,8 +1032,9 @@ void main() { })); var bucket = api.bucket(bucketName); - bucket.list().listen((_) => throw 'Unexpected', - onDone: expectAsync0(() => null)); + bucket + .list() + .listen((_) => throw 'Unexpected', onDone: expectAsync0(() {})); }); }); From a1a319be556de35094b82864e19a5fc7e14aecb1 Mon Sep 17 00:00:00 2001 From: keyonghan <54558023+keyonghan@users.noreply.github.com> Date: Thu, 14 Oct 2021 11:04:43 -0700 Subject: [PATCH 190/239] Update googleapis package to latest version (dart-lang/gcloud#128) --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 4 ++-- pkgs/gcloud/test/storage/storage_test.dart | 8 ++++---- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 74822933..ae21217c 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.4 + +- Support the latest version 6.0.0 of the `googleapis` package. + ## 0.8.3 - Support the latest version of the `googleapis` package. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b6f62b64..531b1cb7 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.4-dev +version: 0.8.4 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
homepage: https://github.com/dart-lang/gcloud @@ -8,7 +8,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <6.0.0' + googleapis: '>=3.0.0 <7.0.0' http: ^0.13.0 meta: ^1.3.0 diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index d2ba5ab7..0d7b7bba 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -243,12 +243,12 @@ void main() { expect(request.body.length, 0); return mock.respond(storage.Bucket() ..name = bucketName - ..timeCreated = DateTime(2014)); + ..timeCreated = DateTime.utc(2014)); })); return api.bucketInfo(bucketName).then(expectAsync1((result) { expect(result.bucketName, bucketName); - expect(result.created, DateTime(2014)); + expect(result.created, DateTime.utc(2014)); })); }); }); @@ -978,7 +978,7 @@ void main() { expect(request.url.queryParameters['alt'], 'json'); return mock.respond(storage.Object() ..name = objectName - ..updated = DateTime(2014) + ..updated = DateTime.utc(2014) ..contentType = 'mime/type'); })); @@ -986,7 +986,7 @@ void main() { var bucket = api.bucket(bucketName); bucket.info(objectName).then(expectAsync1((stat) { expect(stat.name, objectName); - expect(stat.updated, DateTime(2014)); + expect(stat.updated, DateTime.utc(2014)); expect(stat.metadata.contentType, 'mime/type'); })); }); From 57f330ae760925d46977ea8d8a5da66d05fcf131 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Tue, 2 Nov 2021 11:02:38 -0700 Subject: [PATCH 191/239] =?UTF-8?q?Support=20latest=20googleapis=20?= =?UTF-8?q?=E2=80=93=C2=A0prepare=20release=20(dart-lang/gcloud#132)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/pubspec.yaml | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index ae21217c..89db88c6 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.5 + +- Support the latest version 7.0.0 of the `googleapis` package. + ## 0.8.4 - Support the latest version 6.0.0 of the `googleapis` package. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 531b1cb7..dda364c2 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.4 +version: 0.8.5 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud @@ -8,7 +8,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <7.0.0' + googleapis: '>=3.0.0 <8.0.0' http: ^0.13.0 meta: ^1.3.0 From cc92ac9243ee48cac9c36017d3b3ca8de19ae58a Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 16 Feb 2022 08:50:54 -0800 Subject: [PATCH 192/239] Drop unused ctor param in private class (dart-lang/gcloud#135) --- pkgs/gcloud/lib/src/storage_impl.dart | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 4f02224f..b26214d0 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -511,14 +511,13 @@ class _ObjectMetadata implements ObjectMetadata { /// It provides a StreamSink and logic which selects whether to use normal /// media upload (multipart mime) or resumable media upload. 
class _MediaUploadStreamSink implements StreamSink> { - static const int _defaultMaxNormalUploadLength = 1024 * 1024; + static const _maxNormalUploadLength = 1024 * 1024; final storage_api.StorageApi _api; final String _bucketName; final String _objectName; final storage_api.Object _object; final String? _predefinedAcl; final int? _length; - final int _maxNormalUploadLength; int _bufferLength = 0; final List> buffer = >[]; final _controller = StreamController>(sync: true); @@ -532,8 +531,7 @@ class _MediaUploadStreamSink implements StreamSink> { int? _state; _MediaUploadStreamSink(this._api, this._bucketName, this._objectName, - this._object, this._predefinedAcl, this._length, - [this._maxNormalUploadLength = _defaultMaxNormalUploadLength]) { + this._object, this._predefinedAcl, this._length) { if (_length != null) { // If the length is known in advance decide on the upload strategy // immediately From c18915e9f67c982f2eeabd1c9f480a5284d3cac1 Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Mon, 7 Feb 2022 20:26:38 +0100 Subject: [PATCH 193/239] Forward pauses for GCS upload with unknown size --- pkgs/gcloud/lib/src/storage_impl.dart | 19 +++++++++++++------ pkgs/gcloud/lib/storage.dart | 1 + 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index b26214d0..8091626e 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -518,8 +518,7 @@ class _MediaUploadStreamSink implements StreamSink> { final storage_api.Object _object; final String? _predefinedAcl; final int? _length; - int _bufferLength = 0; - final List> buffer = >[]; + final BytesBuilder buffer = BytesBuilder(); final _controller = StreamController>(sync: true); late StreamSubscription _subscription; late StreamController> _resumableController; @@ -578,14 +577,22 @@ class _MediaUploadStreamSink implements StreamSink> { assert(_state != _stateLengthKnown); if (_state == _stateProbingLength) { buffer.add(data); - _bufferLength += data.length; - if (_bufferLength > _maxNormalUploadLength) { + if (buffer.length > _maxNormalUploadLength) { // Start resumable upload. // TODO: Avoid using another stream-controller. _resumableController = StreamController>(sync: true); - buffer.forEach(_resumableController.add); + _resumableController.add(buffer.takeBytes()); _startResumableUpload(_resumableController.stream, _length); _state = _stateDecidedResumable; + + // At this point, we're forwarding events to the synchronous controller, + // so let's also forward pause and resume requests. + _resumableController + ..onPause = _subscription.pause + ..onResume = _subscription.resume; + // We don't have to handle `onCancel`: The upload will only cancel the + // stream in case of errors, which we already handle by closing the + // subscription. } } else { assert(_state == _stateDecidedResumable); @@ -597,7 +604,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _stateProbingLength) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. 
- _startNormalUpload(Stream>.fromIterable(buffer), _bufferLength); + _startNormalUpload(Stream.value(buffer.takeBytes()), buffer.length); } else { _resumableController.close(); } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 4f6e4461..122e395f 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -51,6 +51,7 @@ library gcloud.storage; import 'dart:async'; import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; import 'dart:convert'; +import 'dart:typed_data'; import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; import 'package:googleapis/storage/v1.dart' as storage_api; From 33d3081ac0a8f390a32faf631079666f383753bf Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Mon, 7 Feb 2022 20:29:49 +0100 Subject: [PATCH 194/239] Bump version, add changelog --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/pubspec.yaml | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 89db88c6..604f2bac 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.8.6-dev + +- Throttle streams piped into `Bucket.write` when the size is not known + beforehand. + ## 0.8.5 - Support the latest version 7.0.0 of the `googleapis` package. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index dda364c2..a2bed2fa 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.5 +version: 0.8.6-dev description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. homepage: https://github.com/dart-lang/gcloud From d79162efa12b4230ec34cef05832e75e049daa4d Mon Sep 17 00:00:00 2001 From: Simon Binder Date: Mon, 7 Feb 2022 22:35:36 +0100 Subject: [PATCH 195/239] Make internal buffer private --- pkgs/gcloud/lib/src/storage_impl.dart | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 8091626e..448c5f14 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -518,7 +518,7 @@ class _MediaUploadStreamSink implements StreamSink> { final storage_api.Object _object; final String? _predefinedAcl; final int? _length; - final BytesBuilder buffer = BytesBuilder(); + final BytesBuilder _buffer = BytesBuilder(); final _controller = StreamController>(sync: true); late StreamSubscription _subscription; late StreamController> _resumableController; @@ -576,12 +576,12 @@ class _MediaUploadStreamSink implements StreamSink> { void _onData(List data) { assert(_state != _stateLengthKnown); if (_state == _stateProbingLength) { - buffer.add(data); - if (buffer.length > _maxNormalUploadLength) { + _buffer.add(data); + if (_buffer.length > _maxNormalUploadLength) { // Start resumable upload. // TODO: Avoid using another stream-controller. _resumableController = StreamController>(sync: true); - _resumableController.add(buffer.takeBytes()); + _resumableController.add(_buffer.takeBytes()); _startResumableUpload(_resumableController.stream, _length); _state = _stateDecidedResumable; @@ -604,7 +604,7 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _stateProbingLength) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. 
- _startNormalUpload(Stream.value(buffer.takeBytes()), buffer.length); + _startNormalUpload(Stream.value(_buffer.takeBytes()), _buffer.length); } else { _resumableController.close(); } From 3690e1abd14dcdcc114ff67b4544c6df2d5411cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Mar 2022 11:16:00 -0800 Subject: [PATCH 196/239] Bump actions/checkout from 2 to 3 (dart-lang/gcloud#136) Bumps [actions/checkout](https://github.com/actions/checkout) from 2 to 3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2...v3) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index b790ff82..3c673769 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: dart-lang/setup-dart@v1 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.12.0, dev] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: dart-lang/setup-dart@v1 with: sdk: ${{ matrix.sdk }} From 1a8e76eab08a2afb0ca4ccb2a13dc4fbb45fc48d Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Wed, 20 Apr 2022 14:39:18 -0700 Subject: [PATCH 197/239] Switch from homepage to repository in pubspec (dart-lang/gcloud#137) --- pkgs/gcloud/pubspec.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a2bed2fa..d77b5b5d 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -2,7 +2,8 @@ name: gcloud version: 0.8.6-dev description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. -homepage: https://github.com/dart-lang/gcloud +repository: https://github.com/dart-lang/gcloud + environment: sdk: '>=2.12.0 <3.0.0' From c417c8a4c536b98ce5b80f02ac14812cfe103d7f Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Sat, 11 Jun 2022 02:12:06 +0000 Subject: [PATCH 198/239] remove an nnbd file exclusion --- pkgs/gcloud/test/db_all_e2e_test.dart | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index 239689f8..f66f5156 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -1,7 +1,6 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-// @dart=2.9 @Tags(['e2e']) @Timeout(Duration(seconds: 120)) @@ -26,19 +25,18 @@ Future main() async { var now = DateTime.now().millisecondsSinceEpoch; var namespace = '${Platform.operatingSystem}$now'; - datastore_impl.DatastoreImpl datastore; - db.DatastoreDB datastoreDB; - Client client; + late datastore_impl.DatastoreImpl datastore; + late db.DatastoreDB datastoreDB; + Client? client; - await withAuthClient(scopes, (String project, httpClient) { + await withAuthClient(scopes, (String project, httpClient) async { datastore = datastore_impl.DatastoreImpl(httpClient, project); datastoreDB = db.DatastoreDB(datastore); client = httpClient; - return null; }); tearDownAll(() async { - client.close(); + client?.close(); }); group('datastore_test', () { From 901fc67c902ce9bb34f67922ae5f811c05c953ee Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Wed, 24 Aug 2022 12:24:22 +0200 Subject: [PATCH 199/239] Prepare release --- pkgs/gcloud/CHANGELOG.md | 3 +- pkgs/gcloud/pubspec.yaml | 4 +- .../datastore/e2e/datastore_test_impl.dart | 453 +++++++++--------- 3 files changed, 221 insertions(+), 239 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 604f2bac..e47410cc 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,7 +1,8 @@ -## 0.8.6-dev +## 0.8.6 - Throttle streams piped into `Bucket.write` when the size is not known beforehand. +- Support the latest version 9.0.0 of the `googleapis` package. ## 0.8.5 diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index d77b5b5d..f42336e1 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.6-dev +version: 0.8.6 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud @@ -9,7 +9,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <8.0.0' + googleapis: '>=3.0.0 <10.0.0' http: ^0.13.0 meta: ^1.3.0 diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 0bc983e2..1bba7adc 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -270,7 +270,7 @@ void runTests(Datastore datastore, String? namespace) { // Maybe it should not be a [DataStoreError] here? // FIXME/TODO: This was adapted expect( - datastore.commit(inserts: named20000), throwsA(isSocketException)); + datastore.commit(inserts: named20000), throwsA(isApplicationError)); }); // TODO: test invalid inserts (like entities without key, ...) @@ -590,7 +590,7 @@ void runTests(Datastore datastore, String? namespace) { changedEntities[i] = Entity(entity.key, newProperties); } return datastore.commit( - inserts: changedEntities as List, + inserts: changedEntities.cast(), transaction: transaction); } @@ -608,7 +608,7 @@ void runTests(Datastore datastore, String? namespace) { return Future.wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction - List>> lookups = >>[]; + List>> lookups = >>[]; for (var transaction in transactions) { lookups.add(datastore.lookup(keys, transaction: transaction)); } @@ -803,109 +803,95 @@ void runTests(Datastore datastore, String? 
namespace) { var orders = [Order(OrderDirection.Decending, queryKey)]; - test('query', () { - return insert(stringNamedEntities, []).then((keys) { - return waitUntilEntitiesReady(datastore, stringNamedKeys, partition) - .then((_) { - var tests = [ - // EntityKind query - () => testQueryAndCompare(testQueryKind, stringNamedEntities, - transactional: false, correctOrder: false), - () => testQueryAndCompare(testQueryKind, stringNamedEntities, - transactional: true, correctOrder: false), - () => testQueryAndCompare(testQueryKind, stringNamedEntities, - transactional: true, correctOrder: false, xg: true), - - // EntityKind query with order - () => testQueryAndCompare(testQueryKind, sorted, - transactional: false, orders: orders), - () => testQueryAndCompare(testQueryKind, sorted, - transactional: true, orders: orders), - () => testQueryAndCompare(testQueryKind, sorted, - transactional: false, xg: true, orders: orders), - - // EntityKind query with filter - () => testQueryAndCompare(testQueryKind, filtered, - transactional: false, filters: filters), - () => testQueryAndCompare(testQueryKind, filtered, - transactional: true, filters: filters), - () => testQueryAndCompare(testQueryKind, filtered, - transactional: false, xg: true, filters: filters), - - // EntityKind query with filter + order - () => testQueryAndCompare(testQueryKind, sortedAndFiltered, - transactional: false, filters: filters, orders: orders), - () => testQueryAndCompare(testQueryKind, sortedAndFiltered, - transactional: true, filters: filters, orders: orders), - () => testQueryAndCompare(testQueryKind, sortedAndFiltered, - transactional: false, - xg: true, - filters: filters, - orders: orders), - - // EntityKind query with IN filter + order - () => testQueryAndCompare(testQueryKind, sortedAndListFiltered, - transactional: false, filters: listFilters, orders: orders), - () => testQueryAndCompare(testQueryKind, sortedAndListFiltered, - transactional: true, filters: listFilters, orders: orders), - () => testQueryAndCompare(testQueryKind, sortedAndListFiltered, - transactional: false, - xg: true, - filters: listFilters, - orders: orders), - - // Limit & Offset test - () => testOffsetLimitQuery(testQueryKind, sorted, - transactional: false, orders: orders), - () => testOffsetLimitQuery(testQueryKind, sorted, - transactional: true, orders: orders), - () => testOffsetLimitQuery(testQueryKind, sorted, - transactional: false, xg: true, orders: orders), - - // Query for indexed property - () => testQueryAndCompare(testQueryKind, indexedEntity, - transactional: false, filters: indexedPropertyFilter), - () => testQueryAndCompare(testQueryKind, indexedEntity, - transactional: true, filters: indexedPropertyFilter), - () => testQueryAndCompare(testQueryKind, indexedEntity, - transactional: false, - xg: true, - filters: indexedPropertyFilter), - - // Query for un-indexed property - () => testQueryAndCompare(testQueryKind, [], - transactional: false, filters: unIndexedPropertyFilter), - () => testQueryAndCompare(testQueryKind, [], - transactional: true, filters: unIndexedPropertyFilter), - () => testQueryAndCompare(testQueryKind, [], - transactional: false, - xg: true, - filters: unIndexedPropertyFilter), - - // Delete results - () => delete(stringNamedKeys, transactional: true), - - // Wait until the entity deletes are reflected in the indices. 
- () => - waitUntilEntitiesGone(datastore, stringNamedKeys, partition), - - // Make sure queries don't return results - () => - testQueryAndCompare(testQueryKind, [], transactional: false), - () => testQueryAndCompare(testQueryKind, [], transactional: true), - () => testQueryAndCompare(testQueryKind, [], - transactional: true, xg: true), - () => testQueryAndCompare(testQueryKind, [], - transactional: false, filters: filters, orders: orders), - ]; - return Future.forEach(tests, (dynamic f) => f()); - }); - }); + test('query', () async { + await insert(stringNamedEntities, []); + await waitUntilEntitiesReady(datastore, stringNamedKeys, partition); + + // EntityKind query + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: false, correctOrder: false); + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: true, correctOrder: false); + await testQueryAndCompare(testQueryKind, stringNamedEntities, + transactional: true, correctOrder: false, xg: true); + + // EntityKind query with order + await testQueryAndCompare(testQueryKind, sorted, + transactional: false, orders: orders); + await testQueryAndCompare(testQueryKind, sorted, + transactional: true, orders: orders); + await testQueryAndCompare(testQueryKind, sorted, + transactional: false, xg: true, orders: orders); + + // EntityKind query with filter + await testQueryAndCompare(testQueryKind, filtered, + transactional: false, filters: filters); + await testQueryAndCompare(testQueryKind, filtered, + transactional: true, filters: filters); + await testQueryAndCompare(testQueryKind, filtered, + transactional: false, xg: true, filters: filters); + + // EntityKind query with filter + order + await testQueryAndCompare(testQueryKind, sortedAndFiltered, + transactional: false, filters: filters, orders: orders); + await testQueryAndCompare(testQueryKind, sortedAndFiltered, + transactional: true, filters: filters, orders: orders); + await testQueryAndCompare(testQueryKind, sortedAndFiltered, + transactional: false, xg: true, filters: filters, orders: orders); + + // EntityKind query with IN filter + order + await testQueryAndCompare(testQueryKind, sortedAndListFiltered, + transactional: false, filters: listFilters, orders: orders); + await testQueryAndCompare(testQueryKind, sortedAndListFiltered, + transactional: true, filters: listFilters, orders: orders); + await testQueryAndCompare(testQueryKind, sortedAndListFiltered, + transactional: false, + xg: true, + filters: listFilters, + orders: orders); + + // Limit & Offset test + await testOffsetLimitQuery(testQueryKind, sorted, + transactional: false, orders: orders); + await testOffsetLimitQuery(testQueryKind, sorted, + transactional: true, orders: orders); + await testOffsetLimitQuery(testQueryKind, sorted, + transactional: false, xg: true, orders: orders); + + // Query for indexed property + await testQueryAndCompare(testQueryKind, indexedEntity, + transactional: false, filters: indexedPropertyFilter); + await testQueryAndCompare(testQueryKind, indexedEntity, + transactional: true, filters: indexedPropertyFilter); + await testQueryAndCompare(testQueryKind, indexedEntity, + transactional: false, xg: true, filters: indexedPropertyFilter); + + // Query for un-indexed property + await testQueryAndCompare(testQueryKind, [], + transactional: false, filters: unIndexedPropertyFilter); + await testQueryAndCompare(testQueryKind, [], + transactional: true, filters: unIndexedPropertyFilter); + await testQueryAndCompare(testQueryKind, [], + transactional: 
false, xg: true, filters: unIndexedPropertyFilter); + + // Delete results + await delete(stringNamedKeys, transactional: true); + + // Wait until the entity deletes are reflected in the indices. + await waitUntilEntitiesGone(datastore, stringNamedKeys, partition); + + // Make sure queries don't return results + await testQueryAndCompare(testQueryKind, [], transactional: false); + await testQueryAndCompare(testQueryKind, [], transactional: true); + await testQueryAndCompare(testQueryKind, [], + transactional: true, xg: true); + await testQueryAndCompare(testQueryKind, [], + transactional: false, filters: filters, orders: orders); // TODO: query by multiple keys, multiple sort orders, ... }); - test('ancestor_query', () { + test('ancestor_query', () async { /* * This test creates an * RootKind:1 -- This defines the entity group (no entity with that key) @@ -924,141 +910,136 @@ void runTests(Datastore datastore, String? namespace) { var orders = [Order(OrderDirection.Ascending, '__key__')]; - return datastore.commit(inserts: [entity, entity2]).then((_) { - var futures = [ - // FIXME/TODO: Ancestor queries should be strongly consistent. - // We should not need to wait for them. - () { - return waitUntilEntitiesReady( - datastore, [subSubKey, subSubKey2], partition); - }, - // Test that lookup only returns inserted entities. - () { - return datastore - .lookup([rootKey, subKey, subSubKey, subSubKey2]).then( - (List entities) { - expect(entities.length, 4); - expect(entities[0], isNull); - expect(entities[1], isNull); - expect(entities[2], isNotNull); - expect(entities[3], isNotNull); - expect(compareEntity(entity, entities[2]!), isTrue); - expect(compareEntity(entity2, entities[3]!), isTrue); - }); - }, - - // Query by ancestor. - // - by [rootKey] - () { - var ancestorQuery = Query(ancestorKey: rootKey, orders: orders); - return consumePages((_) => - datastore.query(ancestorQuery, partition: partition)) - .then((results) { - expect(results.length, 2); - expect(compareEntity(entity, results[0]), isTrue); - expect(compareEntity(entity2, results[1]), isTrue); - }); - }, - // - by [subKey] - () { - var ancestorQuery = Query(ancestorKey: subKey, orders: orders); - return consumePages((_) => - datastore.query(ancestorQuery, partition: partition)) - .then((results) { - expect(results.length, 2); - expect(compareEntity(entity, results[0]), isTrue); - expect(compareEntity(entity2, results[1]), isTrue); - }); - }, - // - by [subSubKey] - () { - var ancestorQuery = Query(ancestorKey: subSubKey); - return consumePages((_) => - datastore.query(ancestorQuery, partition: partition)) - .then((results) { - expect(results.length, 1); - expect(compareEntity(entity, results[0]), isTrue); - }); - }, - // - by [subSubKey2] - () { - var ancestorQuery = Query(ancestorKey: subSubKey2); - return consumePages((_) => - datastore.query(ancestorQuery, partition: partition)) - .then((results) { - expect(results.length, 1); - expect(compareEntity(entity2, results[0]), isTrue); - }); - }, - - // Query by ancestor and kind. 
- // - by [rootKey] + 'SubSubKind' - () { - var query = Query(ancestorKey: rootKey, kind: 'SubSubKind'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 1); - expect(compareEntity(entity, results[0]), isTrue); - }); - }, - // - by [rootKey] + 'SubSubKind2' - () { - var query = Query(ancestorKey: rootKey, kind: 'SubSubKind2'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 1); - expect(compareEntity(entity2, results[0]), isTrue); - }); - }, - // - by [subSubKey] + 'SubSubKind' - () { - var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 1); - expect(compareEntity(entity, results[0]), isTrue); - }); - }, - // - by [subSubKey2] + 'SubSubKind2' - () { - var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 1); - expect(compareEntity(entity2, results[0]), isTrue); - }); - }, - // - by [subSubKey] + 'SubSubKind2' - () { - var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 0); - }); - }, - // - by [subSubKey2] + 'SubSubKind' - () { - var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); - return consumePages( - (_) => datastore.query(query, partition: partition)) - .then((List results) { - expect(results.length, 0); - }); - }, - - // Cleanup - () { - return datastore.commit(deletes: [subSubKey, subSubKey2]); - } - ]; - return Future.forEach(futures, (dynamic f) => f()) - .then(expectAsync1((_) {})); + await datastore.commit(inserts: [entity, entity2]); + + // FIXME/TODO: Ancestor queries should be strongly consistent. + // We should not need to wait for them. + await waitUntilEntitiesReady( + datastore, [subSubKey, subSubKey2], partition); + + // Test that lookup only returns inserted entities. + await datastore.lookup([rootKey, subKey, subSubKey, subSubKey2]).then( + (List entities) { + expect(entities.length, 4); + expect(entities[0], isNull); + expect(entities[1], isNull); + expect(entities[2], isNotNull); + expect(entities[3], isNotNull); + expect(compareEntity(entity, entities[2]!), isTrue); + expect(compareEntity(entity2, entities[3]!), isTrue); }); + + // Query by ancestor. 
+ // - by [rootKey] + { + var ancestorQuery = Query(ancestorKey: rootKey, orders: orders); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + } + + // - by [subKey] + { + var ancestorQuery = Query(ancestorKey: subKey, orders: orders); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 2); + expect(compareEntity(entity, results[0]), isTrue); + expect(compareEntity(entity2, results[1]), isTrue); + }); + } + + // - by [subSubKey] + { + var ancestorQuery = Query(ancestorKey: subSubKey); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + } + + // - by [subSubKey2] + { + var ancestorQuery = Query(ancestorKey: subSubKey2); + await consumePages( + (_) => datastore.query(ancestorQuery, partition: partition)) + .then((results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + } + + // Query by ancestor and kind. + // - by [rootKey] + 'SubSubKind' + { + var query = Query(ancestorKey: rootKey, kind: 'SubSubKind'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + } + // - by [rootKey] + 'SubSubKind2' + { + var query = Query(ancestorKey: rootKey, kind: 'SubSubKind2'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + } + // - by [subSubKey] + 'SubSubKind' + { + var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 1); + expect(compareEntity(entity, results[0]), isTrue); + }); + } + // - by [subSubKey2] + 'SubSubKind2' + { + var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind2'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 1); + expect(compareEntity(entity2, results[0]), isTrue); + }); + } + // - by [subSubKey] + 'SubSubKind2' + { + var query = Query(ancestorKey: subSubKey, kind: 'SubSubKind2'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 0); + }); + } + // - by [subSubKey2] + 'SubSubKind' + { + var query = Query(ancestorKey: subSubKey2, kind: 'SubSubKind'); + await consumePages( + (_) => datastore.query(query, partition: partition)) + .then((List results) { + expect(results.length, 0); + }); + } + + // Cleanup + { + await datastore.commit(deletes: [subSubKey, subSubKey2]); + } }); }); }); From 0ce8b003f6721373cd62691c74b005655f95ec4e Mon Sep 17 00:00:00 2001 From: Michael Thomsen Date: Thu, 25 Aug 2022 13:06:47 +0200 Subject: [PATCH 200/239] Update README.md --- pkgs/gcloud/README.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index cf7b4106..7294b281 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -8,6 +8,23 @@ following services are supported: * Cloud Storage * Cloud 
Pub/Sub +## Status: Experimental + +**NOTE**: This package is currently experimental and published under the +[labs.dart.dev](https://dart.dev/dart-team-packages) pub publisher in order to +solicit feedback. + +For packages in the labs.dart.dev publisher we generally plan to either graduate +the package into a supported publisher (dart.dev, tools.dart.dev) after a period +of feedback and iteration, or discontinue the package. These packages have a +much higher expected rate of API and breaking changes. + +Your feedback is valuable and will help us evolve this package. For general +feedback, suggestions, and comments, please file an issue in the +[bug tracker](https://github.com/dart-lang/http/issues). + +## API details + The APIs in this package are all based on the generic generated APIs in the [googleapis] and [googleapis_beta][googleapisbeta] packages. From 75bfae3b7838fe6598e9f142274653976e9bbbc8 Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Wed, 9 Nov 2022 15:34:04 -0800 Subject: [PATCH 201/239] blast_repo fixes (dart-lang/gcloud#146) Dependabot GitHub Action --- pkgs/gcloud/.github/dependabot.yml | 16 +++++++--------- pkgs/gcloud/.github/workflows/test-package.yml | 8 ++++---- 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/pkgs/gcloud/.github/dependabot.yml b/pkgs/gcloud/.github/dependabot.yml index 430a85e7..1603cdd9 100644 --- a/pkgs/gcloud/.github/dependabot.yml +++ b/pkgs/gcloud/.github/dependabot.yml @@ -1,11 +1,9 @@ -# Set update schedule for GitHub Actions -# See https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/keeping-your-actions-up-to-date-with-dependabot - +# Dependabot configuration file. +# See https://docs.github.com/en/code-security/dependabot/dependabot-version-updates version: 2 -updates: -- package-ecosystem: "github-actions" - directory: "/" - schedule: - # Check for updates to GitHub Actions every weekday - interval: "daily" +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 3c673769..644f1202 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,8 +22,8 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@v3 - - uses: dart-lang/setup-dart@v1 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} - id: install @@ -49,8 +49,8 @@ jobs: os: [ubuntu-latest] sdk: [2.12.0, dev] steps: - - uses: actions/checkout@v3 - - uses: dart-lang/setup-dart@v1 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} - id: install From 8467b1dcfda12eb28b76f69210c15c04f80489bb Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Tue, 22 Nov 2022 09:56:04 +0100 Subject: [PATCH 202/239] Fix dart-lang/gcloud#144 (dart-lang/gcloud#147) * Fix dart-lang/gcloud#144 * Remove deadcode from test * Removed unused import from test --- pkgs/gcloud/CHANGELOG.md | 4 ++ pkgs/gcloud/lib/src/storage_impl.dart | 25 +++++---- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 6 --- pkgs/gcloud/test/storage/e2e_test.dart | 64 ++++++++++++++++------- 5 files changed, 65 insertions(+), 36 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md 
index e47410cc..71ade45f 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.7 + +- Fix `Bucket.write` when size is below 1MB. + ## 0.8.6 - Throttle streams piped into `Bucket.write` when the size is not known diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 448c5f14..6251c0c8 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -604,7 +604,8 @@ class _MediaUploadStreamSink implements StreamSink> { if (_state == _stateProbingLength) { // As the data is already cached don't bother to wait on somebody // listening on the stream before adding the data. - _startNormalUpload(Stream.value(_buffer.takeBytes()), _buffer.length); + final length = _buffer.length; + _startNormalUpload(Stream.value(_buffer.takeBytes()), length); } else { _resumableController.close(); } @@ -628,18 +629,22 @@ class _MediaUploadStreamSink implements StreamSink> { _doneCompleter.completeError(e, s); } - void _startNormalUpload(Stream> stream, int? length) { + void _startNormalUpload(Stream> stream, int? length) async { var contentType = _object.contentType ?? 'application/octet-stream'; var media = storage_api.Media(stream, length, contentType: contentType); - _api.objects - .insert(_object, _bucketName, - name: _objectName, - predefinedAcl: _predefinedAcl, - uploadMedia: media, - uploadOptions: storage_api.UploadOptions.defaultOptions) - .then((response) { + try { + final response = await _api.objects.insert( + _object, + _bucketName, + name: _objectName, + predefinedAcl: _predefinedAcl, + uploadMedia: media, + uploadOptions: storage_api.UploadOptions.defaultOptions, + ); _doneCompleter.complete(_ObjectInfoImpl(response)); - }, onError: _completeError); + } catch (e, st) { + _completeError(e, st); + } } void _startResumableUpload(Stream> stream, int? length) { diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index f42336e1..3cf81429 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.6 +version: 0.8.7 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 9e55c54e..39cfe265 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -47,7 +47,6 @@ import 'dart:async'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; -import 'package:http/http.dart'; import 'package:test/test.dart'; import '../../common_e2e.dart'; @@ -733,7 +732,6 @@ Future waitUntilEntitiesHelper( Future main() async { late db.DatastoreDB store; - BaseClient? client; var scopes = datastore_impl.DatastoreImpl.scopes; await withAuthClient(scopes, (String project, httpClient) { @@ -743,9 +741,5 @@ Future main() async { }); }); - tearDownAll(() { - client?.close(); - }); - runTests(store, null); } diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index 7da0deb1..ffd8ab2a 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -113,32 +113,58 @@ void main() { group('object', () { // Run all object tests in the same bucket to try to avoid the rate-limit // for creating and deleting buckets while testing. 
- Future withTestBucket(Future Function(Bucket bucket) function) { - return function(testBucket).whenComplete(() { + Future withTestBucket(Future Function(Bucket bucket) fn) async { + try { + return await fn(testBucket); + } finally { // TODO: Clean the bucket. + } + } + + void testWithBucket( + String name, + FutureOr Function(Bucket bucket) fn, + ) { + test(name, () async { + try { + await fn(testBucket); + } finally { + // TODO: Clean the bucket. + } }); } - test('create-read-delete', () { - Future test(name, List bytes) { - return withTestBucket((Bucket bucket) { - return bucket.writeBytes('test', bytes).then(expectAsync1((info) { - expect(info, isNotNull); - return bucket.read('test').fold>( - [], (p, e) => p..addAll(e)).then(expectAsync1((result) { - expect(result, bytes); - return bucket.delete('test').then(expectAsync1((result) { - expect(result, isNull); - })); - })); - })); + group('create-read-delete', () { + void testCreateReadDelete(String name, List bytes) { + testWithBucket(name, (bucket) async { + final info = await bucket.writeBytes('test', bytes); + expect(info, isNotNull); + final result = await bucket + .read('test') + .fold>([], (p, e) => p..addAll(e)); + expect(result, bytes); + await bucket.delete('test'); + }); + } + + testCreateReadDelete('test-1', [1, 2, 3]); + testCreateReadDelete('test-2', bytesResumableUpload); + }); + + group('create-read-delete-streaming', () { + void testCreateReadDelete(String name, List bytes) { + testWithBucket(name, (bucket) async { + await Stream.value(bytes).pipe(bucket.write('test')); + final result = await bucket + .read('test') + .fold>([], (p, e) => p..addAll(e)); + expect(result, bytes); + await bucket.delete('test'); }); } - return Future.forEach([ - () => test('test-1', [1, 2, 3]), - () => test('test-2', bytesResumableUpload) - ], (Function f) => f().then(expectAsync1((_) {}))); + testCreateReadDelete('test-1', [1, 2, 3, 5, 6, 7, 8, 9]); + testCreateReadDelete('test-2', bytesResumableUpload); }); test('create-with-predefined-acl-delete', () { From 09c52d44dffe2dc23a67d92e877b950bbe4eb7be Mon Sep 17 00:00:00 2001 From: Sam Rawlins Date: Mon, 9 Jan 2023 14:58:31 -0800 Subject: [PATCH 203/239] Migrate from no-implicit-casts to strict-casts (dart-lang/gcloud#149) --- pkgs/gcloud/analysis_options.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index 89810860..e3f44472 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -1,8 +1,8 @@ include: package:lints/recommended.yaml analyzer: - strong-mode: - implicit-casts: false + language: + strict-casts: true linter: rules: From 790373f58665bfa5b941569613ee3d32901ea406 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Jan 2023 11:46:49 -0800 Subject: [PATCH 204/239] Bump actions/checkout from 3.1.0 to 3.2.0 (dart-lang/gcloud#148) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.1.0 to 3.2.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8...755da8c3cf115ac066823e79a1e1788f8940201b) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 644f1202..cb2013bc 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.12.0, dev] steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} From 0a189baf88b93e013e0781d9cb4d754035501484 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Jan 2023 21:59:15 -0800 Subject: [PATCH 205/239] Bump actions/checkout from 3.2.0 to 3.3.0 (dart-lang/gcloud#151) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.2.0 to 3.3.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/755da8c3cf115ac066823e79a1e1788f8940201b...ac593985615ec2ede58e132d2e21d2b1cbd6127c) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index cb2013bc..f51044f8 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.12.0, dev] steps: - - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d with: sdk: ${{ matrix.sdk }} From 4dd5ac34dcd5bd479b7bcad06c03208740597a9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Jan 2023 23:29:18 -0800 Subject: [PATCH 206/239] Bump dart-lang/setup-dart from 1.3 to 1.4 (dart-lang/gcloud#152) Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.3 to 1.4. 
- [Release notes](https://github.com/dart-lang/setup-dart/releases) - [Changelog](https://github.com/dart-lang/setup-dart/blob/main/CHANGELOG.md) - [Commits](https://github.com/dart-lang/setup-dart/compare/6a218f2413a3e78e9087f638a238f6b40893203d...a57a6c04cf7d4840e88432aad6281d1e125f0d46) --- updated-dependencies: - dependency-name: dart-lang/setup-dart dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index f51044f8..a843667e 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d + - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.12.0, dev] steps: - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - uses: dart-lang/setup-dart@6a218f2413a3e78e9087f638a238f6b40893203d + - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 with: sdk: ${{ matrix.sdk }} - id: install From fb96fa712b2fd1070227b5525049a3aedd8730ec Mon Sep 17 00:00:00 2001 From: Sarah Zakarias Date: Tue, 7 Feb 2023 12:48:22 +0100 Subject: [PATCH 207/239] Add comment about upsert (dart-lang/gcloud#154) --- pkgs/gcloud/lib/src/db/db.dart | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 1d7eda8a..510bbaf0 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -400,6 +400,9 @@ class DatastoreDB { /// direct lookups will see the effect but non-ancestor queries will see the /// change in an eventual consistent way. /// + /// The inserts are done as upserts unless the provided model does not have an + /// id, in which case an autoId will be generated. + /// /// For transactions, please use `beginTransaction` and it's returned /// [Transaction] object. Future commit({List? inserts, List? 
deletes}) { From ba2c5e78681108a43042a8a58acb2423ee83eeae Mon Sep 17 00:00:00 2001 From: Kevin Moore Date: Mon, 6 Mar 2023 02:14:05 -0800 Subject: [PATCH 208/239] Enable and fix new team lints, require Dart 2.19 (dart-lang/gcloud#155) --- .../gcloud/.github/workflows/test-package.yml | 2 +- pkgs/gcloud/CHANGELOG.md | 4 +++ pkgs/gcloud/analysis_options.yaml | 13 +--------- pkgs/gcloud/lib/common.dart | 2 -- pkgs/gcloud/lib/datastore.dart | 4 +-- pkgs/gcloud/lib/db.dart | 2 +- pkgs/gcloud/lib/db/metamodel.dart | 2 -- pkgs/gcloud/lib/http.dart | 2 +- pkgs/gcloud/lib/pubsub.dart | 2 -- pkgs/gcloud/lib/service_scope.dart | 2 +- pkgs/gcloud/lib/src/datastore_impl.dart | 2 -- pkgs/gcloud/lib/src/db/annotations.dart | 2 +- pkgs/gcloud/lib/src/db/db.dart | 7 ++--- pkgs/gcloud/lib/src/db/exceptions.dart | 2 +- pkgs/gcloud/lib/src/db/model_db.dart | 5 ++-- pkgs/gcloud/lib/src/db/model_db_impl.dart | 20 +++++++------- pkgs/gcloud/lib/src/db/models.dart | 7 ++--- pkgs/gcloud/lib/src/pubsub_impl.dart | 14 +++++----- pkgs/gcloud/lib/src/storage_impl.dart | 6 ++--- pkgs/gcloud/lib/storage.dart | 23 +++------------- pkgs/gcloud/pubspec.yaml | 8 +++--- pkgs/gcloud/test/common.dart | 14 +++++----- pkgs/gcloud/test/common_e2e.dart | 2 -- .../datastore/e2e/datastore_test_impl.dart | 24 +++++++---------- pkgs/gcloud/test/datastore/e2e/utils.dart | 2 -- .../gcloud/test/datastore/error_matchers.dart | 2 -- pkgs/gcloud/test/db/db_test.dart | 6 ----- pkgs/gcloud/test/db/e2e/db_test_impl.dart | 4 ++- .../test/db/e2e/metamodel_test_impl.dart | 4 +-- pkgs/gcloud/test/db/model_db_test.dart | 2 -- .../db/model_dbs/duplicate_fieldname.dart | 1 + .../test/db/model_dbs/duplicate_kind.dart | 1 + .../test/db/model_dbs/duplicate_property.dart | 1 + .../db/model_dbs/multiple_annotations.dart | 1 + .../db/model_dbs/no_default_constructor.dart | 1 + pkgs/gcloud/test/db/properties_test.dart | 5 ++-- pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart | 12 ++++----- pkgs/gcloud/test/pubsub/pubsub_test.dart | 26 +++++++++---------- pkgs/gcloud/test/service_scope_test.dart | 14 ++++++---- pkgs/gcloud/test/storage/e2e_test.dart | 4 ++- pkgs/gcloud/test/storage/storage_test.dart | 20 ++++++++------ 41 files changed, 124 insertions(+), 153 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index a843667e..7e4c31e3 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -47,7 +47,7 @@ jobs: matrix: # Add macos-latest and/or windows-latest if relevant for this package. os: [ubuntu-latest] - sdk: [2.12.0, dev] + sdk: [2.19.0, dev] steps: - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 71ade45f..9d924b0b 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.8-dev + +- Require Dart 2.19 + ## 0.8.7 - Fix `Bucket.write` when size is below 1MB. 
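(Context for the `Bucket.write` fix recorded in the 0.8.7 changelog entry above: when the piped data turns out to be smaller than the resumable-upload threshold of roughly 1 MB, the upload sink buffers it and falls back to a single non-resumable insert — the path reworked in dart-lang/gcloud#147. Below is a minimal sketch of driving both entry points; it assumes a `Storage` instance created elsewhere with an authenticated client and an existing bucket, and the helper name, object names and payload are made up for illustration. Only calls that appear in the diffs above are used: `bucket.writeBytes`, `bucket.write`, `bucket.read`, `bucket.delete`.)

import 'dart:convert';

import 'package:gcloud/storage.dart';

// Sketch only: writeSmallObject, the object names, and the payload are
// illustrative; `storage` is assumed to be an authenticated Storage
// instance and `bucketName` an existing bucket.
Future<void> writeSmallObject(Storage storage, String bucketName) async {
  final bucket = storage.bucket(bucketName);
  final bytes = utf8.encode('hello, gcloud');

  // Length known up front: writeBytes issues a single normal upload.
  await bucket.writeBytes('greeting-bytes', bytes);

  // Length unknown to the sink: the data is buffered while the size is
  // probed, and a payload below ~1 MB takes the non-resumable path that
  // the 0.8.7 fix repaired.
  await Stream<List<int>>.value(bytes).pipe(bucket.write('greeting-stream'));

  // Read one object back and clean both up, mirroring the e2e test pattern.
  final result = await bucket
      .read('greeting-stream')
      .fold<List<int>>([], (p, e) => p..addAll(e));
  assert(result.length == bytes.length);
  await bucket.delete('greeting-bytes');
  await bucket.delete('greeting-stream');
}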
diff --git a/pkgs/gcloud/analysis_options.yaml b/pkgs/gcloud/analysis_options.yaml index e3f44472..ecdba569 100644 --- a/pkgs/gcloud/analysis_options.yaml +++ b/pkgs/gcloud/analysis_options.yaml @@ -1,4 +1,4 @@ -include: package:lints/recommended.yaml +include: package:dart_flutter_team_lints/analysis_options.yaml analyzer: language: @@ -6,18 +6,7 @@ analyzer: linter: rules: - - await_only_futures - - camel_case_types - cancel_subscriptions - - control_flow_in_finally - - directives_ordering - - empty_statements - - iterable_contains_unrelated_type - - list_remove_unrelated_type - package_api_docs - - package_names - - package_prefixed_library_names - prefer_relative_imports - test_types_in_equals - - throw_in_finally - - unnecessary_brace_in_string_interps diff --git a/pkgs/gcloud/lib/common.dart b/pkgs/gcloud/lib/common.dart index 09e3bcd5..e564f51e 100644 --- a/pkgs/gcloud/lib/common.dart +++ b/pkgs/gcloud/lib/common.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.common; - import 'dart:async'; /// A single page of paged results from a query. diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index b1c857bc..3b05eeae 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -7,7 +7,7 @@ /// /// For more information on Cloud Datastore, please refer to the following /// developers page: https://cloud.google.com/datastore/docs -library gcloud.datastore; +library; import 'dart:async'; @@ -52,7 +52,7 @@ class DatastoreError implements Exception { final String message; DatastoreError([String? message]) - : message = (message ?? 'DatastoreError: An unknown error occured'); + : message = message ?? 'DatastoreError: An unknown error occured'; @override String toString() => message; diff --git a/pkgs/gcloud/lib/db.dart b/pkgs/gcloud/lib/db.dart index 11509e02..6996f6f2 100644 --- a/pkgs/gcloud/lib/db.dart +++ b/pkgs/gcloud/lib/db.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.db; +library; import 'dart:collection'; // dart:core is imported explicitly so it is available at top-level without diff --git a/pkgs/gcloud/lib/db/metamodel.dart b/pkgs/gcloud/lib/db/metamodel.dart index c019fd24..cc8bab44 100644 --- a/pkgs/gcloud/lib/db/metamodel.dart +++ b/pkgs/gcloud/lib/db/metamodel.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.db.meta_model; - import '../db.dart' as db; @db.Kind(name: '__namespace__') diff --git a/pkgs/gcloud/lib/http.dart b/pkgs/gcloud/lib/http.dart index 0ba6cdc8..30fd7aef 100644 --- a/pkgs/gcloud/lib/http.dart +++ b/pkgs/gcloud/lib/http.dart @@ -4,7 +4,7 @@ /// Provides access to an authenticated HTTP client which can be used to access /// Google APIs. -library gcloud.http; +library; import 'package:http/http.dart' as http; diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index a2951153..42551f87 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-library gcloud.pubsub; - import 'dart:async'; import 'dart:collection'; import 'dart:convert'; diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 89768704..6e23c475 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -72,7 +72,7 @@ /// and instead depend only on the services needed (e.g. /// `package:gcloud/storage.dart`) by using getters in the service library (e.g. /// the `storageService`) which are implemented with service scope lookups. -library gcloud.service_scope; +library; import 'dart:async'; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index fbe7c038..d62868c6 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.datastore_impl; - import 'dart:async'; import 'package:googleapis/datastore/v1.dart' as api; diff --git a/pkgs/gcloud/lib/src/db/annotations.dart b/pkgs/gcloud/lib/src/db/annotations.dart index 241d5797..de895bc2 100644 --- a/pkgs/gcloud/lib/src/db/annotations.dart +++ b/pkgs/gcloud/lib/src/db/annotations.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// Annotation used to mark dart classes which can be stored into datastore. /// diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 510bbaf0..9a4bbf3c 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// A function definition for transactional functions. /// @@ -205,8 +205,9 @@ class Query { /// Adds an order to this [Query]. /// - /// [orderString] has the form "-name" where 'name' is a fieldName of the model - /// and the optional '-' says whether the order is descending or ascending. + /// [orderString] has the form "-name" where 'name' is a fieldName of the + /// model and the optional '-' says whether the order is descending or + /// ascending. void order(String orderString) { // TODO: validate [orderString] (e.g. is name valid) if (orderString.startsWith('-')) { diff --git a/pkgs/gcloud/lib/src/db/exceptions.dart b/pkgs/gcloud/lib/src/db/exceptions.dart index 11c48b1c..6eed41d8 100644 --- a/pkgs/gcloud/lib/src/db/exceptions.dart +++ b/pkgs/gcloud/lib/src/db/exceptions.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// Exception that gets thrown when a caller attempts to look up a value by /// its key, and the key cannot be found in the datastore. diff --git a/pkgs/gcloud/lib/src/db/model_db.dart b/pkgs/gcloud/lib/src/db/model_db.dart index ba19caee..dd30c1bf 100644 --- a/pkgs/gcloud/lib/src/db/model_db.dart +++ b/pkgs/gcloud/lib/src/db/model_db.dart @@ -2,11 +2,12 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// A database of all registered models. 
/// -/// Responsible for converting between dart model objects and datastore entities. +/// Responsible for converting between dart model objects and datastore +/// entities. abstract class ModelDB { /// Converts a [ds.Key] to a [Key]. Key fromDatastoreKey(ds.Key datastoreKey); diff --git a/pkgs/gcloud/lib/src/db/model_db_impl.dart b/pkgs/gcloud/lib/src/db/model_db_impl.dart index e17ddcff..0703ec5d 100644 --- a/pkgs/gcloud/lib/src/db/model_db_impl.dart +++ b/pkgs/gcloud/lib/src/db/model_db_impl.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// An implementation of [ModelDB] based on model class annotations. /// @@ -202,9 +202,7 @@ class ModelDBImpl implements ModelDB { lm.declarations.values .whereType() .where((d) => d.hasReflectedType) - .forEach((declaration) { - _tryLoadNewModelClass(declaration); - }); + .forEach(_tryLoadNewModelClass); } // Ask every [ModelDescription] to compute whatever global state it wants @@ -228,7 +226,7 @@ class ModelDBImpl implements ModelDB { void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) { Kind? kindAnnotation; for (var instance in classMirror.metadata) { - if (instance.reflectee.runtimeType == Kind) { + if ((instance.reflectee as Object).runtimeType == Kind) { if (kindAnnotation != null) { throw StateError( 'Cannot have more than one ModelMetadata() annotation ' @@ -461,8 +459,9 @@ class _ModelDescription { var mirror = classMirror.newInstance(const Symbol(''), []); // Set the id and the parent key - mirror.reflectee.id = key.id; - mirror.reflectee.parentKey = key.parent; + final model = mirror.reflectee as Model; + model.id = key.id; + model.parentKey = key.parent; db._propertiesForModel(this).forEach((String fieldName, Property prop) { _decodeProperty(db, entity, mirror, fieldName, prop); @@ -485,9 +484,12 @@ class _ModelDescription { try { mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value); + // ignore: avoid_catching_errors } on TypeError catch (error) { - throw StateError('Error trying to set property "${prop.propertyName}" ' - 'to $value for field "$fieldName": $error'); + throw StateError( + 'Error trying to set property "${prop.propertyName}" ' + 'to $value for field "$fieldName": $error', + ); } } diff --git a/pkgs/gcloud/lib/src/db/models.dart b/pkgs/gcloud/lib/src/db/models.dart index 96a1b032..384e9f5f 100644 --- a/pkgs/gcloud/lib/src/db/models.dart +++ b/pkgs/gcloud/lib/src/db/models.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.db; +part of '../../db.dart'; /// Represents a unique identifier for a [Model] stored in a datastore. /// @@ -97,8 +97,9 @@ class Partition { /// Superclass for all model classes. /// -/// Every model class has a [id] of type [T] which must be `int` or `String`, and -/// a [parentKey]. The [key] getter is returning the key for the model object. +/// Every model class has a [id] of type [T] which must be `int` or `String`, +/// and a [parentKey]. The [key] getter is returning the key for the model +/// object. abstract class Model { T? id; Key? parentKey; diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index a546cfc8..ce491b18 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -2,7 +2,7 @@ // for details. 
All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.pubsub; +part of '../pubsub.dart'; class _PubSubImpl implements PubSub { @override @@ -337,14 +337,14 @@ class _PushEventImpl implements PushEvent { factory _PushEventImpl.fromJson(String json) { Map body = jsonDecode(json) as Map; - var data = body['message']['data'] as String; + var data = (body['message'] as Map)['data'] as String; Map labels = HashMap(); - body['message']['labels'].forEach((label) { - var key = label['key'] as String; - var value = label['strValue']; - value ??= label['numValue']; + for (var label in (body['message'] as Map)['labels'] as List) { + final l = label as Map; + var key = l['key'] as String; + var value = l['strValue'] ?? l['numValue']; labels[key] = value.toString(); - }); + } var subscription = body['subscription'] as String; // TODO(#1): Remove this when the push event subscription name is prefixed // with '/subscriptions/'. diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index 6251c0c8..d2850d9f 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -part of gcloud.storage; +part of '../storage.dart'; const String _absolutePrefix = 'gs://'; const String _directoryDelimiter = '/'; @@ -85,7 +85,7 @@ class _StorageImpl implements Storage { Future bucketInfo(String bucketName) { return _api.buckets .get(bucketName, projection: 'full') - .then((bucket) => _BucketInfoImpl(bucket)); + .then(_BucketInfoImpl.new); } @override @@ -555,7 +555,7 @@ class _MediaUploadStreamSink implements StreamSink> { } @override - void addError(errorEvent, [StackTrace? stackTrace]) { + void addError(Object errorEvent, [StackTrace? stackTrace]) { _controller.addError(errorEvent, stackTrace); } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 122e395f..3c726f2a 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -46,7 +46,7 @@ /// For most of the APIs in ths library which take instances of other classes /// from this library it is the assumption that the actual implementations /// provided here are used. -library gcloud.storage; +library; import 'dart:async'; import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView; @@ -87,21 +87,6 @@ void registerStorageService(Storage storage) { ss.register(_storageKey, storage); } -int _jenkinsHash(List e) { - const _hashMask = 0x3fffffff; - var hash = 0; - for (var i = 0; i < e.length; i++) { - var c = e[i].hashCode; - hash = (hash + c) & _hashMask; - hash = (hash + (hash << 10)) & _hashMask; - hash ^= (hash >> 6); - } - hash = (hash + (hash << 3)) & _hashMask; - hash ^= (hash >> 11); - hash = (hash + (hash << 15)) & _hashMask; - return hash; -} - /// An ACL (Access Control List) describes access rights to buckets and /// objects. 
/// @@ -176,7 +161,7 @@ class Acl { } @override - late final int hashCode = _jenkinsHash(_entries); + late final int hashCode = Object.hashAll(_entries); @override bool operator ==(Object other) { @@ -221,7 +206,7 @@ class AclEntry { } @override - late final int hashCode = _jenkinsHash([scope, permission]); + late final int hashCode = Object.hash(scope, permission); @override bool operator ==(Object other) { @@ -287,7 +272,7 @@ abstract class AclScope { AclScope._(this._type, this._id); @override - late final int hashCode = _jenkinsHash([_type, _id]); + late final int hashCode = Object.hash(_type, _id); @override bool operator ==(Object other) { diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 3cf81429..b3023ccd 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,21 +1,21 @@ name: gcloud -version: 0.8.7 +version: 0.8.8-dev description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud environment: - sdk: '>=2.12.0 <3.0.0' + sdk: '>=2.19.0 <3.0.0' dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <10.0.0' + googleapis: '>=3.0.0 <11.0.0' http: ^0.13.0 meta: ^1.3.0 dev_dependencies: + dart_flutter_team_lints: ^1.0.0 googleapis_auth: ^1.1.0 http_parser: ^4.0.0 - lints: ^1.0.0 mime: ^1.0.0 test: ^1.17.5 diff --git a/pkgs/gcloud/test/common.dart b/pkgs/gcloud/test/common.dart index c4af9317..4179a410 100644 --- a/pkgs/gcloud/test/common.dart +++ b/pkgs/gcloud/test/common.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore_for_file: only_throw_errors + import 'dart:async'; import 'dart:convert'; @@ -87,16 +89,16 @@ class MockClient extends http.BaseClient { return client.send(request); } - Future respond(response) { - return Future.value(http.Response(jsonEncode(response.toJson()), 200, - headers: _responseHeaders)); + Future respond(dynamic response) { + return Future.value( + http.Response(jsonEncode(response), 200, headers: _responseHeaders)); } Future respondEmpty() { return Future.value(http.Response('{}', 200, headers: _responseHeaders)); } - Future respondInitiateResumableUpload(project) { + Future respondInitiateResumableUpload(String project) { final headers = Map.from(_responseHeaders); headers['location'] = 'https://$hostname/resumable/upload$rootPath' 'b/$project/o?uploadType=resumable&alt=json&' @@ -153,7 +155,7 @@ class MockClient extends http.BaseClient { [13, 10] ]) .transform(mime.MimeMultipartTransformer(boundary!)) - .listen(((mime.MimeMultipart mimeMultipart) { + .listen((mime.MimeMultipart mimeMultipart) { var contentType = mimeMultipart.headers['content-type']!; partCount++; if (partCount == 1) { @@ -176,7 +178,7 @@ class MockClient extends http.BaseClient { // Exactly two parts expected. throw 'Unexpected part count'; } - })); + }); return completer.future; } diff --git a/pkgs/gcloud/test/common_e2e.dart b/pkgs/gcloud/test/common_e2e.dart index ed35e482..9c90d44a 100644 --- a/pkgs/gcloud/test/common_e2e.dart +++ b/pkgs/gcloud/test/common_e2e.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-library gcloud.test.common_e2e; - import 'dart:async'; import 'dart:io'; diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 1bba7adc..120ec5e1 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library datastore_test; - /// NOTE: In order to run these tests, the following datastore indices must /// exist: /// $ cat index.yaml @@ -25,6 +23,8 @@ library datastore_test; /// $ gcloud datastore create-indexes index.yaml /// /// Now, wait for indexing done +library; + import 'dart:async'; import 'package:gcloud/common.dart'; @@ -37,12 +37,6 @@ import '../../common_e2e.dart'; import '../error_matchers.dart'; import 'utils.dart'; -Future sleep(Duration duration) { - var completer = Completer(); - Timer(duration, completer.complete); - return completer.future; -} - Future> consumePages(FirstPageProvider provider) { return StreamFromPages(provider).stream.toList(); } @@ -608,7 +602,7 @@ void runTests(Datastore datastore, String? namespace) { return Future.wait(transactions) .then((List transactions) { // Do a lookup for the entities in every transaction - List>> lookups = >>[]; + var lookups = >>[]; for (var transaction in transactions) { lookups.add(datastore.lookup(keys, transaction: transaction)); } @@ -714,7 +708,7 @@ void runTests(Datastore datastore, String? namespace) { {List? orders, bool transactional = false, bool xg = false}) { // We query for all subsets of expectedEntities // NOTE: This is O(0.5 * n^2) queries, but n is currently only 6. - var queryTests = []; + var queryTests = []; for (var start = 0; start < expectedEntities.length; start++) { for (var end = start; end < expectedEntities.length; end++) { var offset = start; @@ -740,7 +734,7 @@ void runTests(Datastore datastore, String? namespace) { limit: expectedEntities.length * 10); }); - return Future.forEach(queryTests, (dynamic f) => f()); + return Future.forEach(queryTests, (f) => f()); } const testQueryKind = 'TestQueryKind'; @@ -756,25 +750,25 @@ void runTests(Datastore datastore, String? namespace) { var queryListEntry = '${testListValue}2'; var queryIndexValue = '${testIndexedPropertyValuePrefix}1'; - reverseOrderFunction(Entity a, Entity b) { + int reverseOrderFunction(Entity a, Entity b) { // Reverse the order return -1 * (a.properties[queryKey] as String) .compareTo(b.properties[queryKey].toString()); } - filterFunction(Entity entity) { + bool filterFunction(Entity entity) { var value = entity.properties[queryKey] as String; return value.compareTo(queryUpperbound) == -1 && value.compareTo(queryLowerBound) == 1; } - listFilterFunction(Entity entity) { + bool listFilterFunction(Entity entity) { var values = entity.properties[testListProperty] as List; return values.contains(queryListEntry); } - indexFilterMatches(Entity entity) { + bool indexFilterMatches(Entity entity) { return entity.properties[testIndexedProperty] == queryIndexValue; } diff --git a/pkgs/gcloud/test/datastore/e2e/utils.dart b/pkgs/gcloud/test/datastore/e2e/utils.dart index f5dfb106..94c68eed 100644 --- a/pkgs/gcloud/test/datastore/e2e/utils.dart +++ b/pkgs/gcloud/test/datastore/e2e/utils.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. 
Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library raw_datastore_test_utils; - import 'package:gcloud/datastore.dart'; const _testKind = 'TestKind'; diff --git a/pkgs/gcloud/test/datastore/error_matchers.dart b/pkgs/gcloud/test/datastore/error_matchers.dart index 44bdfb32..76be4ff4 100644 --- a/pkgs/gcloud/test/datastore/error_matchers.dart +++ b/pkgs/gcloud/test/datastore/error_matchers.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library error_matchers; - import 'dart:io'; import 'package:gcloud/datastore.dart'; diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index 6240eb57..d7b18b14 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.db_test; - import 'dart:mirrors' show reflectClass; import 'package:gcloud/datastore.dart' as datastore; @@ -101,10 +99,6 @@ class OnlyNamedArguments { const OnlyNamedArguments({int? arg, int? arg2}); } -class RequiredNamedArguments { - const RequiredNamedArguments({int? arg1, required int arg2}); -} - class DefaultArgumentValues { const DefaultArgumentValues([int arg1 = 1, int arg2 = 2]); } diff --git a/pkgs/gcloud/test/db/e2e/db_test_impl.dart b/pkgs/gcloud/test/db/e2e/db_test_impl.dart index 39cfe265..16a25e89 100644 --- a/pkgs/gcloud/test/db/e2e/db_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/db_test_impl.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library db_test; +// ignore_for_file: avoid_dynamic_calls /// NOTE: In order to run these tests, the following datastore indices must /// exist: @@ -43,6 +43,8 @@ library db_test; /// $ gcloud datastore create-indexes index.yaml /// /// Now, wait for indexing done +library; + import 'dart:async'; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart index 027b1d29..9ff1721c 100644 --- a/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart +++ b/pkgs/gcloud/test/db/e2e/metamodel_test_impl.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library metamodel_test; +// ignore_for_file: avoid_dynamic_calls import 'dart:async'; @@ -43,7 +43,7 @@ Future sleep(Duration duration) { return completer.future; } -void runTests(datastore, db.DatastoreDB store) { +void runTests(Datastore datastore, db.DatastoreDB store) { // Shorten this name, so we don't have to break lines at 80 chars. final cond = predicate; diff --git a/pkgs/gcloud/test/db/model_db_test.dart b/pkgs/gcloud/test/db/model_db_test.dart index 2ffdd8f5..f2506469 100644 --- a/pkgs/gcloud/test/db/model_db_test.dart +++ b/pkgs/gcloud/test/db/model_db_test.dart @@ -2,8 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-library gcloud.db_impl_test; - import 'dart:async'; import 'package:gcloud/db.dart'; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart index d028614c..199aabd5 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_fieldname.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore: unnecessary_library_directive library gcloud.db.model_test.duplicate_fieldname; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart index 1859fdf9..84fb9b2f 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_kind.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore: unnecessary_library_directive library gcloud.db.model_test.duplicate_kind; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart index de550898..2c3c9aba 100644 --- a/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart +++ b/pkgs/gcloud/test/db/model_dbs/duplicate_property.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore: unnecessary_library_directive library gcloud.db.model_test.duplicate_property; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart index 3ffd27ca..1869a4ed 100644 --- a/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart +++ b/pkgs/gcloud/test/db/model_dbs/multiple_annotations.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore: unnecessary_library_directive library gcloud.db.model_test.multiple_annotations; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart index 1c3b3d5e..11809497 100644 --- a/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart +++ b/pkgs/gcloud/test/db/model_dbs/no_default_constructor.dart @@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore: unnecessary_library_directive library gcloud.db.model_test.no_default_constructor; import 'package:gcloud/db.dart' as db; diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index 61547876..f2629c5a 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
-library gcloud.db.properties_test; +// ignore_for_file: only_throw_errors import 'dart:typed_data'; @@ -195,7 +195,7 @@ class Custom { int get hashCode => customValue.hashCode; @override - bool operator ==(other) { + bool operator ==(Object other) { return other is Custom && other.customValue == customValue; } } @@ -270,7 +270,6 @@ class ModelDBMock implements ModelDB { return _datastoreKey; } - Map? propertiesForModel(modelDescription) => null; @override T? fromDatastoreEntity(datastore.Entity? entity) => null; @override diff --git a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart index cb051307..b64d2319 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_e2e_test.dart @@ -2,8 +2,11 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore_for_file: only_throw_errors + @Tags(['e2e']) @Timeout(Duration(seconds: 120)) +library; import 'package:gcloud/pubsub.dart'; import 'package:http/http.dart'; @@ -21,13 +24,10 @@ void main() { // Generate a unique prefix for all names generated by the tests. var id = DateTime.now().millisecondsSinceEpoch; prefix = 'dart-e2e-test-$id'; - await withAuthClient(PubSub.SCOPES, ( - String _project, - httpClient, - ) async { + await withAuthClient(PubSub.SCOPES, (p, httpClient) async { // Share the same pubsub connection for all tests. - pubsub = PubSub(httpClient, _project); - project = _project; + pubsub = PubSub(httpClient, p); + project = p; client = httpClient; }); }); diff --git a/pkgs/gcloud/test/pubsub/pubsub_test.dart b/pkgs/gcloud/test/pubsub/pubsub_test.dart index 2f73b98c..8994b133 100644 --- a/pkgs/gcloud/test/pubsub/pubsub_test.dart +++ b/pkgs/gcloud/test/pubsub/pubsub_test.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore_for_file: only_throw_errors + import 'dart:async'; import 'dart:convert'; @@ -308,7 +310,7 @@ void main() { var count = 0; late StreamSubscription subscription; subscription = api.listTopics().listen( - expectAsync1(((_) { + expectAsync1((_) { count++; if (count == 50) { if (withPause) { @@ -322,7 +324,7 @@ void main() { })); } return; - }), count: 50), + }, count: 50), onDone: expectAsync0(() {}), onError: expectAsync1( (e) => e is pubsub.DetailedApiRequestError), @@ -391,9 +393,7 @@ void main() { expect(page.items.length, page.isLast ? n - (totalPages - 1) * pageSize : pageSize); if (!page.isLast) { - page.next().then(expectAsync1((page) { - handlePage(page); - })); + page.next().then(expectAsync1(handlePage)); } else { expect(() => page.next(), throwsStateError); expect(pageCount, totalPages); @@ -716,7 +716,7 @@ void main() { var count = 0; late StreamSubscription subscription; subscription = api.listSubscriptions().listen( - expectAsync1(((_) { + expectAsync1((_) { count++; if (count == 50) { if (withPause) { @@ -731,7 +731,7 @@ void main() { })); } return; - }), count: 50), + }, count: 50), onDone: expectAsync0(() {}), onError: expectAsync1( (e) => e is pubsub.DetailedApiRequestError), @@ -821,9 +821,7 @@ void main() { expect(page.items.length, page.isLast ? 
n - (totalPages - 1) * pageSize : pageSize); if (!page.isLast) { - page.next().then((page) { - handlingPage(page); - }); + page.next().then(handlingPage); } else { expect(() => page.next(), throwsStateError); expect(pageCount, totalPages); @@ -904,12 +902,12 @@ void main() { var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); - registerPublish(mock, 4, ((request) { + registerPublish(mock, 4, (request) { expect(request.messages!.length, 1); expect(request.messages![0].data, messageBase64); expect(request.messages![0].attributes, isNull); return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); - })); + }); return topic.publishString(message).then(expectAsync1((result) { expect(result, isNull); @@ -937,14 +935,14 @@ void main() { var api = PubSub(mock, testProject); return api.lookupTopic(name).then(expectAsync1((topic) { mock.clear(); - registerPublish(mock, 4, ((request) { + registerPublish(mock, 4, (request) { expect(request.messages!.length, 1); expect(request.messages![0].data, messageBase64); expect(request.messages![0].attributes, isNotNull); expect(request.messages![0].attributes!.length, attributes.length); expect(request.messages![0].attributes, attributes); return mock.respond(pubsub.PublishResponse()..messageIds = ['0']); - })); + }); return topic .publishString(message, attributes: attributes) diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index bb6874b6..424ed3e6 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.test.service_scope_test; +// ignore_for_file: only_throw_errors import 'dart:async'; @@ -139,12 +139,14 @@ void main() { test('service-scope-destroyed-after-callback-completes', () { // Ensure that once the closure passed to fork() completes, the service // scope is destroyed. - return ss.fork(expectAsync0(() => Future.sync(() { + return ss.fork( + expectAsync0( + () => Future.sync(() { var key = 1; ss.register(key, 'firstValue'); ss.registerScopeExitCallback(Zone.current.bindCallback(() { - // Spawn an async task which will be run after the cleanups to ensure - // the service scope got destroyed. + // Spawn an async task which will be run after the cleanups to + // ensure the service scope got destroyed. Timer.run(expectAsync0(() { expect(() => ss.lookup(key), throwsA(isStateError)); expect(() => ss.register(2, 'value'), throwsA(isStateError)); @@ -154,7 +156,9 @@ void main() { return null; })); expect(ss.lookup(key), equals('firstValue')); - }))); + }), + ), + ); }); test('override-parent-value', () { diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index ffd8ab2a..b93453c9 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -1,6 +1,8 @@ // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. 
+// ignore_for_file: avoid_dynamic_calls + @Tags(['e2e']) library gcloud.storage; @@ -18,7 +20,7 @@ String generateBucketName() { return 'dart-e2e-test-$id'; } -bool testDetailedApiError(e) => e is storage_api.DetailedApiRequestError; +bool testDetailedApiError(Object e) => e is storage_api.DetailedApiRequestError; // Generate a list just above the limit when changing to resumable upload. const int mb = 1024 * 1024; diff --git a/pkgs/gcloud/test/storage/storage_test.dart b/pkgs/gcloud/test/storage/storage_test.dart index 0d7b7bba..83a5a3f0 100644 --- a/pkgs/gcloud/test/storage/storage_test.dart +++ b/pkgs/gcloud/test/storage/storage_test.dart @@ -2,7 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -library gcloud.storage; +// ignore_for_file: only_throw_errors, avoid_catching_errors, +// ignore_for_file: avoid_dynamic_calls import 'dart:async'; import 'dart:convert'; @@ -887,13 +888,16 @@ void main() { await withMockClientAsync((MockClient mock, Storage api) async { var bucket = api.bucket(bucketName); - try { - await bucket.read(objectName, offset: 1).toList(); - fail('An exception should be thrown'); - } on ArgumentError catch (e) { - expect( - e.message, 'length must have a value if offset is non-zero.'); - } + await expectLater( + bucket.read(objectName, offset: 1).toList(), + throwsA( + isA().having( + (p0) => p0.message, + 'message', + 'length must have a value if offset is non-zero.', + ), + ), + ); }); }); From 9a22627425f2565dc8e415665d44a9070d5a86b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Apr 2023 19:50:20 -0700 Subject: [PATCH 209/239] Bump actions/checkout from 3.3.0 to 3.5.0 (dart-lang/gcloud#156) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.3.0 to 3.5.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/ac593985615ec2ede58e132d2e21d2b1cbd6127c...8f4b7f84864484a7bf31766abe9204da3cbe65b3) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 7e4c31e3..5a67f91e 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 with: sdk: ${{ matrix.sdk }} From 0cd5ea7b9e2a9f80ace8321eb6b7a05dceb1e69e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Apr 2023 19:55:44 -0700 Subject: [PATCH 210/239] Bump dart-lang/setup-dart from 1.4.0 to 1.5.0 (dart-lang/gcloud#157) Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.4.0 to 1.5.0. - [Release notes](https://github.com/dart-lang/setup-dart/releases) - [Changelog](https://github.com/dart-lang/setup-dart/blob/main/CHANGELOG.md) - [Commits](https://github.com/dart-lang/setup-dart/compare/a57a6c04cf7d4840e88432aad6281d1e125f0d46...d6a63dab3335f427404425de0fbfed4686d93c4f) --- updated-dependencies: - dependency-name: dart-lang/setup-dart dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 5a67f91e..d308d77c 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 + - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.19.0, dev] steps: - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 - - uses: dart-lang/setup-dart@a57a6c04cf7d4840e88432aad6281d1e125f0d46 + - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} - id: install From 24c4e7b7fb459fc6f0d57321adeabb447f66fd01 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 4 May 2023 14:33:07 +0200 Subject: [PATCH 211/239] Prepare release with topics (dart-lang/gcloud#160) --- pkgs/gcloud/CHANGELOG.md | 3 ++- pkgs/gcloud/pubspec.yaml | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9d924b0b..9c4e9fe6 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,6 +1,7 @@ -## 0.8.8-dev +## 0.8.8 - Require Dart 2.19 +- Add topics in `pubspec.yaml`. 
## 0.8.7 diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b3023ccd..a0b3a971 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,8 +1,11 @@ name: gcloud -version: 0.8.8-dev +version: 0.8.8 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud +topics: + - cloud + - gcp environment: sdk: '>=2.19.0 <3.0.0' From ddf76d5db9e2149989e8886373811fb242bbe309 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 12:34:47 -0700 Subject: [PATCH 212/239] Bump actions/checkout from 3.5.0 to 3.5.2 (dart-lang/gcloud#159) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.0 to 3.5.2. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/8f4b7f84864484a7bf31766abe9204da3cbe65b3...8e5e7e5ab8b370d6c329ec480221332ada57f0ab) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index d308d77c..fe279551 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 + - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} From 597fe308faef2cadfe3e51221d8db3e652b0f162 Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Wed, 17 May 2023 10:40:34 -0700 Subject: [PATCH 213/239] blast_repo fixes (dart-lang/gcloud#161) dependabot --- pkgs/gcloud/.github/dependabot.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/.github/dependabot.yml b/pkgs/gcloud/.github/dependabot.yml index 1603cdd9..725f03af 100644 --- a/pkgs/gcloud/.github/dependabot.yml +++ b/pkgs/gcloud/.github/dependabot.yml @@ -3,7 +3,9 @@ version: 2 updates: - - package-ecosystem: "github-actions" - directory: "/" + - package-ecosystem: github-actions + directory: / schedule: - interval: "monthly" + interval: monthly + labels: + - autosubmit From b9716a1ea1091b31b0ebb2edb668602f19f6f102 Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Wed, 21 Jun 2023 12:49:39 -0700 Subject: [PATCH 214/239] blast repo changes: auto-publish (dart-lang/gcloud#165) * blast_repo fixes auto-publish * add two exclusions for unreachable_from_main --- pkgs/gcloud/.github/workflows/publish.yaml | 14 ++++++++++++++ pkgs/gcloud/test/db/db_test.dart | 2 ++ pkgs/gcloud/test/db/properties_test.dart | 2 +- 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 
pkgs/gcloud/.github/workflows/publish.yaml diff --git a/pkgs/gcloud/.github/workflows/publish.yaml b/pkgs/gcloud/.github/workflows/publish.yaml new file mode 100644 index 00000000..2239b63d --- /dev/null +++ b/pkgs/gcloud/.github/workflows/publish.yaml @@ -0,0 +1,14 @@ +# A CI configuration to auto-publish pub packages. + +name: Publish + +on: + pull_request: + branches: [ master ] + push: + tags: [ 'v[0-9]+.[0-9]+.[0-9]+' ] + +jobs: + publish: + if: ${{ github.repository_owner == 'dart-lang' }} + uses: dart-lang/ecosystem/.github/workflows/publish.yaml@main diff --git a/pkgs/gcloud/test/db/db_test.dart b/pkgs/gcloud/test/db/db_test.dart index d7b18b14..6039ab30 100644 --- a/pkgs/gcloud/test/db/db_test.dart +++ b/pkgs/gcloud/test/db/db_test.dart @@ -2,6 +2,8 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +// ignore_for_file: unreachable_from_main + import 'dart:mirrors' show reflectClass; import 'package:gcloud/datastore.dart' as datastore; diff --git a/pkgs/gcloud/test/db/properties_test.dart b/pkgs/gcloud/test/db/properties_test.dart index f2629c5a..25a7145f 100644 --- a/pkgs/gcloud/test/db/properties_test.dart +++ b/pkgs/gcloud/test/db/properties_test.dart @@ -2,7 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -// ignore_for_file: only_throw_errors +// ignore_for_file: only_throw_errors, unreachable_from_main import 'dart:typed_data'; From f873d209faae20c2e8b4276b6538d0f79a2a394f Mon Sep 17 00:00:00 2001 From: Felix Angelov Date: Wed, 21 Jun 2023 14:56:28 -0500 Subject: [PATCH 215/239] allow latest pkg:http and pkg:googleapis (dart-lang/gcloud#164) * allow latest pkg:http * allow latest pkg:googleapis * chore: bump version and add CHANGELOG entry --- pkgs/gcloud/CHANGELOG.md | 5 +++++ pkgs/gcloud/pubspec.yaml | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9c4e9fe6..a93af8f9 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.8.9 + +- Support the latest version 1.0.0 of the `http` package. +- Support the latest version 12.0.0 of the `googleapis` package. + ## 0.8.8 - Require Dart 2.19 diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index a0b3a971..676df66b 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.8 +version: 0.8.9 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
repository: https://github.com/dart-lang/gcloud @@ -12,8 +12,8 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <11.0.0' - http: ^0.13.0 + googleapis: '>=3.0.0 <12.0.0' + http: '>=0.13.5 <2.0.0' meta: ^1.3.0 dev_dependencies: From 02066dd6d088c1882fcf38073af17831cc900fc5 Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Wed, 21 Jun 2023 13:36:01 -0700 Subject: [PATCH 216/239] widen the sdk constraint (dart-lang/gcloud#166) --- pkgs/gcloud/CHANGELOG.md | 4 ++++ pkgs/gcloud/README.md | 12 +++++++++--- pkgs/gcloud/pubspec.yaml | 5 +++-- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index a93af8f9..2efd48dc 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.8.10-wip + +- Widen the SDK constraint to support Dart 3.0 + ## 0.8.9 - Support the latest version 1.0.0 of the `http` package. diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 7294b281..779f7d0c 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -1,8 +1,13 @@ +[![Dart CI](https://github.com/dart-lang/gcloud/actions/workflows/test-package.yml/badge.svg)](https://github.com/dart-lang/gcloud/actions/workflows/test-package.yml) +[![pub package](https://img.shields.io/pub/v/gcloud.svg)](https://pub.dev/packages/gcloud) +[![package publisher](https://img.shields.io/pub/publisher/gcloud.svg)](https://pub.dev/packages/gcloud/publisher) + +The `gcloud` package provides a high level idiomatic Dart interface to some of +the most widely used Google Cloud Platform services. + ## Google Cloud Platform support package (gcloud) -The `gcloud` package provides a high level "idiomatic Dart" interface to -some of the most widely used Google Cloud Platform services. Currently the -following services are supported: +Currently the following services are supported: * Cloud Datastore * Cloud Storage @@ -40,6 +45,7 @@ the following imports are present: ```dart import 'dart:io'; + import 'package:googleapis_auth/auth_io.dart' as auth; import 'package:http/http.dart' as http; import 'package:gcloud/db.dart'; diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 676df66b..e5e48a3f 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,14 +1,15 @@ name: gcloud -version: 0.8.9 +version: 0.8.10-wip description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud + topics: - cloud - gcp environment: - sdk: '>=2.19.0 <3.0.0' + sdk: '>=2.19.0 <4.0.0' dependencies: _discoveryapis_commons: ^1.0.0 From bdd42446fe2db6d5526ee72d47b44c33281fab10 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jul 2023 01:16:20 +0000 Subject: [PATCH 217/239] Bump actions/checkout from 3.5.2 to 3.5.3 (dart-lang/gcloud#167) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.2 to 3.5.3.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index fe279551..0fdcc690 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} From 0b7c4bbfbfecb4fe86063a1edb3446a097e2a9e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20So=C3=B3s?= Date: Fri, 21 Jul 2023 13:40:47 +0200 Subject: [PATCH 218/239] Support retrying Datastore operations. (dart-lang/gcloud#168) * Support retrying Datastore operations. * Expose only maxAttempts * add more errors --- pkgs/gcloud/CHANGELOG.md | 3 +- pkgs/gcloud/lib/datastore.dart | 18 ++ pkgs/gcloud/lib/src/retry_datastore_impl.dart | 159 ++++++++++++++++++ pkgs/gcloud/pubspec.yaml | 3 +- pkgs/gcloud/test/db_all_e2e_test.dart | 6 +- 5 files changed, 185 insertions(+), 4 deletions(-) create mode 100644 pkgs/gcloud/lib/src/retry_datastore_impl.dart diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 2efd48dc..bb604150 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,6 +1,7 @@ -## 0.8.10-wip +## 0.8.10 - Widen the SDK constraint to support Dart 3.0 +- Support retrying Datastore operations. ## 0.8.9 diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 3b05eeae..53ca4eb2 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -12,10 +12,12 @@ library; import 'dart:async'; import 'package:http/http.dart' as http; +import 'package:retry/retry.dart'; import 'common.dart' show Page; import 'service_scope.dart' as ss; import 'src/datastore_impl.dart' show DatastoreImpl; +import 'src/retry_datastore_impl.dart'; const Symbol _datastoreKey = #gcloud.datastore; @@ -391,6 +393,22 @@ abstract class Datastore { return DatastoreImpl(client, project); } + /// Retry Datastore operations where the issue seems to be transient. + /// + /// The [delegate] is the configured [Datastore] implementation that will be + /// used. + /// + /// The operations will be retried at maximum of [maxAttempts]. + factory Datastore.withRetry( + Datastore delegate, { + int? maxAttempts, + }) { + return RetryDatastoreImpl( + delegate, + RetryOptions(maxAttempts: maxAttempts ?? 3), + ); + } + /// Allocate integer IDs for the partially populated [keys] given as argument. /// /// The returned [Key]s will be fully populated with the allocated IDs. diff --git a/pkgs/gcloud/lib/src/retry_datastore_impl.dart b/pkgs/gcloud/lib/src/retry_datastore_impl.dart new file mode 100644 index 00000000..e57410cf --- /dev/null +++ b/pkgs/gcloud/lib/src/retry_datastore_impl.dart @@ -0,0 +1,159 @@ +// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +import 'package:retry/retry.dart'; + +import '../common.dart'; +import '../datastore.dart' as datastore; + +/// Datastore implementation which retries most operations +class RetryDatastoreImpl implements datastore.Datastore { + final datastore.Datastore _delegate; + final RetryOptions _retryOptions; + + RetryDatastoreImpl(this._delegate, this._retryOptions); + + @override + Future> allocateIds(List keys) async { + return await _retryOptions.retry( + () => _delegate.allocateIds(keys), + retryIf: _retryIf, + ); + } + + @override + Future beginTransaction({ + bool crossEntityGroup = false, + }) async { + return await _retryOptions.retry( + () => _delegate.beginTransaction(crossEntityGroup: crossEntityGroup), + retryIf: _retryIf, + ); + } + + @override + Future commit({ + List inserts = const [], + List autoIdInserts = const [], + List deletes = const [], + datastore.Transaction? transaction, + }) async { + Future fn() async { + if (transaction == null) { + return await _delegate.commit( + inserts: inserts, + autoIdInserts: autoIdInserts, + deletes: deletes, + ); + } else { + return await _delegate.commit( + inserts: inserts, + autoIdInserts: autoIdInserts, + deletes: deletes, + transaction: transaction, + ); + } + } + + final shouldNotRetry = autoIdInserts.isNotEmpty && transaction == null; + if (shouldNotRetry) { + return await fn(); + } else { + return await _retryOptions.retry(fn, retryIf: _retryIf); + } + } + + @override + Future> lookup( + List keys, { + datastore.Transaction? transaction, + }) async { + return await _retryOptions.retry( + () async { + if (transaction == null) { + return await _delegate.lookup(keys); + } else { + return await _delegate.lookup(keys, transaction: transaction); + } + }, + retryIf: _retryIf, + ); + } + + @override + Future> query( + datastore.Query query, { + datastore.Partition? partition, + datastore.Transaction? transaction, + }) async { + Future> fn() async { + if (partition != null && transaction != null) { + return await _delegate.query( + query, + partition: partition, + transaction: transaction, + ); + } else if (partition != null) { + return await _delegate.query(query, partition: partition); + } else if (transaction != null) { + return await _delegate.query( + query, + transaction: transaction, + ); + } else { + return await _delegate.query(query); + } + } + + return await _retryOptions.retry( + () async => _RetryPage(await fn(), _retryOptions), + retryIf: _retryIf, + ); + } + + @override + Future rollback(datastore.Transaction transaction) async { + return await _retryOptions.retry( + () => _delegate.rollback(transaction), + retryIf: _retryIf, + ); + } +} + +class _RetryPage implements Page { + final Page _delegate; + final RetryOptions _retryOptions; + + _RetryPage(this._delegate, this._retryOptions); + + @override + bool get isLast => _delegate.isLast; + + @override + List get items => _delegate.items; + + @override + Future> next({int? 
pageSize}) async { + return await _retryOptions.retry( + () async { + if (pageSize == null) { + return await _delegate.next(); + } else { + return await _delegate.next(pageSize: pageSize); + } + }, + retryIf: _retryIf, + ); + } +} + +bool _retryIf(Exception e) { + if (e is datastore.TransactionAbortedError || + e is datastore.NeedIndexError || + e is datastore.QuotaExceededError || + e is datastore.PermissionDeniedError) { + return false; + } + return true; +} diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index e5e48a3f..7a58f69e 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.10-wip +version: 0.8.10 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud @@ -16,6 +16,7 @@ dependencies: googleapis: '>=3.0.0 <12.0.0' http: '>=0.13.5 <2.0.0' meta: ^1.3.0 + retry: ^3.1.1 dev_dependencies: dart_flutter_team_lints: ^1.0.0 diff --git a/pkgs/gcloud/test/db_all_e2e_test.dart b/pkgs/gcloud/test/db_all_e2e_test.dart index f66f5156..af37670f 100644 --- a/pkgs/gcloud/test/db_all_e2e_test.dart +++ b/pkgs/gcloud/test/db_all_e2e_test.dart @@ -10,6 +10,7 @@ library gcloud.test.db_all_test; import 'dart:async'; import 'dart:io'; +import 'package:gcloud/datastore.dart'; import 'package:gcloud/db.dart' as db; import 'package:gcloud/src/datastore_impl.dart' as datastore_impl; import 'package:http/http.dart'; @@ -25,12 +26,13 @@ Future main() async { var now = DateTime.now().millisecondsSinceEpoch; var namespace = '${Platform.operatingSystem}$now'; - late datastore_impl.DatastoreImpl datastore; + late Datastore datastore; late db.DatastoreDB datastoreDB; Client? client; await withAuthClient(scopes, (String project, httpClient) async { - datastore = datastore_impl.DatastoreImpl(httpClient, project); + datastore = + Datastore.withRetry(datastore_impl.DatastoreImpl(httpClient, project)); datastoreDB = db.DatastoreDB(datastore); client = httpClient; }); From 1652eefda349da2156855d252570c7d0f5958532 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Thu, 10 Aug 2023 15:43:14 +0200 Subject: [PATCH 219/239] Fix missing retries in Page.next (dart-lang/gcloud#170) * Fix missing retries in Page.next * Ignore deprecated usage * Propogate deprecation message --- pkgs/gcloud/CHANGELOG.md | 7 +++++++ pkgs/gcloud/lib/src/pubsub_impl.dart | 6 +++++- pkgs/gcloud/lib/src/retry_datastore_impl.dart | 3 ++- pkgs/gcloud/pubspec.yaml | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index bb604150..a0466bf8 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,10 @@ +## 0.8.11 +- After the first `Page` created by `Datastore.withRetry()` retries were not + happening. This is now fixed, ensuring that `Page.next()` will always retry + when `Datastore` is wrapped with `Datastore.withRetry()`. +- Calling with `wait: false` in `Subscription.pull(wait: false)` for `PubSub` + have been deprecated. 
+ ## 0.8.10 - Widen the SDK constraint to support Dart 3.0 diff --git a/pkgs/gcloud/lib/src/pubsub_impl.dart b/pkgs/gcloud/lib/src/pubsub_impl.dart index ce491b18..cfdb3aca 100644 --- a/pkgs/gcloud/lib/src/pubsub_impl.dart +++ b/pkgs/gcloud/lib/src/pubsub_impl.dart @@ -97,6 +97,7 @@ class _PubSubImpl implements PubSub { String subscription, bool returnImmediately) { var request = pubsub.PullRequest() ..maxMessages = 1 + // ignore: deprecated_member_use ..returnImmediately = returnImmediately; return _api.projects.subscriptions.pull(request, subscription); } @@ -428,7 +429,10 @@ class _SubscriptionImpl implements Subscription { Future delete() => _api._deleteSubscription(_subscription.name!); @override - Future pull({bool wait = true}) { + Future pull({ + @Deprecated('returnImmediately has been deprecated from pubsub') + bool wait = true, + }) { return _api._pull(_subscription.name!, !wait).then((response) { // The documentation says 'Returns an empty list if there are no // messages available in the backlog'. However the receivedMessages diff --git a/pkgs/gcloud/lib/src/retry_datastore_impl.dart b/pkgs/gcloud/lib/src/retry_datastore_impl.dart index e57410cf..72b75277 100644 --- a/pkgs/gcloud/lib/src/retry_datastore_impl.dart +++ b/pkgs/gcloud/lib/src/retry_datastore_impl.dart @@ -135,7 +135,7 @@ class _RetryPage implements Page { @override Future> next({int? pageSize}) async { - return await _retryOptions.retry( + final nextPage = await _retryOptions.retry( () async { if (pageSize == null) { return await _delegate.next(); @@ -145,6 +145,7 @@ class _RetryPage implements Page { }, retryIf: _retryIf, ); + return _RetryPage(nextPage, _retryOptions); } } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 7a58f69e..aa121930 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.10 +version: 0.8.11 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud From cb792d843240455b7059e2d7d5cb586a62644809 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 01:17:06 +0000 Subject: [PATCH 220/239] Bump actions/checkout from 3.5.3 to 3.6.0 (dart-lang/gcloud#171) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 3.5.3 to 3.6.0.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 0fdcc690..ff7bf6fa 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f with: sdk: ${{ matrix.sdk }} From 3c455ca0c6e8c1a6b5593d49bd47fe23e659d8b6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Oct 2023 01:54:20 +0000 Subject: [PATCH 221/239] Bump dart-lang/setup-dart from 1.5.0 to 1.5.1 (dart-lang/gcloud#173) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.5.0 to 1.5.1.
Release notes (dart-lang/setup-dart v1.5.1): no longer test the setup-dart action on pre-2.12 SDKs; upgrade JS interop code to use extension types (the new name for inline classes); the upcoming rename of the be channel to main is now supported with forward compatibility that switches when the rename happens.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index ff7bf6fa..76575833 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f + - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.19.0, dev] steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 - - uses: dart-lang/setup-dart@d6a63dab3335f427404425de0fbfed4686d93c4f + - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} - id: install From 6c26969bbb2c6217c1bc86586411a796a484d4cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 07:15:13 -0700 Subject: [PATCH 222/239] Bump actions/checkout from 3.6.0 to 4.1.0 (dart-lang/gcloud#172) Bumps [actions/checkout](https://github.com/actions/checkout) from 3.6.0 to 4.1.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/f43a0e5ff2bd294095638e18286ca9a3d1956744...8ade135a41bc03ea155e62e844d188df1ea18608) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 76575833..abd1fc8d 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 + - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 + - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} From a4bd14bca3081ec24b8898e9692c2408a2845c38 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 01:05:18 +0000 Subject: [PATCH 223/239] Bump actions/checkout from 4.1.0 to 4.1.1 (dart-lang/gcloud#175) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.0 to 4.1.1.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index abd1fc8d..1d3dc520 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 with: sdk: ${{ matrix.sdk }} From e70fb399a87e1f2d4c391e482675d2cf2f0f4898 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 01:07:58 +0000 Subject: [PATCH 224/239] Bump dart-lang/setup-dart from 1.5.1 to 1.6.0 (dart-lang/gcloud#176) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.5.1 to 1.6.0.
Release notes (dart-lang/setup-dart v1.6.0): enable provisioning of the latest Dart SDK patch release by specifying just the major and minor version (e.g. 3.2).
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 1d3dc520..91fbd409 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 + - uses: dart-lang/setup-dart@b64355ae6ca0b5d484f0106a033dd1388965d06d with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.19.0, dev] steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: dart-lang/setup-dart@8a4b97ea2017cc079571daec46542f76189836b1 + - uses: dart-lang/setup-dart@b64355ae6ca0b5d484f0106a033dd1388965d06d with: sdk: ${{ matrix.sdk }} - id: install From 6bbb1e29f22985164d997bd0d949424814256b08 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Wed, 10 Jan 2024 14:04:40 +0100 Subject: [PATCH 225/239] Extend version constraint to allow googleapis 12.x (dart-lang/gcloud#179) Fix dart-lang/gcloud#178 --- pkgs/gcloud/CHANGELOG.md | 3 +++ pkgs/gcloud/pubspec.yaml | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index a0466bf8..7612b0e2 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,6 @@ +## 0.8.12 +- Support the latest version 12.0.0 of the `googleapis` package. + ## 0.8.11 - After the first `Page` created by `Datastore.withRetry()` retries were not happening. This is now fixed, ensuring that `Page.next()` will always retry diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index aa121930..b02ccb4f 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.11 +version: 0.8.12 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. repository: https://github.com/dart-lang/gcloud @@ -13,7 +13,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <12.0.0' + googleapis: '>=3.0.0 <13.0.0' http: '>=0.13.5 <2.0.0' meta: ^1.3.0 retry: ^3.1.1 From f3249e1b151c45af366813af33771e7aa8a09dc6 Mon Sep 17 00:00:00 2001 From: jarrodcolburn Date: Wed, 10 Jan 2024 07:08:21 -0600 Subject: [PATCH 226/239] type (dart-lang/gcloud#177) link was to `http` (not `gcloud`) issues/bug tracker --- pkgs/gcloud/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index 779f7d0c..e5abc850 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -26,7 +26,7 @@ much higher expected rate of API and breaking changes. Your feedback is valuable and will help us evolve this package. For general feedback, suggestions, and comments, please file an issue in the -[bug tracker](https://github.com/dart-lang/http/issues). +[bug tracker](https://github.com/dart-lang/gcloud/issues). 
## API details From 20b889dbea28e0ea8eaaaed0660eb88395e0d14e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 01:34:10 +0000 Subject: [PATCH 227/239] Bump dart-lang/setup-dart from 1.6.0 to 1.6.2 (dart-lang/gcloud#180) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.6.0 to 1.6.2.
Release notes (dart-lang/setup-dart v1.6.1 and v1.6.2): updated the google storage url for main channel releases.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 91fbd409..32563899 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: dart-lang/setup-dart@b64355ae6ca0b5d484f0106a033dd1388965d06d + - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.19.0, dev] steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - - uses: dart-lang/setup-dart@b64355ae6ca0b5d484f0106a033dd1388965d06d + - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} - id: install From 197e5a1c373aa6718a0af69ee30bfb86dd864393 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 01:28:51 +0000 Subject: [PATCH 228/239] Bump actions/checkout from 4.1.1 to 4.1.2 (dart-lang/gcloud#183) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.1 to 4.1.2.
Release notes (actions/checkout v4.1.2): upstream is investigating an issue with this release and has rolled the v4 tag back to v4.1.1.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 32563899..a01355a3 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} From 9171859219029bab6bf12ae73470fcecdf56545e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 01:27:23 +0000 Subject: [PATCH 229/239] Bump actions/checkout from 4.1.2 to 4.1.4 (dart-lang/gcloud#184) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 4.1.2 to 4.1.4.
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index a01355a3..d79eb06b 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 + - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 with: sdk: ${{ matrix.sdk }} From c2290d9783ce462242dc3f1a03acb18576f28ab7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 01:29:58 +0000 Subject: [PATCH 230/239] Bump dart-lang/setup-dart from 1.6.2 to 1.6.4 (dart-lang/gcloud#185) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart) from 1.6.2 to 1.6.4.
Release notes

Sourced from dart-lang/setup-dart's releases.

v1.6.4

  • Rebuild JS code to include changes from v1.6.3

v1.6.3

Changelog

Sourced from dart-lang/setup-dart's changelog.

v1.6.4

  • Rebuild JS code.

v1.6.3

v1.6.2

v1.6.1

  • Updated the google storage url for main channel releases.

v1.6.0

  • Enable provisioning of the latest Dart SDK patch release by specifying just the major and minor version (e.g. 3.2).

v1.5.1

  • No longer test the setup-dart action on pre-2.12 SDKs.
  • Upgrade JS interop code to use extension types (the new name for inline classes).
  • The upcoming rename of the be channel to main is now supported with forward compatibility that switches when the rename happens.

v1.5.0

  • Re-wrote the implementation of the action into Dart.
  • Auto-detect the platform architecture (x64, ia32, arm, arm64).
  • Improved the caching and download resilience of the sdk.
  • Added a new action output: dart-version - the installed version of the sdk.

v1.4.0

  • Automatically create OIDC token for pub.dev.
  • Add a reusable workflow for publishing.

v1.3.0

  • The install location of the Dart SDK is now available

... (truncated)

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dart-lang/setup-dart&package-manager=github_actions&previous-version=1.6.2&new-version=1.6.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index d79eb06b..f9c6d868 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -23,7 +23,7 @@ jobs: sdk: [dev] steps: - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b - - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 + - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} - id: install @@ -50,7 +50,7 @@ jobs: sdk: [2.19.0, dev] steps: - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b - - uses: dart-lang/setup-dart@fedb1266e91cf51be2fdb382869461a434b920a3 + - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} - id: install From 498ef7758ade06487dd7bddfc2f08484cdaa2c6c Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Mon, 6 May 2024 15:05:54 -0700 Subject: [PATCH 231/239] blast_repo fixes (dart-lang/gcloud#186) dependabot --- pkgs/gcloud/.github/dependabot.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/gcloud/.github/dependabot.yml b/pkgs/gcloud/.github/dependabot.yml index 725f03af..cde02ad6 100644 --- a/pkgs/gcloud/.github/dependabot.yml +++ b/pkgs/gcloud/.github/dependabot.yml @@ -9,3 +9,7 @@ updates: interval: monthly labels: - autosubmit + groups: + github-actions: + patterns: + - "*" From bccde7a7bde129f23da461fb268d7460f7048ef4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 22:09:53 +0000 Subject: [PATCH 232/239] Bump actions/checkout from 4.1.4 to 4.1.5 in the github-actions group (dart-lang/gcloud#187) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the github-actions group with 1 update: [actions/checkout](https://github.com/actions/checkout). Updates `actions/checkout` from 4.1.4 to 4.1.5
Release notes

Sourced from actions/checkout's releases.

v4.1.5

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v4.1.4...v4.1.5

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/checkout&package-manager=github_actions&previous-version=4.1.4&new-version=4.1.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index f9c6d868..fd2cc776 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b + - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} From 2676806868b574f13fd36ab28577ca3edbd957b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Jun 2024 01:07:57 +0000 Subject: [PATCH 233/239] Bump actions/checkout from 4.1.5 to 4.1.6 in the github-actions group (dart-lang/gcloud#188) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the github-actions group with 1 update: [actions/checkout](https://github.com/actions/checkout). Updates `actions/checkout` from 4.1.5 to 4.1.6
Release notes

Sourced from actions/checkout's releases.

v4.1.6

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v4.1.5...v4.1.6

Changelog

Sourced from actions/checkout's changelog.

Changelog

v4.1.6

v4.1.5

v4.1.4

v4.1.3

v4.1.2

v4.1.1

v4.1.0

v4.0.0

v3.6.0

v3.5.3

v3.5.2

v3.5.1

... (truncated)

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/checkout&package-manager=github_actions&previous-version=4.1.5&new-version=4.1.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
--- pkgs/gcloud/.github/workflows/test-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index fd2cc776..0688ec6c 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,7 +22,7 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} @@ -49,7 +49,7 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 with: sdk: ${{ matrix.sdk }} From 835f9797787f7e6e93573fcacde0133136504474 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 01:23:21 +0000 Subject: [PATCH 234/239] Bump the github-actions group with 2 updates (dart-lang/gcloud#190) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps the github-actions group with 2 updates: [actions/checkout](https://github.com/actions/checkout) and [dart-lang/setup-dart](https://github.com/dart-lang/setup-dart). Updates `actions/checkout` from 4.1.6 to 4.1.7
Release notes

Sourced from actions/checkout's releases.

v4.1.7

What's Changed

New Contributors

Full Changelog: https://github.com/actions/checkout/compare/v4.1.6...v4.1.7

Changelog

Sourced from actions/checkout's changelog.

Changelog

v4.1.7

v4.1.6

v4.1.5

v4.1.4

v4.1.3

v4.1.2

v4.1.1

v4.1.0

v4.0.0

v3.6.0

v3.5.3

... (truncated)

Updates `dart-lang/setup-dart` from 1.6.4 to 1.6.5
Release notes

Sourced from dart-lang/setup-dart's releases.

v1.6.5

dart-lang/setup-dart#118

Changelog

Sourced from dart-lang/setup-dart's changelog.

v1.6.5

dart-lang/setup-dart#118

v1.6.4

  • Rebuild JS code.

v1.6.3

v1.6.2

v1.6.1

  • Updated the google storage url for main channel releases.

v1.6.0

  • Enable provisioning of the latest Dart SDK patch release by specifying just the major and minor version (e.g. 3.2).

v1.5.1

  • No longer test the setup-dart action on pre-2.12 SDKs.
  • Upgrade JS interop code to use extension types (the new name for inline classes).
  • The upcoming rename of the be channel to main is now supported with forward compatibility that switches when the rename happens.

v1.5.0

  • Re-wrote the implementation of the action into Dart.
  • Auto-detect the platform architecture (x64, ia32, arm, arm64).
  • Improved the caching and download resilience of the sdk.
  • Added a new action output: dart-version - the installed version of the sdk.

v1.4.0

... (truncated)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
--- pkgs/gcloud/.github/workflows/test-package.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/pkgs/gcloud/.github/workflows/test-package.yml index 0688ec6c..1cb1c8e2 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/pkgs/gcloud/.github/workflows/test-package.yml @@ -22,8 +22,8 @@ jobs: matrix: sdk: [dev] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 - - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 with: sdk: ${{ matrix.sdk }} - id: install @@ -49,8 +49,8 @@ jobs: os: [ubuntu-latest] sdk: [2.19.0, dev] steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 - - uses: dart-lang/setup-dart@f0ead981b4d9a35b37f30d36160575d60931ec30 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 with: sdk: ${{ matrix.sdk }} - id: install From 8e80241b0277446e00dacccb7d073106b03af0ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20So=C3=B3s?= Date: Thu, 5 Sep 2024 13:17:25 +0200 Subject: [PATCH 235/239] Upgrade googleapis constraint (dart-lang/gcloud#192) --- pkgs/gcloud/CHANGELOG.md | 3 +++ pkgs/gcloud/pubspec.yaml | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 7612b0e2..0407ec8c 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,6 @@ +## 0.8.13 +- Support the latest version `^13.0.0` of the `googleapis` package. + ## 0.8.12 - Support the latest version 12.0.0 of the `googleapis` package. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b02ccb4f..b315607c 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.12 +version: 0.8.13 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
repository: https://github.com/dart-lang/gcloud @@ -13,7 +13,7 @@ environment: dependencies: _discoveryapis_commons: ^1.0.0 - googleapis: '>=3.0.0 <13.0.0' + googleapis: '>=3.0.0 <14.0.0' http: '>=0.13.5 <2.0.0' meta: ^1.3.0 retry: ^3.1.1 From 355b010663ed121581740809e43d02f6a74ac367 Mon Sep 17 00:00:00 2001 From: Josh Soref <2119212+jsoref@users.noreply.github.com> Date: Wed, 11 Sep 2024 08:35:17 -0400 Subject: [PATCH 236/239] Spelling (dart-lang/gcloud#189) * spelling: available Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: comparison Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: item Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: its Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: keys Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: nonexistent Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: occurred Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: returns Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: scope Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: service Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: this Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: unnamed Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: useful Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> --------- Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> --- pkgs/gcloud/CHANGELOG.md | 6 +++--- pkgs/gcloud/lib/datastore.dart | 8 ++++---- pkgs/gcloud/lib/pubsub.dart | 4 ++-- pkgs/gcloud/lib/service_scope.dart | 2 +- pkgs/gcloud/lib/src/datastore_impl.dart | 2 +- pkgs/gcloud/lib/src/db/db.dart | 2 +- pkgs/gcloud/lib/storage.dart | 4 ++-- pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart | 2 +- pkgs/gcloud/test/service_scope_test.dart | 4 ++-- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 0407ec8c..9c8a4f32 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -158,9 +158,9 @@ * Remove support for `FilterRelation.In` and "propertyname IN" for queries: This is not supported by the newer APIs and was originally part of fat-client - libraries which performed multiple queries for each iten in the list. + libraries which performed multiple queries for each item in the list. -* Adds optional `forComparision` named argument to `Property.encodeValue` which +* Adds optional `forComparison` named argument to `Property.encodeValue` which will be set to `true` when encoding a value for comparison in queries. * Upgrade to newer versions of `package:googleapis` and `package:googleapis_beta` @@ -235,7 +235,7 @@ ## 0.1.3+1 -* Change the service scope keys keys to non-private symbols. +* Change the service scope keys to non-private symbols. ## 0.1.3 diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 53ca4eb2..24ffb733 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -54,14 +54,14 @@ class DatastoreError implements Exception { final String message; DatastoreError([String? message]) - : message = message ?? 'DatastoreError: An unknown error occured'; + : message = message ?? 
'DatastoreError: An unknown error occurred'; @override String toString() => message; } class UnknownDatastoreError extends DatastoreError { - UnknownDatastoreError(error) : super('An unknown error occured ($error).'); + UnknownDatastoreError(error) : super('An unknown error occurred ($error).'); } class TransactionAbortedError extends DatastoreError { @@ -123,7 +123,7 @@ class Entity { /// A key can uniquely identify a datastore `Entity`s. It consists of a /// partition and path. The path consists of one or more `KeyElement`s. /// -/// A key may be incomplete. This is usesful when inserting `Entity`s which IDs +/// A key may be incomplete. This is useful when inserting `Entity`s which IDs /// should be automatically allocated. /// /// Example of a fully populated [Key]: @@ -386,7 +386,7 @@ abstract class Datastore { /// /// The [project] is the name of the Google Cloud project. /// - /// Returs an object providing access to Datastore. The passed-in [client] + /// Returns an object providing access to Datastore. The passed-in [client] /// will not be closed automatically. The caller is responsible for closing /// it. factory Datastore(http.Client client, String project) { diff --git a/pkgs/gcloud/lib/pubsub.dart b/pkgs/gcloud/lib/pubsub.dart index 42551f87..efa220cf 100644 --- a/pkgs/gcloud/lib/pubsub.dart +++ b/pkgs/gcloud/lib/pubsub.dart @@ -31,7 +31,7 @@ PubSub get pubsubService => ss.lookup(_pubsubKey) as PubSub; /// Registers the [pubsub] object within the current service scope. /// -/// The provided `pubsub` object will be avilable via the top-level +/// The provided `pubsub` object will be available via the top-level /// `pubsubService` getter. /// /// Calling this function outside of a service scope will result in an error. @@ -121,7 +121,7 @@ abstract class PubSub { /// /// The [project] is the name of the Google Cloud project. /// - /// Returs an object providing access to Pub/Sub. The passed-in [client] will + /// Returns an object providing access to Pub/Sub. The passed-in [client] will /// not be closed automatically. The caller is responsible for closing it. factory PubSub(http.Client client, String project) { var emulator = Platform.environment['PUBSUB_EMULATOR_HOST']; diff --git a/pkgs/gcloud/lib/service_scope.dart b/pkgs/gcloud/lib/service_scope.dart index 6e23c475..d85aed45 100644 --- a/pkgs/gcloud/lib/service_scope.dart +++ b/pkgs/gcloud/lib/service_scope.dart @@ -261,7 +261,7 @@ class _ServiceScope { _destroyed = true; if (errors.isNotEmpty) { throw Exception( - 'The following errors occured while running scope exit handlers' + 'The following errors occurred while running scope exit handlers' ': $errors'); } }); diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index d62868c6..25907566 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -277,7 +277,7 @@ class DatastoreImpl implements datastore.Datastore { if (error.status == 400) { return Future.error( datastore.ApplicationError( - error.message ?? 'An unknown error occured', + error.message ?? 'An unknown error occurred', ), stack, ); diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index 9a4bbf3c..a6e51492 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -425,7 +425,7 @@ Future _commitHelper(DatastoreDB db, for (var model in inserts) { // If parent was not explicitly set, we assume this model will map to - // it's own entity group. + // its own entity group. 
model.parentKey ??= db.defaultPartition.emptyKey; if (model.id == null) { autoIdModelInserts.add(model); diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 3c726f2a..540c6510 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -43,7 +43,7 @@ /// In most cases relative names are used. Absolute names are typically /// only used for operations involving objects in different buckets. /// -/// For most of the APIs in ths library which take instances of other classes +/// For most of the APIs in this library which take instances of other classes /// from this library it is the assumption that the actual implementations /// provided here are used. library; @@ -260,7 +260,7 @@ abstract class AclScope { /// are set in the different subclasses. final String _id; - /// The type of this acope this ACL scope represents. + /// The type of this scope this ACL scope represents. final int _type; /// ACL scope for all authenticated users. diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 120ec5e1..549063e6 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -441,7 +441,7 @@ void runTests(Datastore datastore, String? namespace) { }); }); - // This should not work with [unamedEntities20], but is working! + // This should not work with [unnamedEntities20], but is working! // FIXME TODO FIXME : look into this. test('delete_transactional', () { return insert([], unnamedEntities99, transactional: false).then((keys) { diff --git a/pkgs/gcloud/test/service_scope_test.dart b/pkgs/gcloud/test/service_scope_test.dart index 424ed3e6..1dfdf4b8 100644 --- a/pkgs/gcloud/test/service_scope_test.dart +++ b/pkgs/gcloud/test/service_scope_test.dart @@ -32,7 +32,7 @@ void main() { })); }); - test('non-existent-key', () { + test('nonexistent-key', () { return ss.fork(expectAsync0(() { expect(ss.lookup(1), isNull); return Future.value(); @@ -188,7 +188,7 @@ void main() { }); test('nested-fork-and-insert', () { - // Ensure that independently fork()ed serice scopes can insert keys + // Ensure that independently fork()ed service scopes can insert keys // independently and they cannot see each others values but can see parent // service scope values. 
var rootKey = 1; From 535244d2862e5629d962404ad4eee06d742d78a3 Mon Sep 17 00:00:00 2001 From: Josh Soref <2119212+jsoref@users.noreply.github.com> Date: Wed, 11 Sep 2024 09:36:57 -0400 Subject: [PATCH 237/239] Spelling api (dart-lang/gcloud#193) * spelling: descending [api] Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> * spelling: greater [api] Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> --------- Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com> --- pkgs/gcloud/lib/datastore.dart | 24 +++++++++++++-- pkgs/gcloud/lib/fix_data.yaml | 29 +++++++++++++++++++ pkgs/gcloud/lib/src/datastore_impl.dart | 4 +-- pkgs/gcloud/lib/src/db/db.dart | 6 ++-- .../datastore/e2e/datastore_test_impl.dart | 4 +-- 5 files changed, 57 insertions(+), 10 deletions(-) create mode 100644 pkgs/gcloud/lib/fix_data.yaml diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index 24ffb733..cf7eaac9 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -252,9 +252,21 @@ class FilterRelation { // ignore: constant_identifier_names static const FilterRelation LessThanOrEqual = FilterRelation._('<='); // ignore: constant_identifier_names - static const FilterRelation GreatherThan = FilterRelation._('>'); + static const FilterRelation GreaterThan = FilterRelation._('>'); + + /// Old misspelled name for [GreaterThan], retained for compatibility. + /// + /// @nodoc + @Deprecated('Use FilterRelation.GreaterThan instead') + static const FilterRelation GreatherThan = GreaterThan; // ignore: constant_identifier_names - static const FilterRelation GreatherThanOrEqual = FilterRelation._('>='); + static const FilterRelation GreaterThanOrEqual = FilterRelation._('>='); + + /// Old misspelled name for [GreaterThanOrEqual], retained for compatibility. + /// + /// @nodoc + @Deprecated('Use FilterRelation.GreaterThanOrEqual instead') + static const FilterRelation GreatherThanOrEqual = GreaterThanOrEqual; // ignore: constant_identifier_names static const FilterRelation Equal = FilterRelation._('=='); @@ -289,7 +301,13 @@ class OrderDirection { // ignore: constant_identifier_names static const OrderDirection Ascending = OrderDirection._('Ascending'); // ignore: constant_identifier_names - static const OrderDirection Decending = OrderDirection._('Decending'); + static const OrderDirection Descending = OrderDirection._('Descending'); + + /// Old misspelled name for [Descending], retained for compatibility. 
+ /// + /// @nodoc + @Deprecated('Use OrderDirection.Descending instead') + static const OrderDirection Decending = Descending; final String name; diff --git a/pkgs/gcloud/lib/fix_data.yaml b/pkgs/gcloud/lib/fix_data.yaml new file mode 100644 index 00000000..976365ae --- /dev/null +++ b/pkgs/gcloud/lib/fix_data.yaml @@ -0,0 +1,29 @@ +version: 1 +transforms: + - title: 'Rename to Descending' + date: 2024-09-11 + element: + uris: ['datastore.dart'] + field: 'Decending' + inClass: 'OrderDirection' + changes: + - kind: 'rename' + newName: 'Descending' + - title: 'Rename to GreaterThan' + date: 2024-09-11 + element: + uris: ['datastore.dart'] + field: 'GreatherThan' + inClass: 'FilterRelation' + changes: + - kind: 'rename' + newName: 'GreaterThan' + - title: 'Rename to GreaterThanOrEqual' + date: 2024-09-11 + element: + uris: ['datastore.dart'] + field: 'GreatherThanOrEqual' + inClass: 'FilterRelation' + changes: + - kind: 'rename' + newName: 'GreaterThanOrEqual' diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 25907566..4be5c499 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -205,8 +205,8 @@ class DatastoreImpl implements datastore.Datastore { datastore.FilterRelation.LessThan: 'LESS_THAN', datastore.FilterRelation.LessThanOrEqual: 'LESS_THAN_OR_EQUAL', datastore.FilterRelation.Equal: 'EQUAL', - datastore.FilterRelation.GreatherThan: 'GREATER_THAN', - datastore.FilterRelation.GreatherThanOrEqual: 'GREATER_THAN_OR_EQUAL', + datastore.FilterRelation.GreaterThan: 'GREATER_THAN', + datastore.FilterRelation.GreaterThanOrEqual: 'GREATER_THAN_OR_EQUAL', }; api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) { diff --git a/pkgs/gcloud/lib/src/db/db.dart b/pkgs/gcloud/lib/src/db/db.dart index a6e51492..5c2f888f 100644 --- a/pkgs/gcloud/lib/src/db/db.dart +++ b/pkgs/gcloud/lib/src/db/db.dart @@ -143,8 +143,8 @@ class Query { final _relationMapping = const { '<': ds.FilterRelation.LessThan, '<=': ds.FilterRelation.LessThanOrEqual, - '>': ds.FilterRelation.GreatherThan, - '>=': ds.FilterRelation.GreatherThanOrEqual, + '>': ds.FilterRelation.GreaterThan, + '>=': ds.FilterRelation.GreaterThanOrEqual, '=': ds.FilterRelation.Equal, }; @@ -211,7 +211,7 @@ class Query { void order(String orderString) { // TODO: validate [orderString] (e.g. is name valid) if (orderString.startsWith('-')) { - _orders.add(ds.Order(ds.OrderDirection.Decending, + _orders.add(ds.Order(ds.OrderDirection.Descending, _convertToDatastoreName(orderString.substring(1)))); } else { _orders.add(ds.Order( diff --git a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart index 549063e6..3163afef 100644 --- a/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart +++ b/pkgs/gcloud/test/datastore/e2e/datastore_test_impl.dart @@ -780,7 +780,7 @@ void runTests(Datastore datastore, String? namespace) { assert(indexedEntity.length == 1); var filters = [ - Filter(FilterRelation.GreatherThan, queryKey, queryLowerBound), + Filter(FilterRelation.GreaterThan, queryKey, queryLowerBound), Filter(FilterRelation.LessThan, queryKey, queryUpperbound), ]; var listFilters = [ @@ -795,7 +795,7 @@ void runTests(Datastore datastore, String? 
namespace) { Filter(FilterRelation.Equal, testUnindexedProperty, queryIndexValue) ]; - var orders = [Order(OrderDirection.Decending, queryKey)]; + var orders = [Order(OrderDirection.Descending, queryKey)]; test('query', () async { await insert(stringNamedEntities, []); From c061502ac56a54a1f1ce9156e4e3d60a5d270ae0 Mon Sep 17 00:00:00 2001 From: Jonas Finnemann Jensen Date: Fri, 27 Sep 2024 09:55:33 -0400 Subject: [PATCH 238/239] Add support for metadata in Storage.copyObject (dart-lang/gcloud#194) --- pkgs/gcloud/CHANGELOG.md | 3 ++ pkgs/gcloud/lib/src/storage_impl.dart | 7 ++-- pkgs/gcloud/lib/storage.dart | 4 ++- pkgs/gcloud/pubspec.yaml | 2 +- pkgs/gcloud/test/storage/e2e_test.dart | 46 ++++++++++++++++++++++++++ 5 files changed, 58 insertions(+), 4 deletions(-) diff --git a/pkgs/gcloud/CHANGELOG.md b/pkgs/gcloud/CHANGELOG.md index 9c8a4f32..f7d487e9 100644 --- a/pkgs/gcloud/CHANGELOG.md +++ b/pkgs/gcloud/CHANGELOG.md @@ -1,3 +1,6 @@ +## 0.8.14 +- Support override metadata properties in `copyObject`. + ## 0.8.13 - Support the latest version `^13.0.0` of the `googleapis` package. diff --git a/pkgs/gcloud/lib/src/storage_impl.dart b/pkgs/gcloud/lib/src/storage_impl.dart index d2850d9f..afaf8a70 100644 --- a/pkgs/gcloud/lib/src/storage_impl.dart +++ b/pkgs/gcloud/lib/src/storage_impl.dart @@ -106,11 +106,14 @@ class _StorageImpl implements Storage { } @override - Future copyObject(String src, String dest) { + Future copyObject(String src, String dest, {ObjectMetadata? metadata}) { var srcName = _AbsoluteName.parse(src); var destName = _AbsoluteName.parse(dest); + metadata ??= _ObjectMetadata(); + var objectMetadata = metadata as _ObjectMetadata; + final object = objectMetadata._object; return _api.objects - .copy(storage_api.Object(), srcName.bucketName, srcName.objectName, + .copy(object, srcName.bucketName, srcName.objectName, destName.bucketName, destName.objectName) .then((_) => null); } diff --git a/pkgs/gcloud/lib/storage.dart b/pkgs/gcloud/lib/storage.dart index 540c6510..21e7705f 100644 --- a/pkgs/gcloud/lib/storage.dart +++ b/pkgs/gcloud/lib/storage.dart @@ -561,7 +561,9 @@ abstract class Storage { /// Copy object [src] to object [dest]. /// /// The names of [src] and [dest] must be absolute. - Future copyObject(String src, String dest); + /// + /// [metadata] can be used to overwrite metadata properties. + Future copyObject(String src, String dest, {ObjectMetadata? metadata}); } /// Information on a specific object. diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index b315607c..542893fa 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -1,5 +1,5 @@ name: gcloud -version: 0.8.13 +version: 0.8.14 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. 
repository: https://github.com/dart-lang/gcloud diff --git a/pkgs/gcloud/test/storage/e2e_test.dart b/pkgs/gcloud/test/storage/e2e_test.dart index b93453c9..fa9a317c 100644 --- a/pkgs/gcloud/test/storage/e2e_test.dart +++ b/pkgs/gcloud/test/storage/e2e_test.dart @@ -153,6 +153,52 @@ void main() { testCreateReadDelete('test-2', bytesResumableUpload); }); + testWithBucket('create-copy-read-delete', (bucket) async { + final bytes = [1, 2, 3]; + final info = await bucket.writeBytes('test-for-copy', bytes); + expect(info, isNotNull); + + await storage.copyObject( + bucket.absoluteObjectName('test-for-copy'), + bucket.absoluteObjectName('test'), + ); + + final result = + await bucket.read('test').fold>([], (p, e) => p..addAll(e)); + expect(result, bytes); + + await bucket.delete('test'); + await bucket.delete('test-for-copy'); + }); + + testWithBucket('create-copy-metadata-read-delete', (bucket) async { + final bytes = [1, 2, 3]; + final info = await bucket.writeBytes( + 'test-for-copy', + bytes, + metadata: ObjectMetadata(contentType: 'text/plain'), + ); + expect(info, isNotNull); + + await storage.copyObject( + bucket.absoluteObjectName('test-for-copy'), + bucket.absoluteObjectName('test'), + metadata: ObjectMetadata(contentType: 'application/octet'), + ); + + final r1 = await bucket.info('test-for-copy'); + expect(r1.metadata.contentType, 'text/plain'); + final r2 = await bucket.info('test'); + expect(r2.metadata.contentType, 'application/octet'); + + final result = + await bucket.read('test').fold>([], (p, e) => p..addAll(e)); + expect(result, bytes); + + await bucket.delete('test'); + await bucket.delete('test-for-copy'); + }); + group('create-read-delete-streaming', () { void testCreateReadDelete(String name, List bytes) { testWithBucket(name, (bucket) async { From a42de7f11bf92d1d4844f949c99cbb16426ba165 Mon Sep 17 00:00:00 2001 From: Devon Carew Date: Fri, 27 Sep 2024 13:19:22 -0700 Subject: [PATCH 239/239] update package:gcloud metadata and CI config --- .../workflows/gcloud.yml | 24 ++++++++++++++----- pkgs/gcloud/.github/dependabot.yml | 15 ------------ pkgs/gcloud/.github/workflows/publish.yaml | 14 ----------- pkgs/gcloud/README.md | 4 ++-- pkgs/gcloud/lib/datastore.dart | 3 +++ pkgs/gcloud/lib/src/datastore_impl.dart | 4 +++- pkgs/gcloud/pubspec.yaml | 2 +- 7 files changed, 27 insertions(+), 39 deletions(-) rename pkgs/gcloud/.github/workflows/test-package.yml => .github/workflows/gcloud.yml (80%) delete mode 100644 pkgs/gcloud/.github/dependabot.yml delete mode 100644 pkgs/gcloud/.github/workflows/publish.yaml diff --git a/pkgs/gcloud/.github/workflows/test-package.yml b/.github/workflows/gcloud.yml similarity index 80% rename from pkgs/gcloud/.github/workflows/test-package.yml rename to .github/workflows/gcloud.yml index 1cb1c8e2..ccc195a0 100644 --- a/pkgs/gcloud/.github/workflows/test-package.yml +++ b/.github/workflows/gcloud.yml @@ -1,13 +1,19 @@ -name: Dart CI +name: package:gcloud +permissions: read-all on: - # Run on PRs and pushes to the default branch. - push: - branches: [ master ] pull_request: - branches: [ master ] + branches: [ main ] + paths: + - '.github/workflows/gcloud.yml' + - 'pkgs/gcloud/**' + push: + branches: [ main ] + paths: + - '.github/workflows/gcloud.yml' + - 'pkgs/gcloud/**' schedule: - - cron: "0 0 * * 0" + - cron: '0 0 * * 0' # weekly env: PUB_ENVIRONMENT: bot.github @@ -17,6 +23,9 @@ jobs: # against Dart dev. 
analyze: runs-on: ubuntu-latest + defaults: + run: + working-directory: pkgs/gcloud strategy: fail-fast: false matrix: @@ -42,6 +51,9 @@ jobs: test: needs: analyze runs-on: ${{ matrix.os }} + defaults: + run: + working-directory: pkgs/gcloud strategy: fail-fast: false matrix: diff --git a/pkgs/gcloud/.github/dependabot.yml b/pkgs/gcloud/.github/dependabot.yml deleted file mode 100644 index cde02ad6..00000000 --- a/pkgs/gcloud/.github/dependabot.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Dependabot configuration file. -# See https://docs.github.com/en/code-security/dependabot/dependabot-version-updates -version: 2 - -updates: - - package-ecosystem: github-actions - directory: / - schedule: - interval: monthly - labels: - - autosubmit - groups: - github-actions: - patterns: - - "*" diff --git a/pkgs/gcloud/.github/workflows/publish.yaml b/pkgs/gcloud/.github/workflows/publish.yaml deleted file mode 100644 index 2239b63d..00000000 --- a/pkgs/gcloud/.github/workflows/publish.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# A CI configuration to auto-publish pub packages. - -name: Publish - -on: - pull_request: - branches: [ master ] - push: - tags: [ 'v[0-9]+.[0-9]+.[0-9]+' ] - -jobs: - publish: - if: ${{ github.repository_owner == 'dart-lang' }} - uses: dart-lang/ecosystem/.github/workflows/publish.yaml@main diff --git a/pkgs/gcloud/README.md b/pkgs/gcloud/README.md index e5abc850..f8175bb9 100644 --- a/pkgs/gcloud/README.md +++ b/pkgs/gcloud/README.md @@ -1,4 +1,4 @@ -[![Dart CI](https://github.com/dart-lang/gcloud/actions/workflows/test-package.yml/badge.svg)](https://github.com/dart-lang/gcloud/actions/workflows/test-package.yml) +[![package:gcloud](https://github.com/dart-lang/labs/actions/workflows/gcloud.yml/badge.svg)](https://github.com/dart-lang/labs/actions/workflows/gcloud.yml) [![pub package](https://img.shields.io/pub/v/gcloud.svg)](https://pub.dev/packages/gcloud) [![package publisher](https://img.shields.io/pub/publisher/gcloud.svg)](https://pub.dev/packages/gcloud/publisher) @@ -26,7 +26,7 @@ much higher expected rate of API and breaking changes. Your feedback is valuable and will help us evolve this package. For general feedback, suggestions, and comments, please file an issue in the -[bug tracker](https://github.com/dart-lang/gcloud/issues). +[bug tracker](https://github.com/dart-lang/labs/issues). 
## API details diff --git a/pkgs/gcloud/lib/datastore.dart b/pkgs/gcloud/lib/datastore.dart index cf7eaac9..f5f2c8fa 100644 --- a/pkgs/gcloud/lib/datastore.dart +++ b/pkgs/gcloud/lib/datastore.dart @@ -258,6 +258,7 @@ class FilterRelation { /// /// @nodoc @Deprecated('Use FilterRelation.GreaterThan instead') + // ignore: constant_identifier_names static const FilterRelation GreatherThan = GreaterThan; // ignore: constant_identifier_names static const FilterRelation GreaterThanOrEqual = FilterRelation._('>='); @@ -266,6 +267,7 @@ class FilterRelation { /// /// @nodoc @Deprecated('Use FilterRelation.GreaterThanOrEqual instead') + // ignore: constant_identifier_names static const FilterRelation GreatherThanOrEqual = GreaterThanOrEqual; // ignore: constant_identifier_names static const FilterRelation Equal = FilterRelation._('=='); @@ -307,6 +309,7 @@ class OrderDirection { /// /// @nodoc @Deprecated('Use OrderDirection.Descending instead') + // ignore: constant_identifier_names static const OrderDirection Decending = Descending; final String name; diff --git a/pkgs/gcloud/lib/src/datastore_impl.dart b/pkgs/gcloud/lib/src/datastore_impl.dart index 4be5c499..c0a9b366 100644 --- a/pkgs/gcloud/lib/src/datastore_impl.dart +++ b/pkgs/gcloud/lib/src/datastore_impl.dart @@ -99,7 +99,9 @@ class DatastoreImpl implements datastore.Datastore { for (var i = 0; i < a.path!.length; i++) { if (a.path![i].id != b.path![i].id || a.path![i].name != b.path![i].name || - a.path![i].kind != b.path![i].kind) return false; + a.path![i].kind != b.path![i].kind) { + return false; + } } return true; } diff --git a/pkgs/gcloud/pubspec.yaml b/pkgs/gcloud/pubspec.yaml index 542893fa..1e1e9367 100644 --- a/pkgs/gcloud/pubspec.yaml +++ b/pkgs/gcloud/pubspec.yaml @@ -2,7 +2,7 @@ name: gcloud version: 0.8.14 description: >- High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore. -repository: https://github.com/dart-lang/gcloud +repository: https://github.com/dart-lang/labs/tree/main/pkgs/gcloud topics: - cloud
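The renames in patch 237 above (`FilterRelation.GreaterThan`, `FilterRelation.GreaterThanOrEqual` and `OrderDirection.Descending`, with the old misspellings kept as deprecated aliases) show up wherever a datastore query is built by hand. The sketch below is illustrative only: the project id, kind and property names are invented, it assumes an already-authorized `http.Client`, and it assumes `Query`'s named parameters (`kind`, `filters`, `orders`, `limit`) as exposed by `package:gcloud/datastore.dart`.

```dart
import 'package:gcloud/datastore.dart';
import 'package:http/http.dart' as http;

/// Lists `Item` entities with `count > 10`, largest `count` first, using the
/// corrected constant names. `my-project`, `Item` and `count` are placeholders.
Future<void> listLargeItems(http.Client authorizedClient) async {
  final datastore = Datastore(authorizedClient, 'my-project');

  final query = Query(
    kind: 'Item',
    filters: [Filter(FilterRelation.GreaterThan, 'count', 10)],
    orders: [Order(OrderDirection.Descending, 'count')],
    limit: 20,
  );

  final page = await datastore.query(query);
  for (final entity in page.items) {
    print('${entity.key} -> ${entity.properties['count']}');
  }
}
```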
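Patch 238 above adds an optional `metadata` argument to `Storage.copyObject`, so a copy can override metadata properties (for example the content type) on the destination object. A minimal sketch along the lines of the new e2e test, again with hypothetical project, bucket and object names and an already-authorized client:

```dart
import 'package:gcloud/storage.dart';
import 'package:http/http.dart' as http;

/// Copies `notes.txt` to `notes-copy.txt` and gives the copy a different
/// content type. The bucket and object names are placeholders.
Future<void> copyWithNewContentType(http.Client authorizedClient) async {
  final storage = Storage(authorizedClient, 'my-project');
  final bucket = storage.bucket('my-bucket');

  await storage.copyObject(
    bucket.absoluteObjectName('notes.txt'),
    bucket.absoluteObjectName('notes-copy.txt'),
    // Without `metadata` the destination keeps a copy of the source object's
    // metadata; passing it overrides those properties on the copy.
    metadata: ObjectMetadata(contentType: 'text/plain'),
  );
}
```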