[roll] Update third-party dart packages

Updated:
Change-Id: Ic9dff2ef3a43f155d4afdcb9eccf4440eccf7f07
diff --git a/gcloud/.gitignore b/gcloud/.gitignore
deleted file mode 100644
index 96ce539..0000000
--- a/gcloud/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-.dart_tool/
-pubspec.lock
-packages
-.pub
-.packages
-.idea
\ No newline at end of file
diff --git a/gcloud/.status b/gcloud/.status
deleted file mode 100644
index 4fce6c2..0000000
--- a/gcloud/.status
+++ /dev/null
@@ -1,39 +0,0 @@
-*/packages/*: Skip
-*/*/packages/*: Skip
-*/*/*/packages/*: Skip
-
-# We do not run the e2e tests inside the build/ directory in order to prevent
-# ./tools/test.py from running several e2e in parallel.
-build/test/db_all_e2e_test: Skip
-build/test/storage/e2e_test: Skip
-build/test/pubsub/pubsub_e2e_test: Skip
-
-# This test is slow because
-# - eventual consistency forces us to put in sleep()s
-# - it does e2e testing
-# - it combines several tests to avoid concurrent tests touching the same data
-test/db_all_e2e_test: Slow, Pass
-
-# This test is slow because
-# - it does e2e testing
-test/pubsub/pubsub_e2e_test: Slow, Pass
-
-[ $browser ]
-build/test/storage/e2e_test: Skip
-test/storage/e2e_test: Skip
-
-build/test/db_all_e2e_test: Skip
-test/db_all_e2e_test: Skip
-
-build/test/pubsub/pubsub_e2e_test: Skip
-test/pubsub/pubsub_e2e_test: Skip
-
-# Imports common_e2e.dart, which uses dart:io
-build/test/storage/storage_test: Skip
-test/storage/storage_test: Skip
-
-build/test/pubsub/pubsub_test: Skip
-test/pubsub/pubsub_test: Skip
-
-[ $compiler == dart2js ]
-*: Skip
diff --git a/gcloud/.travis.yml b/gcloud/.travis.yml
deleted file mode 100644
index 69a17db..0000000
--- a/gcloud/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-language: dart
-
-dart:
-  - stable
-  - dev
-
-dart_task:
-  - dartanalyzer: --fatal-infos --fatal-warnings .
-  - test: -P travis
-
-matrix:
-  include:
-    # Only validate formatting using the dev release
-    - dart: dev
-      dart_task: dartfmt
-
-# Only building master means that we don't run two builds for each pull request.
-branches:
-  only: [master]
-
-cache:
- directories:
-   - $HOME/.pub-cache
diff --git a/gcloud/AUTHORS b/gcloud/AUTHORS
deleted file mode 100644
index 7c12ae6..0000000
--- a/gcloud/AUTHORS
+++ /dev/null
@@ -1,6 +0,0 @@
-# Below is a list of people and organizations that have contributed
-# to the Dart project. Names should be added to the list like so:
-#
-#   Name/Organization <email address>
-
-Google Inc.
diff --git a/gcloud/BUILD.gn b/gcloud/BUILD.gn
deleted file mode 100644
index dfb7390..0000000
--- a/gcloud/BUILD.gn
+++ /dev/null
@@ -1,19 +0,0 @@
-# This file is generated by importer.py for gcloud-0.6.0+4
-
-import("//build/dart/dart_library.gni")
-
-dart_library("gcloud") {
-  package_name = "gcloud"
-
-  # This parameter is left empty as we don't care about analysis or exporting
-  # these sources outside of the tree.
-  sources = []
-
-  disable_analysis = true
-
-  deps = [
-    "//third_party/dart-pkg/pub/googleapis",
-    "//third_party/dart-pkg/pub/http",
-    "//third_party/dart-pkg/pub/_discoveryapis_commons",
-  ]
-}
diff --git a/gcloud/CHANGELOG.md b/gcloud/CHANGELOG.md
deleted file mode 100644
index 55acc4e..0000000
--- a/gcloud/CHANGELOG.md
+++ /dev/null
@@ -1,141 +0,0 @@
-## 0.6.0+4
-
- * Updated package description.
- * Added an example showing how to use Google Cloud Storage.
-
-## 0.6.0+3
-
- * Fixed code formatting and lints.
-
-## 0.6.0+2
-
-* Support the latest `pkg:http`.
-
-## 0.6.0+1
-
-* Add explicit dependency to `package:_discoveryapis_commons`
-* Widen sdk constraint to <3.0.0
-
-## 0.6.0
-
-* **BREAKING CHANGE:** Add generics support. Instead of writing
-  `db.query(Person).run()` and getting back a generic `Stream<Model>`, you now
-  write `db.query<Person>().run()` and get `Stream<Person>`.
-  The same goes for `.lookup([key])`, which can now be written as
-  `.lookup<Person>([key])` and will return a `List<Person>`.
-
-## 0.5.0
-
-* Fixes to support Dart 2.
-
-## 0.4.0+1
-
-* Made a number of strong-mode improvements.
-
-* Updated dependency on `googleapis` and `googleapis_beta`.
-
-## 0.4.0
-
-* Remove support for `FilterRelation.In` and "propertyname IN" for queries:
-  This is not supported by the newer APIs and was originally part of fat-client
-  libraries which performed multiple queries for each item in the list.
-
-* Adds optional `forComparision` named argument to `Property.encodeValue` which
-  will be set to `true` when encoding a value for comparison in queries.
-
-* Upgrade to newer versions of `package:googleapis` and `package:googleapis_beta`
-
-## 0.3.0
-
-* Upgrade to use stable `package:googleapis/datastore/v1.dart`.
-
-* The internal [DatastoreImpl] class now takes a project name without the `s~`
-  prefix.
-
-## 0.2.0+14
-
-* Fix analyzer warning.
-
-## 0.2.0+13
-
-* Remove crypto dependency and upgrade dart dependency to >=1.13 since
-  this dart version provides the Base64 codec.
-
-## 0.2.0+11
-
-* Throw a [StateError] in case a query returned a kind for which there was no
-  model registered.
-
-## 0.2.0+10
-
-* Address analyzer warnings.
-
-## 0.2.0+9
-
-* Support value transformation in `db.query().filter()`.
-* Widen constraint on `googleapis` and `googleapis_beta`.
-
-## 0.2.0+8
-
-* Widen constraint on `googleapis` and `googleapis_beta`.
-
-## 0.2.0+4
-
-* `Storage.read` now honors `offset` and `length` arguments.
-
-## 0.2.0+2
-
-* Widen constraint on `googleapis/googleapis_beta`
-
-## 0.2.0+1
-
-* Fix broken import of package:googleapis/common/common.dart.
-
-## 0.2.0
-
-* Add support for Cloud Pub/Sub.
-* Require Dart version 1.9.
-
-## 0.1.4+2
-
-* Enforce fully populated entity keys in a number of places.
-
-## 0.1.4+1
-
-* Deduce the query partition automatically from query ancestor key.
-
-## 0.1.4
-
-* Added optional `defaultPartition` parameter to the constructor of
-  `DatastoreDB`.
-
-## 0.1.3+2
-
-* Widened googleapis/googleapis_beta constraints in pubspec.yaml.
-
-## 0.1.3+1
-
-* Change the service scope keys to non-private symbols.
-
-## 0.1.3
-
-* Widen package:googleapis dependency constraint in pubspec.yaml.
-* Bugfix in `package:appengine/db.dart`: Correctly handle ListProperties
-of length 1.
-
-## 0.1.2
-
-* Introduced `package:gcloud/service_scope.dart` library.
-* Added global getters for getting gcloud services from the current service
-scope.
-* Added an `package:gcloud/http.dart` library using service scopes.
-
-## 0.1.1
-
-* Increased version constraint on googleapis{,_auth,_beta}.
-
-* Removed unused imports.
-
-## 0.1.0
-
-* First release.
diff --git a/gcloud/LICENSE b/gcloud/LICENSE
deleted file mode 100644
index 5c60afe..0000000
--- a/gcloud/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Copyright 2014, the Dart project authors. All rights reserved.
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above
-      copyright notice, this list of conditions and the following
-      disclaimer in the documentation and/or other materials provided
-      with the distribution.
-    * Neither the name of Google Inc. nor the names of its
-      contributors may be used to endorse or promote products derived
-      from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/gcloud/README.md b/gcloud/README.md
deleted file mode 100644
index b18070b..0000000
--- a/gcloud/README.md
+++ /dev/null
@@ -1,248 +0,0 @@
-## Google Cloud Platform support package (gcloud)
-
-The `gcloud` package provides a high level "idiomatic Dart" interface to
-some of the most widely used Google Cloud Platform services. Currently the
-following services are supported:
-
-  * Cloud Datastore
-  * Cloud Storage
-  * Cloud Pub/Sub
-
-The APIs in this package are all based on the generic generated APIs in the
-[googleapis] and [googleapis_beta][googleapisbeta] packages.
-
-This means that the authentication model for using the APIs in this package
-uses the [googleapis_auth][googleapisauth] package.
-
-Note that this package is only intended for use with the standalone VM in a
-server or command-line application. Don't expect this package to work in
-the browser.
-
-The code snippets below demonstrating the use of this package all assume that
-the following imports are present:
-
-```dart
-import 'dart:io';
-import 'package:googleapis_auth/auth_io.dart' as auth;
-import 'package:http/http.dart' as http;
-import 'package:gcloud/db.dart';
-import 'package:gcloud/storage.dart';
-import 'package:gcloud/pubsub.dart';
-import 'package:gcloud/service_scope.dart' as ss;
-import 'package:gcloud/src/datastore_impl.dart' as datastore_impl;
-```
-
-### Getting access to the APIs
-
-The first step in using the APIs is to get an authenticated HTTP client and,
-with that, create API class instances for accessing the different APIs. The
-code below assumes that you have a Google Cloud Project called `my-project`
-with credentials for a service account from that project stored in the file
-`my-project.json`.
-
-```dart
-// Read the service account credentials from the file.
-var jsonCredentials = new File('my-project.json').readAsStringSync();
-var credentials = new auth.ServiceAccountCredentials.fromJson(jsonCredentials);
-
-// Get an HTTP authenticated client using the service account credentials.
-var scopes = []
-    ..addAll(datastore_impl.DatastoreImpl.SCOPES)
-    ..addAll(Storage.SCOPES)
-    ..addAll(PubSub.SCOPES);
-var client = await auth.clientViaServiceAccount(credentials, scopes);
-
-// Instantiate objects to access Cloud Datastore, Cloud Storage
-// and Cloud Pub/Sub APIs.
-var db = new DatastoreDB(
-    new datastore_impl.DatastoreImpl(client, 's~my-project'));
-var storage = new Storage(client, 'my-project');
-var pubsub = new PubSub(client, 'my-project');
-```
-
-All the APIs in this package support the use of 'service scopes'. Service
-scopes are described in detail below.
-
-```dart
-ss.fork(() {
-  // register the services in the new service scope.
-  registerDbService(db);
-  registerStorageService(storage);
-  registerPubSubService(pubsub);
-
-  // Run application using these services.
-});
-```
-
-The services registered with the service scope can now be reached from within
-all the code running in the same service scope using the getters below.
-
-```dart
-dbService.
-storageService.
-pubsubService.
-```
-
-This way it is not necessary to pass the service objects around in your code.
-
-### Use with App Engine
-
-The `gcloud` package is also integrated in the Dart [appengine] package. This
-means the `gcloud` services are available both via the appengine context and
-service scopes. The authentication required to access the Google Cloud Platform
-services is handled automatically.
-
-This means that access to the App Engine Datastore can be obtained either
-through the App Engine context
-
-```dart
-var db = context.services.db;
-```
-
-or just using the service scope registration.
-
-```dart
-var db = dbService;
-```
-
-## Cloud Datastore
-Google Cloud Datastore provides a NoSQL, schemaless database for storing
-non-relational data. See the product page
-[https://cloud.google.com/datastore/][Datastore] for more information.
-
-The Cloud Datastore API provides a mapping of Dart objects to entities stored
-in the Datastore. The following example shows how to annotate a class to
-make it possible to store instances of it in the Datastore.
-
-```dart
-@db.Kind()
-class Person extends db.Model {
-  @db.StringProperty()
-  String name;
-
-  @db.IntProperty()
-  int age;
-}
-```
-
-The `Kind` annotation indicates that instances of this class can be stored.
-The class must also inherit from `Model`. To store an object in the
-Datastore, create an instance and use the `commit` function.
-
-```dart
-var person = new Person()
-    ..name = ''
-    ..age = 42;
-await db.commit(inserts: [person]);
-```
-
-The function `query` is used to build a `Query` object which can be run to
-perform the query.
-
-```dart
-var persons = (await db.query<Person>().run()).toList();
-```
-
-To fetch one or multiple existing entities, use `lookup`.
-
-```dart
-var person = (await db.lookup<Person>([key])).single;
-var people = await db.lookup<Person>([key1, key2]);
-```
-
-NOTE: This package includes a lower level API provided through the class
-`Datastore`, on top of which the `DatastoreDB` API is built. The main reason
-for this additional API level is to bridge the gap between the different APIs
-exposed inside App Engine and through the public REST API. We reserve the
-right to modify and maybe even remove this additional layer at any time.
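-
-The snippet below is an illustrative sketch of this lower level API (it is not
-part of the original examples). It assumes the `client` created above and one
-extra import, `import 'package:gcloud/datastore.dart' as datastore;`, to avoid
-a name clash with the `Key` class from `package:gcloud/db.dart`.
-
-```dart
-// Create the low-level Datastore interface directly.
-var lowLevel = new datastore_impl.DatastoreImpl(client, 's~my-project');
-
-// Look up a single entity by a fully populated key.
-var key = new datastore.Key([new datastore.KeyElement('Person', 1)]);
-var entities = await lowLevel.lookup([key]);
-print(entities.single?.properties);
-```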
-
-## Cloud Storage
-Google Cloud Storage provides a highly available object store (aka BLOB
-store). See the product page [https://cloud.google.com/storage/][GCS]
-for more information.
-
-In Cloud Storage the objects (BLOBs) are organized in _buckets_. Each bucket
-has a name in a global namespace. The following code creates a new bucket
-named `my-bucket` and writes the content of the file `my-file.txt` to the
-object named `my-object`.
-
-```dart
-var bucket = await storage.createBucket('my-bucket');
-new File('my-file.txt').openRead().pipe(bucket.write('my-object'));
-```
-
-The following code will read back the object.
-
-```dart
-bucket.read('my-object').pipe(new File('my-file-copy.txt').openWrite());
-```
-
-## Cloud Pub/Sub
-Google Cloud Pub/Sub provides many-to-many, asynchronous messaging. See the
-product page [https://cloud.google.com/pubsub/][PubSub] for more information.
-
-Cloud Pub/Sub uses two concepts for messaging. _Topics_ are used if you want
-to send messages and _subscriptions_ are used to subscribe to topics and
-receive the messages. This decouples the producer of a message from the
-consumer of a message.
-
-The following code creates a _topic_ and sends a simple test message:
-
-```dart
-var topic = await pubsub.createTopic('my-topic');
-await topic.publishString('Hello, world!');
-```
-
-With the following code a _subscription_ is created on the _topic_ and
-a message is pulled using the subscription. A received message must be
-acknowledged when the consumer has processed it.
-
-```dart
-var subscription =
-    await pubsub.createSubscription('my-subscription', 'my-topic');
-var pullEvent = await subscription.pull();
-print(pullEvent.message.asString);
-await pullEvent.acknowledge();
-```
-
-It is also possible to receive messages using push events instead of pulling
-from the subscription. To do this the subscription should be configured as a
-push subscription with an HTTP endpoint.
-
-```dart
-await pubsub.createSubscription(
-    'my-subscription',
-    'my-topic',
-    endpoint: Uri.parse('https://server.example.com/push'));
-```
-
-With this subscription, all messages will be sent to the URL provided in the
-`endpoint` argument. The server needs to acknowledge receipt of the message
-with a `200 OK` reply.
-
-### Running tests
-
-If you want to run the end-to-end tests, a Google Cloud project is required.
-When running these tests the following environment variables need to be set:
-
-    GCLOUD_E2E_TEST_PROJECT
-    GCLOUD_E2E_TEST_KEY
-
-The value of the environment variable `GCLOUD_E2E_TEST_PROJECT` is the name
-of the Google Cloud project to use. The value of the environment variable
-`GCLOUD_E2E_TEST_KEY` is a Google Cloud Storage path (starting with `gs://`)
-to a JSON key file for a service account providing access to the Cloud Project.
-
-You will also need to create indexes as follows:
-
-```bash
-gcloud --project "$GCLOUD_E2E_TEST_PROJECT" datastore indexes create test/index.yaml
-```
-
-[Datastore]: https://cloud.google.com/datastore/
-[GCS]: https://cloud.google.com/storage/
-[PubSub]: https://cloud.google.com/pubsub/
-[googleapis]: https://pub.dartlang.org/packages/googleapis
-[googleapisbeta]: https://pub.dartlang.org/packages/googleapis_beta
-[googleapisauth]: https://pub.dartlang.org/packages/googleapis_auth
-[appengine]: https://pub.dartlang.org/packages/appengine
diff --git a/gcloud/analysis_options.yaml b/gcloud/analysis_options.yaml
deleted file mode 100644
index 50494a1..0000000
--- a/gcloud/analysis_options.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-include: package:pedantic/analysis_options.yaml
-analyzer:
-  strong-mode:
-    implicit-casts: false
-linter:
-  rules:
-    - avoid_null_checks_in_equality_operators
-    - await_only_futures
-    - camel_case_types
-    - cancel_subscriptions
-    - control_flow_in_finally
-    - directives_ordering
-    - empty_catches
-    - empty_constructor_bodies
-    - empty_statements
-    - iterable_contains_unrelated_type
-    - library_names
-    - library_prefixes
-    - list_remove_unrelated_type
-    - package_api_docs
-    - package_names
-    - package_prefixed_library_names
-    - prefer_final_fields
-    - super_goes_last
-    - test_types_in_equals
-    - throw_in_finally
-    - type_init_formals
-    - unnecessary_brace_in_string_interps
diff --git a/gcloud/dart_test.yaml b/gcloud/dart_test.yaml
deleted file mode 100644
index a465e8e..0000000
--- a/gcloud/dart_test.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-presets:
-  travis:
-    tags:
-      e2e:
-        skip: "e2e tests don't run on Travis"
diff --git a/gcloud/env.config b/gcloud/env.config
deleted file mode 100644
index cbb335f..0000000
--- a/gcloud/env.config
+++ /dev/null
@@ -1,2 +0,0 @@
-export GCLOUD_E2E_TEST_PROJECT='dart-serverless-experiment'
-export GCLOUD_E2E_TEST_KEY='/usr/local/google/home/jonasfj/credentials/dart-serverless-experiments-grpc-google-apis-playground.json'
diff --git a/gcloud/example/main.dart b/gcloud/example/main.dart
deleted file mode 100644
index 77dc695..0000000
--- a/gcloud/example/main.dart
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) 2019, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'dart:async' show Future;
-import 'dart:convert' show utf8;
-import 'package:googleapis_auth/auth_io.dart' as auth;
-import 'package:gcloud/storage.dart';
-
-// Note: The README.md contains more details on how to use this package.
-
-Future<void> main() async {
-  // When running on Google Compute Engine, App Engine, or GKE, credentials
-  // can be obtained from a metadata server as follows.
-  final client = await auth.clientViaMetadataServer();
-  try {
-    final storage = Storage(client, 'my_gcp_project');
-    final b = storage.bucket('test-bucket');
-    await b.writeBytes('my-file.txt', utf8.encode('hello world'));
-    print('Wrote "hello world" to "my-file.txt" in "test-bucket"');
-  } finally {
-    client.close();
-  }
-}
diff --git a/gcloud/lib/common.dart b/gcloud/lib/common.dart
deleted file mode 100644
index e516807..0000000
--- a/gcloud/lib/common.dart
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library gcloud.common;
-
-import 'dart:async';
-
-/// A single page of paged results from a query.
-///
-/// Use `next` to move to the next page. If this is the last page, `next`
-/// completes with `null`.
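-///
-/// An illustrative paging loop (a sketch; `pubsub` is assumed to be a
-/// `PubSub` client from `package:gcloud/pubsub.dart`):
-///
-///     var page = await pubsub.pageTopics();
-///     while (true) {
-///       page.items.forEach(print);
-///       if (page.isLast) break;
-///       page = await page.next();
-///     }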
-abstract class Page<T> {
-  /// The items in this page.
-  List<T> get items;
-
-  /// Whether this is the last page of results.
-  bool get isLast;
-
-  /// Move to the next page.
-  ///
-/// The future returned completes with the next page of results.
-  ///
-  /// If [next] is called on the last page the returned future completes
-  /// with `null`.
-  Future<Page<T>> next({int pageSize});
-}
-
-typedef FirstPageProvider<T> = Future<Page<T>> Function(int pageSize);
-
-/// Helper class to turn a series of pages into a stream.
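-///
-/// Illustrative sketch, assuming a hypothetical `firstPage` function of type
-/// [FirstPageProvider] which fetches the first page:
-///
-///     var stream = StreamFromPages<String>(firstPage).stream;
-///     stream.listen(print);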
-class StreamFromPages<T> {
-  static const int _PAGE_SIZE = 50;
-  final FirstPageProvider<T> _firstPageProvider;
-  bool _pendingRequest = false;
-  bool _paused = false;
-  bool _cancelled = false;
-  Page<T> _currentPage;
-  StreamController<T> _controller;
-
-  StreamFromPages(this._firstPageProvider) {
-    _controller = StreamController<T>(
-        sync: true,
-        onListen: _onListen,
-        onPause: _onPause,
-        onResume: _onResume,
-        onCancel: _onCancel);
-  }
-
-  Stream<T> get stream => _controller.stream;
-
-  void _handleError(e, StackTrace s) {
-    _controller.addError(e, s);
-    _controller.close();
-  }
-
-  void _handlePage(Page<T> page) {
-    if (_cancelled) return;
-    _pendingRequest = false;
-    _currentPage = page;
-    page.items.forEach(_controller.add);
-    if (page.isLast) {
-      _controller.close();
-    } else if (!_paused && !_cancelled) {
-      page.next().then(_handlePage, onError: _handleError);
-    }
-  }
-
-  _onListen() {
-    int pageSize = _PAGE_SIZE;
-    _pendingRequest = true;
-    _firstPageProvider(pageSize).then(_handlePage, onError: _handleError);
-  }
-
-  _onPause() {
-    _paused = true;
-  }
-
-  _onResume() {
-    _paused = false;
-    if (_pendingRequest) return;
-    _pendingRequest = true;
-    _currentPage.next().then(_handlePage, onError: _handleError);
-  }
-
-  _onCancel() {
-    _cancelled = true;
-  }
-}
diff --git a/gcloud/lib/datastore.dart b/gcloud/lib/datastore.dart
deleted file mode 100644
index 1267014..0000000
--- a/gcloud/lib/datastore.dart
+++ /dev/null
@@ -1,425 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// This library provides a low-level API for accessing Google's Cloud
-/// Datastore.
-///
-/// For more information on Cloud Datastore, please refer to the following
-/// developers page: https://cloud.google.com/datastore/docs
-library gcloud.datastore;
-
-import 'dart:async';
-
-import 'common.dart' show Page;
-import 'service_scope.dart' as ss;
-
-const Symbol _datastoreKey = #gcloud.datastore;
-
-/// Access the [Datastore] object available in the current service scope.
-///
-/// The returned object will be the one which was previously registered with
-/// [registerDatastoreService] within the current (or a parent) service scope.
-///
-/// Accessing this getter outside of a service scope will result in an error.
-/// See the `package:gcloud/service_scope.dart` library for more information.
-Datastore get datastoreService => ss.lookup(_datastoreKey) as Datastore;
-
-/// Registers the [Datastore] object within the current service scope.
-///
-/// The provided `datastore` object will be available via the top-level
-/// `datastoreService` getter.
-///
-/// Calling this function outside of a service scope will result in an error.
-/// Calling this function more than once inside the same service scope is not
-/// allowed.
-void registerDatastoreService(Datastore datastore) {
-  ss.register(_datastoreKey, datastore);
-}
-
-class ApplicationError implements Exception {
-  final String message;
-  ApplicationError(this.message);
-
-  String toString() => "ApplicationError: $message";
-}
-
-class DatastoreError implements Exception {
-  final String message;
-
-  DatastoreError([String message])
-      : message = (message != null
-            ? message
-            : 'DatastoreError: An unknown error occurred');
-
-  String toString() => '$message';
-}
-
-class UnknownDatastoreError extends DatastoreError {
-  UnknownDatastoreError(error) : super("An unknown error occurred ($error).");
-}
-
-class TransactionAbortedError extends DatastoreError {
-  TransactionAbortedError() : super("The transaction was aborted.");
-}
-
-class TimeoutError extends DatastoreError {
-  TimeoutError() : super("The operation timed out.");
-}
-
-/// Thrown when a query would require an index which was not set.
-///
-/// An application needs to specify indices in an `index.yaml` file and needs to
-/// create indices using the `gcloud preview datastore create-indexes` command.
-class NeedIndexError extends DatastoreError {
-  NeedIndexError() : super("An index is needed for the query to succeed.");
-}
-
-class PermissionDeniedError extends DatastoreError {
-  PermissionDeniedError() : super("Permission denied.");
-}
-
-class InternalError extends DatastoreError {
-  InternalError() : super("Internal service error.");
-}
-
-class QuotaExceededError extends DatastoreError {
-  QuotaExceededError(error) : super("Quota was exceeded ($error).");
-}
-
-/// A datastore Entity
-///
-/// An entity is identified by a unique `key` and consists of a number of
-/// `properties`. If a property should not be indexed, it needs to be included
-/// in the `unIndexedProperties` set.
-///
-/// The `properties` field maps names to values. Values can be of a primitive
-/// type or of a composed type.
-///
-/// The following primitive types are supported:
-///   bool, int, double, String, DateTime, BlobValue, Key
-///
-/// It is possible to have a `List` of values. The values must be primitive.
-/// Lists inside lists are not supported.
-///
-/// Whether a property is indexed or not applies to all values (this is only
-/// relevant if the value is a list of primitive values).
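-///
-/// Example of creating an entity with an incomplete key (an illustrative
-/// sketch; the property values are arbitrary):
-///
-///     var entity = Entity(
-///         Key([KeyElement('Person', null)]),
-///         {'name': 'Alice', 'age': 42});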
-class Entity {
-  final Key key;
-  final Map<String, Object> properties;
-  final Set<String> unIndexedProperties;
-
-  Entity(this.key, this.properties, {this.unIndexedProperties});
-}
-
-/// A complete or partial key.
-///
-/// A key uniquely identifies a datastore `Entity`. It consists of a
-/// partition and path. The path consists of one or more `KeyElement`s.
-///
-/// A key may be incomplete. This is useful when inserting `Entity`s whose IDs
-/// should be automatically allocated.
-///
-/// Example of a fully populated [Key]:
-///
-///     var fullKey = new Key([new KeyElement('Person', 1),
-///                            new KeyElement('Address', 2)]);
-///
-/// Example of a partially populated [Key] / an incomplete [Key]:
-///
-///     var partialKey = new Key([new KeyElement('Person', 1),
-///                               new KeyElement('Address', null)]);
-class Key {
-  /// The partition of this `Key`.
-  final Partition partition;
-
-  /// The path of `KeyElement`s.
-  final List<KeyElement> elements;
-
-  Key(this.elements, {Partition partition})
-      : this.partition = (partition == null) ? Partition.DEFAULT : partition;
-
-  factory Key.fromParent(String kind, int id, {Key parent}) {
-    Partition partition;
-    var elements = <KeyElement>[];
-    if (parent != null) {
-      partition = parent.partition;
-      elements.addAll(parent.elements);
-    }
-    elements.add(KeyElement(kind, id));
-    return Key(elements, partition: partition);
-  }
-
-  int get hashCode =>
-      elements.fold(partition.hashCode, (a, b) => a ^ b.hashCode);
-
-  bool operator ==(Object other) {
-    if (identical(this, other)) return true;
-
-    if (other is Key &&
-        partition == other.partition &&
-        elements.length == other.elements.length) {
-      for (int i = 0; i < elements.length; i++) {
-        if (elements[i] != other.elements[i]) return false;
-      }
-      return true;
-    }
-    return false;
-  }
-
-  String toString() {
-    var namespaceString =
-        partition.namespace == null ? 'null' : "'${partition.namespace}'";
-    return "Key(namespace=$namespaceString, path=[${elements.join(', ')}])";
-  }
-}
-
-/// A datastore partition.
-///
-/// A partition is used for partitioning a dataset into multiple namespaces.
-/// The default namespace is `null`. Using empty Strings as namespaces is
-/// invalid.
-///
-// TODO(Issue #6): Add dataset-id here.
-class Partition {
-  static const Partition DEFAULT = Partition._default();
-
-  /// The namespace of this partition.
-  final String namespace;
-
-  Partition(this.namespace) {
-    if (namespace == '') {
-      throw ArgumentError("'namespace' must not be empty");
-    }
-  }
-
-  const Partition._default() : this.namespace = null;
-
-  int get hashCode => namespace.hashCode;
-
-  bool operator ==(Object other) =>
-      other is Partition && namespace == other.namespace;
-}
-
-/// An element in a `Key`s path.
-class KeyElement {
-  /// The kind of this element.
-  final String kind;
-
-  /// The ID of this element. It must be either an `int` or a `String`.
-  ///
-  /// This may be `null`, in which case it does not identify an Entity. It is
-  /// possible to insert [Entity]s with incomplete keys and let Datastore
-  /// automatically select an unused integer ID.
-  final id;
-
-  KeyElement(this.kind, this.id) {
-    if (kind == null) {
-      throw ArgumentError("'kind' must not be null");
-    }
-    if (id != null) {
-      if (id is! int && id is! String) {
-        throw ArgumentError("'id' must be either null, a String or an int");
-      }
-    }
-  }
-
-  int get hashCode => kind.hashCode ^ id.hashCode;
-
-  bool operator ==(Object other) =>
-      other is KeyElement && kind == other.kind && id == other.id;
-
-  String toString() => "$kind.$id";
-}
-
-/// A relation used in query filters.
-class FilterRelation {
-  static const FilterRelation LessThan = FilterRelation._('<');
-  static const FilterRelation LessThanOrEqual = FilterRelation._('<=');
-  static const FilterRelation GreatherThan = FilterRelation._('>');
-  static const FilterRelation GreatherThanOrEqual = FilterRelation._('>=');
-  static const FilterRelation Equal = FilterRelation._('==');
-
-  final String name;
-
-  const FilterRelation._(this.name);
-
-  String toString() => name;
-}
-
-/// A filter used in queries.
-class Filter {
-  /// The relation used for comparing `name` with `value`.
-  final FilterRelation relation;
-
-  /// The name of the datastore property used in the comparison.
-  final String name;
-
-  /// The value used for comparing against the property named by `name`.
-  final Object value;
-
-  Filter(this.relation, this.name, this.value);
-}
-
-/// The direction of an order.
-///
-// TODO(Issue #6): Make this class Private and add the two statics to the
-/// 'Order' class.
-/// [i.e. so one can write Order.Ascending, Order.Descending].
-class OrderDirection {
-  static const OrderDirection Ascending = OrderDirection._('Ascending');
-  static const OrderDirection Decending = OrderDirection._('Decending');
-
-  final String name;
-
-  const OrderDirection._(this.name);
-}
-
-/// An order used in queries.
-class Order {
-  /// The direction of the order.
-  final OrderDirection direction;
-
-  /// The name of the property used for the order.
-  final String propertyName;
-
-  // TODO(Issue #6): Make [direction] the second argument and make it optional.
-  Order(this.direction, this.propertyName);
-}
-
-/// A datastore query.
-///
-/// A query consists of filters (kind, ancestor and property filters), one or
-/// more orders and an offset/limit pair.
-///
-/// All fields may be optional.
-///
-/// Example of building a [Query]:
-///     var person = ....;
-///     var query = new Query(ancestorKey: personKey, kind: 'Address')
-class Query {
-  /// Restrict the result set to entities of this kind.
-  final String kind;
-
-  /// Restrict the result set to entities which have this ancestorKey / parent.
-  final Key ancestorKey;
-
-  /// Restrict the result set by a list of property [Filter]s.
-  final List<Filter> filters;
-
-  /// Order the matching entities following the given property [Order]s.
-  final List<Order> orders;
-
-  /// Skip the first [offset] entities in the result set.
-  final int offset;
-
-  /// Limit the number of entities returned to [limit].
-  final int limit;
-
-  Query(
-      {this.ancestorKey,
-      this.kind,
-      this.filters,
-      this.orders,
-      this.offset,
-      this.limit});
-}
-
-/// The result of a commit.
-class CommitResult {
-  /// If the commit included `autoIdInserts`, this list will contain the fully
-  /// populated Keys, including the automatically allocated integer IDs.
-  final List<Key> autoIdInsertKeys;
-
-  CommitResult(this.autoIdInsertKeys);
-}
-
-/// A blob value which can be used as a property value in `Entity`s.
-class BlobValue {
-  /// The binary data of this blob.
-  final List<int> bytes;
-
-  BlobValue(this.bytes);
-}
-
-/// An opaque token returned by the `beginTransaction` method of a [Datastore].
-///
-/// This token can be passed to the `commit` and `lookup` calls if they should
-/// operate within this transaction.
-abstract class Transaction {}
-
-/// Interface used to talk to the Google Cloud Datastore service.
-///
-/// It can be used to insert/update/delete [Entity]s, lookup/query [Entity]s
-/// and allocate IDs from the auto ID allocation policy.
-abstract class Datastore {
-  /// Allocate integer IDs for the partially populated [keys] given as argument.
-  ///
-  /// The returned [Key]s will be fully populated with the allocated IDs.
-  Future<List<Key>> allocateIds(List<Key> keys);
-
-  /// Starts a new transaction and returns an opaque value representing it.
-  ///
-  /// If [crossEntityGroup] is `true`, the transaction can work on up to 5
-  /// entity groups. Otherwise the transaction will be limited to only operate
-  /// on a single entity group.
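-  ///
-  /// A minimal sketch of transactional usage (illustrative only; `key` is
-  /// assumed to be a fully populated [Key] and `datastore` an implementation
-  /// of this interface):
-  ///
-  ///     var transaction = await datastore.beginTransaction();
-  ///     try {
-  ///       var entities =
-  ///           await datastore.lookup([key], transaction: transaction);
-  ///       await datastore.commit(inserts: entities, transaction: transaction);
-  ///     } catch (error) {
-  ///       await datastore.rollback(transaction);
-  ///       rethrow;
-  ///     }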
-  Future<Transaction> beginTransaction({bool crossEntityGroup = false});
-
-  /// Make modifications to the datastore.
-  ///
-  ///  - `inserts` are [Entity]s which have a fully populated [Key] and should
-  ///    be either added to the datastore or updated.
-  ///
-  ///  - `autoIdInserts` are [Entity]s which do not have a fully populated [Key]
-  ///    and should be added to the dataset, automatically assigning integer
-  ///    IDs.
-  ///    The returned [CommitResult] will contain the fully populated keys.
-  ///
-  ///  - `deletes` are a list of fully populated [Key]s which uniquely identify
-  ///    the [Entity]s which should be deleted.
-  ///
-  /// If a [transaction] is given, all modifications will be done within that
-  /// transaction.
-  ///
-  /// This method might complete with a [TransactionAbortedError] error.
-  /// Users must take care of retrying transactions.
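-  ///
-  /// For example (a sketch, not from the original docs, using a `datastore`
-  /// instance of this interface), inserting an entity with an automatically
-  /// allocated ID:
-  ///
-  ///     var result = await datastore.commit(autoIdInserts: [
-  ///       Entity(Key([KeyElement('Person', null)]), {'name': 'Bob'}),
-  ///     ]);
-  ///     print(result.autoIdInsertKeys.single);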
-  // TODO(Issue #6): Consider splitting `inserts` into insert/update/upsert.
-  Future<CommitResult> commit(
-      {List<Entity> inserts,
-      List<Entity> autoIdInserts,
-      List<Key> deletes,
-      Transaction transaction});
-
-  /// Roll a started transaction back.
-  Future rollback(Transaction transaction);
-
-  /// Looks up the fully populated [keys] in the datastore and returns either
-  /// the [Entity] corresponding to the [Key] or `null`. The order in the
-  /// returned [Entity]s is the same as in [keys].
-  ///
-  /// If a [transaction] is given, the lookup will be within this transaction.
-  Future<List<Entity>> lookup(List<Key> keys, {Transaction transaction});
-
-  /// Runs a query on the dataset and returns a [Page] of matching [Entity]s.
-  ///
-  /// The [Page] instance returned might not contain all matching [Entity]s -
-  /// in which case `isLast` is set to `false`. The page's `next` method can
-  /// be used to page through the whole result set.
-  /// The maximum number of [Entity]s returned within a single page is
-  /// implementation specific.
-  ///
-  ///  - `query` is used to restrict the number of returned [Entity]s and
-  ///    may specify an order.
-  ///
-  ///  - `partition` can be used to specify the namespace used for the lookup.
-  ///
-  /// If a [transaction] is given, the query will be within this transaction.
-  /// But note that arbitrary queries within a transaction are not possible.
-  /// A transaction is limited to a very small number of entity groups. Usually
-  /// queries with transactions are restricted by providing an ancestor filter.
-  ///
-  /// Outside of transactions, the result set might be stale. Queries are by
-  /// default eventually consistent.
-  Future<Page<Entity>> query(Query query,
-      {Partition partition, Transaction transaction});
-}
diff --git a/gcloud/lib/db.dart b/gcloud/lib/db.dart
deleted file mode 100644
index 9fba24d..0000000
--- a/gcloud/lib/db.dart
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library gcloud.db;
-
-import 'dart:async';
-import 'dart:collection';
-// dart:core is imported explicitly so it is available at top-level without
-//   the `core` prefix defined below.
-import 'dart:core';
-// Importing `dart:core` as `core` to allow access to `String` in `IdType`
-//   without conflicts.
-import 'dart:core' as core;
-import 'dart:mirrors' as mirrors;
-
-import 'common.dart' show StreamFromPages;
-import 'datastore.dart' as ds;
-import 'service_scope.dart' as ss;
-
-part 'src/db/annotations.dart';
-part 'src/db/db.dart';
-part 'src/db/models.dart';
-part 'src/db/model_db.dart';
-part 'src/db/model_db_impl.dart';
-
-const Symbol _dbKey = #gcloud.db;
-
-/// Access the [DatastoreDB] object available in the current service scope.
-///
-/// The returned object will be the one which was previously registered with
-/// [registerDbService] within the current (or a parent) service scope.
-///
-/// Accessing this getter outside of a service scope will result in an error.
-/// See the `package:gcloud/service_scope.dart` library for more information.
-DatastoreDB get dbService => ss.lookup(_dbKey) as DatastoreDB;
-
-/// Registers the [DatastoreDB] object within the current service scope.
-///
-/// The provided `db` object will be available via the top-level `dbService`
-/// getter.
-///
-/// Calling this function outside of a service scope will result in an error.
-/// Calling this function more than once inside the same service scope is not
-/// allowed.
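-///
-/// A minimal sketch (assuming a [DatastoreDB] instance `db` has been created,
-/// for example as shown in the package README):
-///
-///     ss.fork(() {
-///       registerDbService(db);
-///       // Code running inside this scope can use the `dbService` getter.
-///     });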
-void registerDbService(DatastoreDB db) {
-  ss.register(_dbKey, db);
-}
diff --git a/gcloud/lib/db/metamodel.dart b/gcloud/lib/db/metamodel.dart
deleted file mode 100644
index 906ddae..0000000
--- a/gcloud/lib/db/metamodel.dart
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library gcloud.db.meta_model;
-
-import '../db.dart' as db;
-
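-/// Represents the datastore `__namespace__` meta-kind.
-///
-/// Illustrative query (a sketch, assuming a `dbService` registered via
-/// `package:gcloud/db.dart`):
-///
-///     var namespaces =
-///         await db.dbService.query<Namespace>().run().toList();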
-@db.Kind(name: '__namespace__')
-class Namespace extends db.ExpandoModel {
-  static const int EmptyNamespaceId = 1;
-
-  String get name {
-    // The default namespace will be reported with id 1.
-    if (id == Namespace.EmptyNamespaceId) return null;
-    return id as String;
-  }
-}
-
-@db.Kind(name: '__kind__')
-class Kind extends db.Model {
-  String get name => id as String;
-}
diff --git a/gcloud/lib/http.dart b/gcloud/lib/http.dart
deleted file mode 100644
index 5051807..0000000
--- a/gcloud/lib/http.dart
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Provides access to an authenticated HTTP client which can be used to access
-/// Google APIs.
-library gcloud.http;
-
-import 'package:http/http.dart' as http;
-
-import 'service_scope.dart' as ss;
-
-const Symbol _authenticatedClientKey = #gcloud.http;
-
-/// Access the [http.Client] object available in the current service scope.
-///
-/// The returned object will be the one which was previously registered with
-/// [registerAuthClientService] within the current (or a parent) service
-/// scope.
-///
-/// Accessing this getter outside of a service scope will result in an error.
-/// See the `package:gcloud/service_scope.dart` library for more information.
-http.Client get authClientService =>
-    ss.lookup(_authenticatedClientKey) as http.Client;
-
-/// Registers the [http.Client] object within the current service scope.
-///
-/// The provided `client` object will be available via the top-level
-/// `authClientService` getter.
-///
-/// Calling this function outside of a service scope will result in an error.
-/// Calling this function more than once inside the same service scope is not
-/// allowed.
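-///
-/// A minimal sketch (assuming an authenticated `client`, for example one
-/// obtained via `package:googleapis_auth`):
-///
-///     ss.fork(() {
-///       registerAuthClientService(client);
-///       // Code running inside this scope can use `authClientService`.
-///     });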
-void registerAuthClientService(http.Client client, {bool close = true}) {
-  ss.register(_authenticatedClientKey, client);
-  if (close) {
-    ss.registerScopeExitCallback(() {
-      client.close();
-    });
-  }
-}
diff --git a/gcloud/lib/pubsub.dart b/gcloud/lib/pubsub.dart
deleted file mode 100644
index 207967d..0000000
--- a/gcloud/lib/pubsub.dart
+++ /dev/null
@@ -1,428 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library gcloud.pubsub;
-
-import 'dart:async';
-import 'dart:collection';
-import 'dart:convert';
-import 'dart:io';
-import 'package:http/http.dart' as http;
-
-import 'package:googleapis/pubsub/v1.dart' as pubsub;
-
-import 'common.dart';
-import 'service_scope.dart' as ss;
-
-export 'common.dart';
-
-part 'src/pubsub_impl.dart';
-
-const Symbol _pubsubKey = #gcloud.pubsub;
-
-/// Access the [PubSub] object available in the current service scope.
-///
-/// The returned object will be the one which was previously registered with
-/// [registerPubSubService] within the current (or a parent) service scope.
-///
-/// Accessing this getter outside of a service scope will result in an error.
-/// See the `package:gcloud/service_scope.dart` library for more information.
-PubSub get pubsubService => ss.lookup(_pubsubKey) as PubSub;
-
-/// Registers the [pubsub] object within the current service scope.
-///
-/// The provided `pubsub` object will be available via the top-level
-/// `pubsubService` getter.
-///
-/// Calling this function outside of a service scope will result in an error.
-/// Calling this function more than once inside the same service scope is not
-/// allowed.
-void registerPubSubService(PubSub pubsub) {
-  ss.register(_pubsubKey, pubsub);
-}
-
-/// A Cloud Pub/Sub client.
-///
-/// Connects to the Cloud Pub/Sub service and gives access to its operations.
-///
-/// Google Cloud Pub/Sub is a reliable, many-to-many, asynchronous messaging
-/// service from Google Cloud Platform. A detailed overview is available on
-/// [Pub/Sub docs](https://developers.google.com/pubsub/overview).
-///
-/// To access Pub/Sub, an authenticated HTTP client is required. This client
-/// should as a minimum provide access to the scopes in `PubSub.SCOPES`.
-///
-/// The following example shows how to access Pub/Sub using a service account
-/// and pull a message from a subscription.
-///
-///     import 'package:http/http.dart' as http;
-///     import 'package:googleapis_auth/auth_io.dart' as auth;
-///     import 'package:gcloud/pubsub.dart';
-///
-///     Future<http.Client> createClient() {
-///       // Service account credentials retrieved from Cloud Console.
-///       String creds =
-///           r'''
-///           {
-///             "private_key_id": ...,
-///             "private_key": ...,
-///             "client_email": ...,
-///             "client_id": ...,
-///             "type": "service_account"
-///           }''';
-///       return auth.clientViaServiceAccount(
-///           new auth.ServiceAccountCredentials.fromJson(creds),
-///           PubSub.SCOPES);
-///     }
-///
-///     main() {
-///       var project = 'my-project';
-///       var client;
-///       var pubsub;
-///       createClient().then((c) {
-///         client = c;
-///         pubsub = new PubSub(client, project);
-///         return pubsub.lookupSubscription('my-subscription');
-///       })
-///       .then((Subscription subscription) => subscription.pull())
-///       .then((PullEvent event) => print('Message ${event.message.asString}'))
-///       .whenComplete(() => client.close());
-///     }
-///
-/// When working with topics and subscriptions they are referred to using
-/// names. These names can be either relative names or absolute names.
-///
-/// An absolute name of a topic starts with `projects/` and has the form:
-///
-///     projects/<project-id>/topics/<relative-name>
-///
-/// When a relative topic name is used, its absolute name is generated by
-/// pre-pending `projects/<project-id>/topics/`, where `<project-id>` is the
-/// project id passed to the constructor.
-///
-/// An absolute name of a subscription starts with `projects/` and has the
-/// form:
-///
-///     projects/<project-id>/subscriptions/<relative-name>
-///
-/// When a relative subscription name is used, its absolute name is
-/// generated by pre-pending `projects/<project-id>/subscriptions/`, where
-/// `<project-id>` is the project id passed to the constructor.
-///
-abstract class PubSub {
-  /// List of required OAuth2 scopes for Pub/Sub operation.
-  static const SCOPES = [pubsub.PubsubApi.PubsubScope];
-
-  /// Access Pub/Sub using an authenticated client.
-  ///
-  /// The [client] is an authenticated HTTP client. This client must
-  /// provide access to at least the scopes in `PubSub.SCOPES`.
-  ///
-  /// The [project] is the name of the Google Cloud project.
-  ///
-  /// Returns an object providing access to Pub/Sub. The passed-in [client] will
-  /// not be closed automatically. The caller is responsible for closing it.
-  factory PubSub(http.Client client, String project) {
-    var emulator = Platform.environment['PUBSUB_EMULATOR_HOST'];
-    return emulator == null
-        ? _PubSubImpl(client, project)
-        : _PubSubImpl.rootUrl(client, project, "http://$emulator/");
-  }
-
-  /// The name of the project.
-  String get project;
-
-  /// Create a new topic named [name].
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with the newly created topic.
-  Future<Topic> createTopic(String name);
-
-  /// Delete topic named [name].
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with `null` when the operation
-  /// is finished.
-  Future deleteTopic(String name);
-
-  /// Look up topic named [name].
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with the topic.
-  Future<Topic> lookupTopic(String name);
-
-  /// Lists all topics.
-  ///
-  /// Returns a `Stream` of topics.
-  Stream<Topic> listTopics();
-
-  /// Start paging through all topics.
-  ///
-  /// The maximum number of topics in each page is specified in [pageSize].
-  ///
-  /// Returns a `Future` which completes with a `Page` object holding the
-  /// first page. Use the `Page` object to move to the next page of topics.
-  Future<Page<Topic>> pageTopics({int pageSize = 50});
-
-  /// Create a new subscription named [name] listening on topic [topic].
-  ///
-  /// If [endpoint] is passed this will create a push subscription.
-  ///
-  /// Otherwise this will create a pull subscription.
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with the newly created subscription.
-  Future<Subscription> createSubscription(String name, String topic,
-      {Uri endpoint});
-
-  /// Delete subscription named [name].
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with the subscription.
-  Future deleteSubscription(String name);
-
-  /// Lookup subscription with named [name].
-  ///
-  /// The [name] can be either an absolute name or a relative name.
-  ///
-  /// Returns a `Future` which completes with the subscription.
-  Future<Subscription> lookupSubscription(String name);
-
-  /// List subscriptions.
-  ///
-  /// If [query] is passed this will list all subscriptions matching the query.
-  ///
-  /// Otherwise this will list all subscriptions.
-  ///
-  /// The only supported query string is the name of a topic. If a name of a
-  /// topic is passed as [query], this will list all subscriptions on that
-  /// topic.
-  ///
-  /// Returns a `Stream` of subscriptions.
-  Stream<Subscription> listSubscriptions([String query]);
-
-  /// Start paging through subscriptions.
-  ///
-  /// If [topic] is passed this will list all subscriptions to that topic.
-  ///
-  /// Otherwise this will list all subscriptions.
-  ///
-  /// The maximum number of subscriptions in each page is specified in
-  /// [pageSize]
-  ///
-  /// Returns a `Future` which completes with a `Page` object holding the
-  /// first page. Use the `Page` object to move to the next page of
-  /// subscriptions.
-  Future<Page<Subscription>> pageSubscriptions(
-      {String topic, int pageSize = 50});
-}
-
-/// A Pub/Sub topic.
-///
-/// A topic is used by a publisher to publish (send) messages.
-abstract class Topic {
-  /// The relative name of this topic.
-  String get name;
-
-  /// The name of the project for this topic.
-  String get project;
-
-  /// The absolute name of this topic.
-  String get absoluteName;
-
-  /// Delete this topic.
-  ///
-  /// Returns a `Future` which completes with `null` when the operation
-  /// is finished.
-  Future delete();
-
-  /// Publish a message.
-  ///
-  /// Returns a `Future` which completes with `null` when the operation
-  /// is finished.
-  Future publish(Message message);
-
-  /// Publish a string as a message.
-  ///
-  /// The message will get the attributes specified in [attributes].
-  ///
-  /// The [attributes] are passed together with the message to the receiver.
-  ///
-  /// Returns a `Future` which completes with `null` when the operation
-  /// is finished.
-  Future publishString(String message, {Map<String, String> attributes});
-
-  /// Publish bytes as a message.
-  ///
-  /// The message will get the attributes specified in [attributes].
-  ///
-  /// The [attributes] are passed together with the message to the receiver.
-  ///
-  /// Returns a `Future` which completes with `null` when the operation
-  /// is finished.
-  Future publishBytes(List<int> message, {Map<String, String> attributes});
-}
-
-/// A Pub/Sub subscription
-///
-/// A subscription is used to receive messages. A subscriber application
-/// creates a subscription on a topic to receive messages from it.
-///
-/// Subscriptions can be either pull subscriptions or push subscriptions.
-///
-/// For a pull subscription the receiver calls the `Subscription.pull`
-/// method on the subscription object to get the next message.
-///
-/// For a push subscription an HTTPS endpoint is configured. This endpoint
-/// gets POST requests with the messages.
-abstract class Subscription {
-  /// The relative name of this subscription.
-  String get name;
-
-  /// The name of the project for this subscription.
-  String get project;
-
-  /// The absolute name of this subscription.
-  String get absoluteName;
-
-  /// The topic subscribed to.
-  Topic get topic;
-
-  /// Whether this is a push subscription.
-  ///
-  /// A push subscription is configured with an endpoint URI, and messages
-  /// are automatically sent to this endpoint without needing to call [pull].
-  bool get isPush;
-
-  /// Whether this is a pull subscription.
-  ///
-  /// A subscription without a configured endpoint URI is a pull subscription.
-  /// Messages are not delivered automatically, but must instead be requested
-  /// using [pull].
-  bool get isPull;
-
-  /// The URI for the push endpoint.
-  ///
-  /// If this is a pull subscription this is `null`.
-  Uri get endpoint;
-
-  /// Update the push configuration with a new endpoint.
-  ///
-  /// If [endpoint] is `null`, the subscription stops delivering messages
-  /// automatically, and becomes a pull subscription, if it isn't already.
-  ///
-  /// If [endpoint] is not `null`, the subscription will be a push
-  /// subscription, if it wasn't already, and Pub/Sub will start automatically
-  /// delivering messages to the endpoint URI.
-  ///
-  /// Returns a `Future` which completes when the operation completes.
-  Future updatePushConfiguration(Uri endpoint);
-
-  /// Delete this subscription.
-  ///
-  /// Returns a `Future` which completes when the operation completes.
-  Future delete();
-
-  /// Pull a message from the subscription.
-  ///
-  /// If `wait` is `true` (the default), the method will wait for a message
-  /// to become available, and will then complete the `Future` with a
-  /// `PullEvent` containing the message.
-  ///
-  /// If [wait] is `false`, the method will complete the returned `Future`
-  /// with `null` if it finds that there are no messages available.
-  Future<PullEvent> pull({bool wait = true});
-}
-
-/// The content of a Pub/Sub message.
-///
-/// All Pub/Sub messages consist of a body of binary data and an optional
-/// set of attributes (key-value pairs) associated with them.
-///
-/// A `Message` contains the message body as a list of bytes. The message body
-/// can be read and written as a String, in which case the string is converted
-/// to or from UTF-8 automatically.
-abstract class Message {
-  /// Creates a new message with a String for the body. The String will
-  /// be UTF-8 encoded to create the actual binary body for the message.
-  ///
-  /// Message attributes can be passed in the [attributes] map.
-  factory Message.withString(String message, {Map<String, String> attributes}) =
-      _MessageImpl.withString;
-
-  /// Creates a new message with a binary body.
-  ///
-  /// Message attributes can be passed in the [attributes] Map.
-  factory Message.withBytes(List<int> message,
-      {Map<String, String> attributes}) = _MessageImpl.withBytes;
-
-  /// The message body as a String.
-  ///
-  /// The binary body is decoded into a String using a UTF-8 decoder.
-  ///
-  /// If the body is not UTF-8 encoded use the [asBytes] getter and manually
-  /// apply the correct decoding.
-  String get asString;
-
-  /// The message body as bytes.
-  List<int> get asBytes;
-
-  /// The attributes for this message.
-  Map<String, String> get attributes;
-}
-
-/// A Pub/Sub pull event.
-///
-/// Instances of this class are returned when pulling messages with
-/// [Subscription.pull].
-abstract class PullEvent {
-  /// The message content.
-  Message get message;
-
-  /// Acknowledge reception of this message.
-  ///
-  /// Returns a `Future` which completes with `null` when the acknowledge has
-  /// been processed.
-  Future acknowledge();
-}
-
-/// Pub/Sub push event.
-///
-/// This class can be used in an HTTP server for decoding messages pushed to
-/// an endpoint.
-///
-/// When a message is received on a push endpoint use the [PushEvent.fromJson]
-/// constructor with the HTTP body to decode the received message.
-///
-/// E.g. with a `dart:io` HTTP handler:
-///
-///     void pushHandler(HttpRequest request) {
-///       // Decode the JSON body.
-///       request.transform(utf8.decoder).join('').then((body) {
-///         // Decode the JSON into a push message.
-///         var message = new PushEvent.fromJson(body);
-///
-///         // Process the message...
-///
-///         // Respond with status code 200 to acknowledge the message.
-///         request.response.statusCode = 200;
-///         request.response.close();
-///       });
-///     }
-///
-abstract class PushEvent {
-  /// The message content.
-  Message get message;
-
-  /// The absolute name of the subscription.
-  String get subscriptionName;
-
-  /// Create a `PushEvent` from JSON received on a Pub/Sub push endpoint.
-  factory PushEvent.fromJson(String json) = _PushEventImpl.fromJson;
-}
diff --git a/gcloud/lib/service_scope.dart b/gcloud/lib/service_scope.dart
deleted file mode 100644
index ea67d9f..0000000
--- a/gcloud/lib/service_scope.dart
+++ /dev/null
@@ -1,286 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// This library enables one to create a service scope in which code can run.
-///
-/// A service scope is an environment in which code runs. The environment is a
-/// [Zone] with added functionality. Code can be run inside a new service scope
-/// by using the `fork(callback)` method. This will call `callback` inside a new
-/// service scope and will keep the scope alive until the Future returned by the
-/// callback completes. At this point the service scope ends.
-///
-/// Code running inside a new service scope can
-///
-///  - register objects (e.g. a database connection pool or a logging service)
-///  - look up previously registered objects
-///  - register on-scope-exit handlers
-///
-/// Service scopes can be nested. All registered values from the parent service
-/// scope are still accessible as long as they have not been overridden. The
-/// callback passed to `fork()` is responsible for not completing its returned
-/// Future until all nested service scopes have ended.
-///
-/// The on-scope-exit callbacks will be called when the service scope ends. The
-/// callbacks are run in reverse registration order and are guaranteed to be
-/// executed. During a scope exit callback the active service scope cannot
-/// be modified anymore and `lookup()`s will only return values which were
-/// registered before the registration of the on-scope-exit callback.
-///
-/// One use-case of this is making services available to a server application.
-/// The server application will run inside a service scope which will have all
-/// necessary services registered.
-/// Once the server app shuts down, the registered on-scope-exit callbacks will
-/// automatically be invoked and the process will shut down cleanly.
-///
-/// Here is an example use case:
-///
-///      import 'dart:async';
-///      import 'package:gcloud/service_scope.dart' as scope;
-///
-///      class DBPool { ... }
-///
-///      DBPool get dbService => scope.lookup(#dbpool);
-///
-///      Future runApp() {
-///        // The application can use the registered objects (here the
-///        // dbService). It does not need to pass it around, but can use a
-///        // global getter.
-///        return dbService.query( ... ).listen(print).asFuture();
-///      }
-///
-///      main() {
-///        // Creates a new service scope and runs the given closure inside it.
-///        scope.fork(() {
-///          // We create a new database pool with 10 active connections and
-///          // add it to the current service scope with key `#dbpool`.
-///          // In addition we insert an on-scope-exit callback which will be
-///          // called once the application is done.
-///          var pool = new DBPool(connections: 10);
-///          scope.register(#dbpool, pool, onScopeExit: () => pool.close());
-///          return runApp();
-///        }).then((_) {
-///          print('Server application shut down cleanly');
-///        });
-///      }
-///
-/// As an example, the `package:appengine/appengine.dart` package runs request
-/// handlers inside a service scope, which has most `package:gcloud` services
-/// registered.
-///
-/// The core application code can then be independent of `package:appengine`
-/// and instead depend only on the services needed (e.g.
-/// `package:gcloud/storage.dart`) by using getters in the service library (e.g.
-/// the `storageService`) which are implemented with service scope lookups.
-library gcloud.service_scope;
-
-import 'dart:async';
-
-/// The Symbol used as index in the zone map for the service scope object.
-const Symbol _ServiceScopeKey = #gcloud.service_scope;
-
-/// An empty service scope.
-///
-/// A new service scope can be created by calling [fork] on the empty
-/// service scope.
-final _ServiceScope _emptyServiceScope = _ServiceScope();
-
-/// Returns the current [_ServiceScope] object.
-_ServiceScope get _serviceScope =>
-    Zone.current[_ServiceScopeKey] as _ServiceScope;
-
-/// Start a new zone with a new service scope and run [func] inside it.
-///
-/// The function [func] must return a `Future` and the service scope will end
-/// when this future completes.
-///
-/// If an uncaught error occurs and [onError] is given, it will be called. The
-/// `onError` parameter can take the same values as `Zone.current.fork`.
-Future fork(Future func(), {Function onError}) {
-  var currentServiceScope = _serviceScope;
-  if (currentServiceScope == null) {
-    currentServiceScope = _emptyServiceScope;
-  }
-  return currentServiceScope._fork(func, onError: onError);
-}
-
-/// Register a new [value] into the current service scope using the given
-/// [key].
-///
-/// If [onScopeExit] is provided, it will be called when the service scope ends.
-///
-/// The registered on-scope-exit functions are executed in reverse registration
-/// order.
-void register(Object key, Object value, {ScopeExitCallback onScopeExit}) {
-  var serviceScope = _serviceScope;
-  if (serviceScope == null) {
-    throw StateError('Not running inside a service scope zone.');
-  }
-  serviceScope.register(key, value, onScopeExit: onScopeExit);
-}
-
-/// Register an [onScopeExitCallback] to be invoked when this service scope ends.
-///
-/// The registered on-scope-exit functions are executed in reverse registration
-/// order.
-void registerScopeExitCallback(ScopeExitCallback onScopeExitCallback) {
-  var serviceScope = _serviceScope;
-  if (serviceScope == null) {
-    throw StateError('Not running inside a service scope zone.');
-  }
-  serviceScope.registerOnScopeExitCallback(onScopeExitCallback);
-}
-
-/// Look up an item by its key in the currently active service scope.
-///
-/// Returns `null` if there is no entry with the given key.
-Object lookup(Object key) {
-  var serviceScope = _serviceScope;
-  if (serviceScope == null) {
-    throw StateError('Not running inside a service scope zone.');
-  }
-  return serviceScope.lookup(key);
-}
-
-/// Represents a global service scope of values stored via zones.
-class _ServiceScope {
-  /// A mapping of keys to values stored inside the service scope.
-  final Map<Object, _RegisteredEntry> _key2Values =
-      Map<Object, _RegisteredEntry>();
-
-  /// A set which indicates whether an object was copied from its parent.
-  final Set<Object> _parentCopies = Set<Object>();
-
-  /// On-Scope-Exit functions which will be called in reverse insertion order.
-  final List<_RegisteredEntry> _registeredEntries = [];
-
-  bool _cleaningUp = false;
-  bool _destroyed = false;
-
-  /// Looks up an object by its service scope key - returns `null` if not
-  /// found.
-  Object lookup(Object serviceScope) {
-    _ensureNotInDestroyingState();
-    var entry = _key2Values[serviceScope];
-    return entry != null ? entry.value : null;
-  }
-
-  /// Inserts a new item to the service scope using [serviceScopeKey].
-  ///
-  /// Optionally calls a [onScopeExit] function once this service scope ends.
-  void register(Object serviceScopeKey, Object value,
-      {ScopeExitCallback onScopeExit}) {
-    _ensureNotInCleaningState();
-    _ensureNotInDestroyingState();
-
-    bool isParentCopy = _parentCopies.contains(serviceScopeKey);
-    if (!isParentCopy && _key2Values.containsKey(serviceScopeKey)) {
-      throw ArgumentError(
-          'Service scope already contains key $serviceScopeKey.');
-    }
-
-    var entry = _RegisteredEntry(serviceScopeKey, value, onScopeExit);
-
-    _key2Values[serviceScopeKey] = entry;
-    if (isParentCopy) _parentCopies.remove(serviceScopeKey);
-
-    _registeredEntries.add(entry);
-  }
-
-  /// Inserts a new on-scope-exit function to be called once this service scope
-  /// ends.
-  void registerOnScopeExitCallback(ScopeExitCallback onScopeExitCallback) {
-    _ensureNotInCleaningState();
-    _ensureNotInDestroyingState();
-
-    if (onScopeExitCallback != null) {
-      _registeredEntries.add(_RegisteredEntry(null, null, onScopeExitCallback));
-    }
-  }
-
-  /// Start a new zone with a forked service scope.
-  Future _fork(Future func(), {Function onError}) {
-    _ensureNotInCleaningState();
-    _ensureNotInDestroyingState();
-
-    var serviceScope = _copy();
-    var map = {_ServiceScopeKey: serviceScope};
-    return runZoned(() {
-      var f = func();
-      if (f is! Future) {
-        throw ArgumentError('Forking a service scope zone requires the '
-            'callback function to return a future.');
-      }
-      return f.whenComplete(serviceScope._runScopeExitHandlers);
-    }, zoneValues: map, onError: onError);
-  }
-
-  void _ensureNotInDestroyingState() {
-    if (_destroyed) {
-      throw StateError(
-          'The service scope has already been exited. It is therefore '
-          'forbidden to use this service scope anymore. '
-          'Please make sure that your code waits for all asynchronous tasks '
-          'before the closure passed to fork() completes.');
-    }
-  }
-
-  void _ensureNotInCleaningState() {
-    if (_cleaningUp) {
-      throw StateError(
-          'The service scope is in the process of cleaning up. It is therefore '
-          'forbidden to make any modifications to the current service scope. '
-          'Please make sure that your code waits for all asynchronous tasks '
-          'before the closure passed to fork() completes.');
-    }
-  }
-
-  /// Copies all service scope entries to a new service scope, but not their
-  /// on-scope-exit handlers.
-  _ServiceScope _copy() {
-    var serviceScopeCopy = _ServiceScope();
-    serviceScopeCopy._key2Values.addAll(_key2Values);
-    serviceScopeCopy._parentCopies.addAll(_key2Values.keys);
-    return serviceScopeCopy;
-  }
-
-  /// Runs all on-scope-exit functions in [_ServiceScope].
-  Future _runScopeExitHandlers() {
-    _cleaningUp = true;
-    var errors = [];
-
-    // We are running all on-scope-exit functions in reverse registration order.
-    // Even if one fails, we continue cleaning up and then report the list of
-    // errors (if there were any).
-    return Future.forEach(_registeredEntries.reversed,
-        (_RegisteredEntry registeredEntry) {
-      if (registeredEntry.key != null) {
-        _key2Values.remove(registeredEntry.key);
-      }
-      if (registeredEntry.scopeExitCallback != null) {
-        return Future.sync(registeredEntry.scopeExitCallback)
-            .catchError((e, s) => errors.add(e));
-      } else {
-        return Future.value();
-      }
-    }).then((_) {
-      _cleaningUp = true;
-      _destroyed = true;
-      if (errors.isNotEmpty) {
-        throw Exception(
-            'The following errors occurred while running scope exit handlers'
-            ': $errors');
-      }
-    });
-  }
-}
-
-typedef ScopeExitCallback = Future Function();
-
-class _RegisteredEntry {
-  final Object key;
-  final Object value;
-  final ScopeExitCallback scopeExitCallback;
-
-  _RegisteredEntry(this.key, this.value, this.scopeExitCallback);
-}
diff --git a/gcloud/lib/src/datastore_impl.dart b/gcloud/lib/src/datastore_impl.dart
deleted file mode 100644
index 840409e..0000000
--- a/gcloud/lib/src/datastore_impl.dart
+++ /dev/null
@@ -1,609 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library gcloud.datastore_impl;
-
-import 'dart:async';
-
-import 'package:googleapis/datastore/v1.dart' as api;
-import 'package:http/http.dart' as http;
-
-import '../common.dart' show Page;
-import '../datastore.dart' as datastore;
-
-class TransactionImpl implements datastore.Transaction {
-  final String data;
-  TransactionImpl(this.data);
-}
-
-class DatastoreImpl implements datastore.Datastore {
-  static const List<String> SCOPES = <String>[
-    api.DatastoreApi.DatastoreScope,
-    api.DatastoreApi.CloudPlatformScope,
-  ];
-
-  final api.DatastoreApi _api;
-  final String _project;
-
-  /// The [project] parameter is the name of the cloud project (it should not
-  /// start with a `s~`).
-  DatastoreImpl(http.Client client, String project)
-      : _api = api.DatastoreApi(client),
-        _project = project;
-
-  api.Key _convertDatastore2ApiKey(datastore.Key key, {bool enforceId = true}) {
-    var apiKey = api.Key();
-
-    apiKey.partitionId = api.PartitionId()
-      ..projectId = _project
-      ..namespaceId = key.partition.namespace;
-
-    apiKey.path = key.elements.map((datastore.KeyElement element) {
-      final part = api.PathElement();
-      part.kind = element.kind;
-      final id = element.id;
-      if (id is int) {
-        part.id = '$id';
-      } else if (id is String) {
-        part.name = id;
-      } else if (enforceId) {
-        throw datastore.ApplicationError(
-            'Error while encoding entity key: Using `null` as the id is not '
-            'allowed.');
-      }
-      return part;
-    }).toList();
-
-    return apiKey;
-  }
-
-  static datastore.Key _convertApi2DatastoreKey(api.Key key) {
-    var elements = key.path.map((api.PathElement element) {
-      if (element.id != null) {
-        return datastore.KeyElement(element.kind, int.parse(element.id));
-      } else if (element.name != null) {
-        return datastore.KeyElement(element.kind, element.name);
-      } else {
-        throw datastore.DatastoreError(
-            'Invalid server response: Expected allocated name/id.');
-      }
-    }).toList();
-
-    datastore.Partition partition;
-    if (key.partitionId != null) {
-      partition = datastore.Partition(key.partitionId.namespaceId);
-      // TODO: assert projectId.
-    }
-    return datastore.Key(elements, partition: partition);
-  }
-
-  bool _compareApiKey(api.Key a, api.Key b) {
-    if (a.path.length != b.path.length) return false;
-
-    // FIXME(Issue #2): Is this comparison working correctly?
-    if (a.partitionId != null) {
-      if (b.partitionId == null) return false;
-      if (a.partitionId.projectId != b.partitionId.projectId) return false;
-      if (a.partitionId.namespaceId != b.partitionId.namespaceId) return false;
-    } else {
-      if (b.partitionId != null) return false;
-    }
-
-    for (int i = 0; i < a.path.length; i++) {
-      if (a.path[i].id != b.path[i].id ||
-          a.path[i].name != b.path[i].name ||
-          a.path[i].kind != b.path[i].kind) return false;
-    }
-    return true;
-  }
-
-  api.Value _convertDatastore2ApiPropertyValue(value, bool indexed,
-      {bool lists = true}) {
-    var apiValue = api.Value()..excludeFromIndexes = !indexed;
-    if (value == null) {
-      return apiValue..nullValue = "NULL_VALUE";
-    } else if (value is bool) {
-      return apiValue..booleanValue = value;
-    } else if (value is int) {
-      return apiValue..integerValue = '$value';
-    } else if (value is double) {
-      return apiValue..doubleValue = value;
-    } else if (value is String) {
-      return apiValue..stringValue = value;
-    } else if (value is DateTime) {
-      return apiValue..timestampValue = value.toIso8601String();
-    } else if (value is datastore.BlobValue) {
-      return apiValue..blobValueAsBytes = value.bytes;
-    } else if (value is datastore.Key) {
-      return apiValue
-        ..keyValue = _convertDatastore2ApiKey(value, enforceId: false);
-    } else if (value is List) {
-      if (!lists) {
-        // FIXME(Issue #3): Consistently handle exceptions.
-        throw Exception('List values are not allowed.');
-      }
-
-      convertItem(i) =>
-          _convertDatastore2ApiPropertyValue(i, indexed, lists: false);
-
-      return api.Value()
-        ..arrayValue =
-            (api.ArrayValue()..values = value.map(convertItem).toList());
-    } else {
-      throw UnsupportedError(
-          'Type ${value.runtimeType} cannot be used for serializing.');
-    }
-  }
-
-  static dynamic _convertApi2DatastoreProperty(api.Value value) {
-    if (value.booleanValue != null)
-      return value.booleanValue;
-    else if (value.integerValue != null)
-      return int.parse(value.integerValue);
-    else if (value.doubleValue != null)
-      return value.doubleValue;
-    else if (value.stringValue != null)
-      return value.stringValue;
-    else if (value.timestampValue != null)
-      return DateTime.parse(value.timestampValue);
-    else if (value.blobValue != null)
-      return datastore.BlobValue(value.blobValueAsBytes);
-    else if (value.keyValue != null)
-      return _convertApi2DatastoreKey(value.keyValue);
-    else if (value.arrayValue != null && value.arrayValue.values != null)
-      return value.arrayValue.values
-          .map(_convertApi2DatastoreProperty)
-          .toList();
-    else if (value.entityValue != null)
-      throw UnsupportedError('Entity values are not supported.');
-    else if (value.geoPointValue != null)
-      throw UnsupportedError('GeoPoint values are not supported.');
-    return null;
-  }
-
-  static datastore.Entity _convertApi2DatastoreEntity(api.Entity entity) {
-    var unindexedProperties = Set<String>();
-    var properties = <String, Object>{};
-
-    if (entity.properties != null) {
-      entity.properties.forEach((String name, api.Value value) {
-        properties[name] = _convertApi2DatastoreProperty(value);
-        if (value.excludeFromIndexes != null && value.excludeFromIndexes) {
-          unindexedProperties.add(name);
-        }
-      });
-    }
-    return datastore.Entity(_convertApi2DatastoreKey(entity.key), properties,
-        unIndexedProperties: unindexedProperties);
-  }
-
-  api.Entity _convertDatastore2ApiEntity(datastore.Entity entity,
-      {bool enforceId = false}) {
-    var apiEntity = api.Entity();
-
-    apiEntity.key = _convertDatastore2ApiKey(entity.key, enforceId: enforceId);
-    apiEntity.properties = {};
-    if (entity.properties != null) {
-      for (var key in entity.properties.keys) {
-        var value = entity.properties[key];
-        bool indexed = false;
-        if (entity.unIndexedProperties != null) {
-          indexed = !entity.unIndexedProperties.contains(key);
-        }
-        var property = _convertDatastore2ApiPropertyValue(value, indexed);
-        apiEntity.properties[key] = property;
-      }
-    }
-    return apiEntity;
-  }
-
-  static Map<datastore.FilterRelation, String> relationMapping = const {
-    datastore.FilterRelation.LessThan: 'LESS_THAN',
-    datastore.FilterRelation.LessThanOrEqual: 'LESS_THAN_OR_EQUAL',
-    datastore.FilterRelation.Equal: 'EQUAL',
-    datastore.FilterRelation.GreatherThan: 'GREATER_THAN',
-    datastore.FilterRelation.GreatherThanOrEqual: 'GREATER_THAN_OR_EQUAL',
-  };
-
-  api.Filter _convertDatastore2ApiFilter(datastore.Filter filter) {
-    var pf = api.PropertyFilter();
-    var operator = relationMapping[filter.relation];
-    if (operator == null) {
-      throw ArgumentError('Unknown filter relation: ${filter.relation}.');
-    }
-    pf.op = operator;
-    pf.property = api.PropertyReference()..name = filter.name;
-    pf.value =
-        _convertDatastore2ApiPropertyValue(filter.value, true, lists: false);
-    return api.Filter()..propertyFilter = pf;
-  }
-
-  api.Filter _convertDatastoreAncestorKey2ApiFilter(datastore.Key key) {
-    var pf = api.PropertyFilter();
-    pf.op = 'HAS_ANCESTOR';
-    pf.property = api.PropertyReference()..name = '__key__';
-    pf.value = api.Value()
-      ..keyValue = _convertDatastore2ApiKey(key, enforceId: true);
-    return api.Filter()..propertyFilter = pf;
-  }
-
-  api.Filter _convertDatastore2ApiFilters(
-      List<datastore.Filter> filters, datastore.Key ancestorKey) {
-    if ((filters == null || filters.isEmpty) && ancestorKey == null) {
-      return null;
-    }
-
-    var compFilter = api.CompositeFilter();
-    if (filters != null) {
-      compFilter.filters = filters.map(_convertDatastore2ApiFilter).toList();
-    }
-    if (ancestorKey != null) {
-      var filter = _convertDatastoreAncestorKey2ApiFilter(ancestorKey);
-      if (compFilter.filters == null) {
-        compFilter.filters = [filter];
-      } else {
-        compFilter.filters.add(filter);
-      }
-    }
-    compFilter.op = 'AND';
-    return api.Filter()..compositeFilter = compFilter;
-  }
-
-  api.PropertyOrder _convertDatastore2ApiOrder(datastore.Order order) {
-    var property = api.PropertyReference()..name = order.propertyName;
-    var direction = order.direction == datastore.OrderDirection.Ascending
-        ? 'ASCENDING'
-        : 'DESCENDING';
-    return api.PropertyOrder()
-      ..direction = direction
-      ..property = property;
-  }
-
-  List<api.PropertyOrder> _convertDatastore2ApiOrders(
-      List<datastore.Order> orders) {
-    if (orders == null) return null;
-
-    return orders.map(_convertDatastore2ApiOrder).toList();
-  }
-
-  static Future<Null> _handleError(error, StackTrace stack) {
-    if (error is api.DetailedApiRequestError) {
-      if (error.status == 400) {
-        return Future.error(datastore.ApplicationError(error.message), stack);
-      } else if (error.status == 409) {
-        // NOTE: This is reported as:
-        // "too much contention on these datastore entities"
-        // TODO:
-        return Future.error(datastore.TransactionAbortedError(), stack);
-      } else if (error.status == 412) {
-        return Future.error(datastore.NeedIndexError(), stack);
-      }
-    }
-    return Future.error(error, stack);
-  }
-
-  Future<List<datastore.Key>> allocateIds(List<datastore.Key> keys) {
-    var request = api.AllocateIdsRequest();
-    request
-      ..keys = keys.map((key) {
-        return _convertDatastore2ApiKey(key, enforceId: false);
-      }).toList();
-    return _api.projects.allocateIds(request, _project).then((response) {
-      return response.keys.map(_convertApi2DatastoreKey).toList();
-    }, onError: _handleError);
-  }
-
-  Future<datastore.Transaction> beginTransaction(
-      {bool crossEntityGroup = false}) {
-    var request = api.BeginTransactionRequest();
-    return _api.projects.beginTransaction(request, _project).then((result) {
-      return TransactionImpl(result.transaction);
-    }, onError: _handleError);
-  }
-
-  Future<datastore.CommitResult> commit(
-      {List<datastore.Entity> inserts,
-      List<datastore.Entity> autoIdInserts,
-      List<datastore.Key> deletes,
-      datastore.Transaction transaction}) {
-    var request = api.CommitRequest();
-
-    if (transaction != null) {
-      request.mode = 'TRANSACTIONAL';
-      request.transaction = (transaction as TransactionImpl).data;
-    } else {
-      request.mode = 'NON_TRANSACTIONAL';
-    }
-
-    var mutations = request.mutations = <api.Mutation>[];
-    if (inserts != null) {
-      for (int i = 0; i < inserts.length; i++) {
-        mutations.add(api.Mutation()
-          ..upsert = _convertDatastore2ApiEntity(inserts[i], enforceId: true));
-      }
-    }
-    int autoIdStartIndex = -1;
-    if (autoIdInserts != null) {
-      autoIdStartIndex = mutations.length;
-      for (int i = 0; i < autoIdInserts.length; i++) {
-        mutations.add(api.Mutation()
-          ..insert =
-              _convertDatastore2ApiEntity(autoIdInserts[i], enforceId: false));
-      }
-    }
-    if (deletes != null) {
-      for (int i = 0; i < deletes.length; i++) {
-        mutations.add(api.Mutation()
-          ..delete = _convertDatastore2ApiKey(deletes[i], enforceId: true));
-      }
-    }
-    return _api.projects.commit(request, _project).then((result) {
-      List<datastore.Key> keys;
-      if (autoIdInserts != null && autoIdInserts.isNotEmpty) {
-        List<api.MutationResult> mutationResults = result.mutationResults;
-        assert(autoIdStartIndex != -1);
-        assert(mutationResults.length >=
-            (autoIdStartIndex + autoIdInserts.length));
-        keys = mutationResults
-            .skip(autoIdStartIndex)
-            .take(autoIdInserts.length)
-            .map<datastore.Key>(
-                (api.MutationResult r) => _convertApi2DatastoreKey(r.key))
-            .toList();
-      }
-      return datastore.CommitResult(keys);
-    }, onError: _handleError);
-  }
-
-  Future<List<datastore.Entity>> lookup(List<datastore.Key> keys,
-      {datastore.Transaction transaction}) {
-    var apiKeys = keys.map((key) {
-      return _convertDatastore2ApiKey(key, enforceId: true);
-    }).toList();
-    var request = api.LookupRequest();
-    request.keys = apiKeys;
-    if (transaction != null) {
-      // TODO: Make readOptions more configurable.
-      request.readOptions = api.ReadOptions();
-      request.readOptions.transaction = (transaction as TransactionImpl).data;
-    }
-    return _api.projects.lookup(request, _project).then((response) {
-      if (response.deferred != null && response.deferred.isNotEmpty) {
-        throw datastore.DatastoreError(
-            'Could not successfully look up all keys due to resource '
-            'constraints.');
-      }
-
-      // NOTE: This is worst-case O(n^2)!
-      // Maybe we can optimize this somehow. But the API says:
-      //  message LookupResponse {
-      //    // The order of results in these fields is undefined and has no relation to
-      //    // the order of the keys in the input.
-      //
-      //    // Entities found as ResultType.FULL entities.
-      //    repeated EntityResult found = 1;
-      //
-      //    // Entities not found as ResultType.KEY_ONLY entities.
-      //    repeated EntityResult missing = 2;
-      //
-      //    // A list of keys that were not looked up due to resource constraints.
-      //    repeated Key deferred = 3;
-      //  }
-      var entities = List<datastore.Entity>(apiKeys.length);
-      for (int i = 0; i < apiKeys.length; i++) {
-        var apiKey = apiKeys[i];
-
-        bool found = false;
-
-        if (response.found != null) {
-          for (var result in response.found) {
-            if (_compareApiKey(apiKey, result.entity.key)) {
-              entities[i] = _convertApi2DatastoreEntity(result.entity);
-              found = true;
-              break;
-            }
-          }
-        }
-
-        if (found) continue;
-
-        if (response.missing != null) {
-          for (var result in response.missing) {
-            if (_compareApiKey(apiKey, result.entity.key)) {
-              entities[i] = null;
-              found = true;
-              break;
-            }
-          }
-        }
-
-        if (!found) {
-          throw datastore.DatastoreError('Invalid server response: '
-              'Tried to lookup ${apiKey.toJson()} but entity was neither in '
-              'missing nor in found.');
-        }
-      }
-      return entities;
-    }, onError: _handleError);
-  }
-
-  Future<Page<datastore.Entity>> query(datastore.Query query,
-      {datastore.Partition partition, datastore.Transaction transaction}) {
-    // NOTE: We explicitly do not set 'limit' here, since this is handled by
-    // QueryPageImpl.runQuery.
-    var apiQuery = api.Query()
-      ..filter = _convertDatastore2ApiFilters(query.filters, query.ancestorKey)
-      ..order = _convertDatastore2ApiOrders(query.orders)
-      ..offset = query.offset;
-
-    if (query.kind != null) {
-      apiQuery.kind = [api.KindExpression()..name = query.kind];
-    }
-
-    var request = api.RunQueryRequest();
-    request.query = apiQuery;
-    if (transaction != null) {
-      // TODO: Make readOptions more configurable.
-      request.readOptions = api.ReadOptions();
-      request.readOptions.transaction = (transaction as TransactionImpl).data;
-    }
-    if (partition != null) {
-      request.partitionId = api.PartitionId()
-        ..namespaceId = partition.namespace;
-    }
-
-    return QueryPageImpl.runQuery(_api, _project, request, query.limit)
-        .catchError(_handleError);
-  }
-
-  Future rollback(datastore.Transaction transaction) {
-    // TODO: Handle [transaction]
-    var request = api.RollbackRequest()
-      ..transaction = (transaction as TransactionImpl).data;
-    return _api.projects.rollback(request, _project).catchError(_handleError);
-  }
-}
-
-class QueryPageImpl implements Page<datastore.Entity> {
-  static const int MAX_ENTITIES_PER_RESPONSE = 2000;
-
-  final api.DatastoreApi _api;
-  final String _project;
-  final api.RunQueryRequest _nextRequest;
-  final List<datastore.Entity> _entities;
-  final bool _isLast;
-
-  // This might be `null` in which case we request as many as we can get.
-  final int _remainingNumberOfEntities;
-
-  QueryPageImpl(this._api, this._project, this._nextRequest, this._entities,
-      this._isLast, this._remainingNumberOfEntities);
-
-  static Future<QueryPageImpl> runQuery(api.DatastoreApi api, String project,
-      api.RunQueryRequest request, int limit,
-      {int batchSize}) {
-    int batchLimit = batchSize;
-    if (batchLimit == null) {
-      batchLimit = MAX_ENTITIES_PER_RESPONSE;
-    }
-    if (limit != null && limit < batchLimit) {
-      batchLimit = limit;
-    }
-
-    request.query.limit = batchLimit;
-
-    return api.projects.runQuery(request, project).then((response) {
-      var returnedEntities = const <datastore.Entity>[];
-
-      var batch = response.batch;
-      if (batch.entityResults != null) {
-        returnedEntities = batch.entityResults
-            .map((result) => result.entity)
-            .map(DatastoreImpl._convertApi2DatastoreEntity)
-            .toList();
-      }
-
-      // This check is only necessary for the first request/response pair
-      // (if offset was supplied).
-      if (request.query.offset != null &&
-          request.query.offset > 0 &&
-          request.query.offset != response.batch.skippedResults) {
-        throw datastore.DatastoreError(
-            'Server did not skip over the specified ${request.query.offset} '
-            'entities.');
-      }
-
-      if (limit != null && returnedEntities.length > limit) {
-        throw datastore.DatastoreError(
-            'Server returned more entities than the limit for the request '
-            '(${request.query.limit}) allowed.');
-      }
-
-      // FIXME: TODO: Big hack!
-      // It looks like Apiary/Atlas is currently broken.
-      /*
-      if (limit != null &&
-          returnedEntities.length < batchLimit &&
-          response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT') {
-        throw new datastore.DatastoreError(
-            'Server returned a response with fewer entities than the limit, '
-            'but signals there are more results after the limit.');
-      }
-      */
-
-      // In case a limit was specified, we need to subtract the number of
-      // entities we already got.
-      // (the checks above guarantee that this subtraction is >= 0).
-      int remainingEntities;
-      if (limit != null) {
-        remainingEntities = limit - returnedEntities.length;
-      }
-
-      // If the server signals there are more entities and we either have no
-      // limit or our limit has not been reached, we set `moreBatches` to
-      // `true`.
-      bool moreBatches = (remainingEntities == null || remainingEntities > 0) &&
-          response.batch.moreResults == 'MORE_RESULTS_AFTER_LIMIT';
-
-      bool gotAll = limit != null && remainingEntities == 0;
-      bool noMore = response.batch.moreResults == 'NO_MORE_RESULTS';
-      bool isLast = gotAll || noMore;
-
-      // As a sanity check, we assert that `moreBatches XOR isLast`.
-      assert(isLast != moreBatches);
-
-      // FIXME: TODO: Big hack!
-      // It looks like Apiary/Atlas is currently broken.
-      if (moreBatches && returnedEntities.isEmpty) {
-        print('Warning: The Google Cloud Datastore API returned a bogus '
-            'response. Trying a workaround.');
-        isLast = true;
-        moreBatches = false;
-      }
-
-      if (!isLast && response.batch.endCursor == null) {
-        throw datastore.DatastoreError(
-            'Server did not supply an end cursor, even though the query '
-            'is not done.');
-      }
-
-      if (isLast) {
-        return QueryPageImpl(
-            api, project, request, returnedEntities, true, null);
-      } else {
-        // NOTE: We reuse the old RunQueryRequest object here.
-
-        // The offset will be 0 from now on, since the first request will have
-        // skipped over the first `offset` results.
-        request.query.offset = 0;
-
-        // Furthermore we set the startCursor to the endCursor of the previous
-        // result batch, so we can continue where we left off.
-        request.query.startCursor = batch.endCursor;
-
-        return QueryPageImpl(
-            api, project, request, returnedEntities, false, remainingEntities);
-      }
-    });
-  }
-
-  bool get isLast => _isLast;
-
-  List<datastore.Entity> get items => _entities;
-
-  Future<Page<datastore.Entity>> next({int pageSize}) {
-    // NOTE: We do not respect [pageSize] here, the only mechanism we can
-    // really use is `query.limit`, but this is user-specified when making
-    // the query.
-    if (isLast) {
-      return Future.sync(() {
-        throw ArgumentError('Cannot call next() on last page.');
-      });
-    }
-
-    return QueryPageImpl.runQuery(
-            _api, _project, _nextRequest, _remainingNumberOfEntities)
-        .catchError(DatastoreImpl._handleError);
-  }
-}
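For orientation, a hedged sketch of using this low-level implementation directly. It assumes `package:googleapis_auth` is used to obtain the authenticated HTTP client; the project id, kind and property names are invented.

    import 'dart:async';

    import 'package:gcloud/datastore.dart' as datastore;
    import 'package:gcloud/src/datastore_impl.dart';
    import 'package:googleapis_auth/auth_io.dart' as auth;

    Future<void> example(auth.ServiceAccountCredentials credentials) async {
      // An authenticated client carrying the Datastore OAuth2 scopes.
      var client =
          await auth.clientViaServiceAccount(credentials, DatastoreImpl.SCOPES);
      try {
        var db = DatastoreImpl(client, 'my-project');

        // Insert one entity and let the server allocate its integer id
        // (a `null` id is allowed because inserts with auto-generated ids
        // are converted with `enforceId: false`).
        var key = datastore.Key([datastore.KeyElement('Greeting', null)]);
        var entity = datastore.Entity(key, {'message': 'hello'});
        var result = await db.commit(autoIdInserts: [entity]);

        // Read the entity back using the key returned by the commit.
        var entities = await db.lookup([result.autoIdInsertKeys.single]);
        print(entities.single.properties['message']);
      } finally {
        client.close();
      }
    }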
diff --git a/gcloud/lib/src/db/annotations.dart b/gcloud/lib/src/db/annotations.dart
deleted file mode 100644
index 5868a33..0000000
--- a/gcloud/lib/src/db/annotations.dart
+++ /dev/null
@@ -1,306 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.db;
-
-/// Annotation used to mark dart classes which can be stored into datastore.
-///
-/// The `Kind` annotation on a class as well as other `Property` annotations on
-/// fields or getters of the class itself (and any of its superclasses) up to
-/// the [Model] class describe the *mapping* of *dart objects* to datastore
-/// *entities*.
-///
-/// An "entity" is an object which can be stored into Google Cloud Datastore.
-/// It contains a number of named "properties", some of which may be indexed,
-/// others not. A "property" value can be of a limited set of supported
-/// types (such as `int` and `String`).
-///
-/// Here is an example of a dart model class which can be stored into datastore:
-///     @Kind()
-///     class Person extends db.Model {
-///       @StringProperty()
-///       String name;
-///
-///       @IntProperty()
-///       int age;
-///
-///       @DateTimeProperty()
-///       DateTime dateOfBirth;
-///     }
-class Kind {
-  /// The kind name used when saving objects to datastore.
-  ///
-  /// If `null`, the name will be the same as the name of the class on which
-  /// the annotation is placed.
-  final String name;
-
-  /// The type of id to use, either [IdType.Integer] or [IdType.String].
-  final IdType idType;
-
-  /// Annotation specifying the name of this kind and whether to use integer or
-  /// string `id`s.
-  ///
-  /// If `name` is omitted, it will default to the name of the class to which
-  /// this annotation is attached.
-  const Kind({this.name, this.idType = IdType.Integer});
-}
-
-/// The type used for ids of an entity.
-class IdType {
-  /// Use integer ids for identifying entities.
-  static const IdType Integer = IdType('Integer');
-
-  /// Use string ids for identifying entities.
-  static const IdType String = IdType('String');
-
-  final core.String _type;
-
-  const IdType(this._type);
-
-  core.String toString() => "IdType: $_type";
-}
-
-/// Describes a property of an Entity.
-///
-/// Please see [Kind] for an example of how to use them.
-abstract class Property {
-  /// The name of the property.
-  ///
-  /// If it is `null`, the name will be the same as used in the
-  /// model class.
-  final String propertyName;
-
-  /// Specifies whether this property is required or not.
-  ///
-  /// If required is `true`, it will be enforced when saving model objects to
-  /// the datastore and when retrieving them.
-  final bool required;
-
-  /// Specifies whether this property should be indexed or not.
-  ///
-  /// When running queries on this property, it is necessary to set [indexed] to
-  /// `true`.
-  final bool indexed;
-
-  const Property(
-      {this.propertyName, this.required = false, this.indexed = true});
-
-  bool validate(ModelDB db, Object value) {
-    if (required && value == null) return false;
-    return true;
-  }
-
-  Object encodeValue(ModelDB db, Object value, {bool forComparison = false});
-
-  Object decodePrimitiveValue(ModelDB db, Object value);
-}
-
-/// An abstract base class for primitive properties which can e.g. be used
-/// within a composed `ListProperty`.
-abstract class PrimitiveProperty extends Property {
-  const PrimitiveProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) =>
-      value;
-
-  Object decodePrimitiveValue(ModelDB db, Object value) => value;
-}
-
-/// A boolean [Property].
-///
-/// It will validate that values are booleans before writing them to the
-/// datastore and when reading them back.
-class BoolProperty extends PrimitiveProperty {
-  const BoolProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is bool);
-}
-
-/// An integer [Property].
-///
-/// It will validate that values are integers before writing them to the
-/// datastore and when reading them back.
-class IntProperty extends PrimitiveProperty {
-  const IntProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is int);
-}
-
-/// A double [Property].
-///
-/// It will validate that values are doubles before writing them to the
-/// datastore and when reading them back.
-class DoubleProperty extends PrimitiveProperty {
-  const DoubleProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is double);
-}
-
-/// A string [Property].
-///
-/// It will validate that values are strings before writing them to the
-/// datastore and when reading them back.
-class StringProperty extends PrimitiveProperty {
-  const StringProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is String);
-}
-
-/// A key [Property].
-///
-/// It will validate that values are keys before writing them to the
-/// datastore and when reading them back.
-class ModelKeyProperty extends PrimitiveProperty {
-  const ModelKeyProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is Key);
-
-  Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) {
-    if (value == null) return null;
-    return db.toDatastoreKey(value as Key);
-  }
-
-  Object decodePrimitiveValue(ModelDB db, Object value) {
-    if (value == null) return null;
-    return db.fromDatastoreKey(value as ds.Key);
-  }
-}
-
-/// A binary blob [Property].
-///
-/// It will validate that values are blobs before writing them to the
-/// datastore and when reading them back. Blob values will be represented by
-/// List<int>.
-class BlobProperty extends PrimitiveProperty {
-  const BlobProperty({String propertyName, bool required = false})
-      : super(propertyName: propertyName, required: required, indexed: false);
-
-  // NOTE: We don't validate that the entries of the list are really integers
-  // in the range 0..255!
-  // If an untyped list was created the type check will always succeed. i.e.
-  //   "[1, true, 'bar'] is List<int>" evaluates to `true`
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is List<int>);
-
-  Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) {
-    if (value == null) return null;
-    return ds.BlobValue(value as List<int>);
-  }
-
-  Object decodePrimitiveValue(ModelDB db, Object value) {
-    if (value == null) return null;
-
-    return (value as ds.BlobValue).bytes;
-  }
-}
-
-/// A datetime [Property].
-///
-/// It will validate that values are DateTime objects before writing them to the
-/// datastore and when reading them back.
-class DateTimeProperty extends PrimitiveProperty {
-  const DateTimeProperty(
-      {String propertyName, bool required = false, bool indexed = true})
-      : super(propertyName: propertyName, required: required, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) =>
-      super.validate(db, value) && (value == null || value is DateTime);
-
-  Object decodePrimitiveValue(ModelDB db, Object value) {
-    if (value is int) {
-      return DateTime.fromMillisecondsSinceEpoch(value ~/ 1000, isUtc: true);
-    }
-    return value;
-  }
-}
-
-/// A composed list [Property], with a `subProperty` for the list elements.
-///
-/// It will validate that values are List objects before writing them to the
-/// datastore and when reading them back. It will also validate the elements
-/// of the list itself.
-class ListProperty extends Property {
-  final PrimitiveProperty subProperty;
-
-  // TODO: We want to support optional list properties as well.
-  // Get rid of "required: true" here.
-  const ListProperty(this.subProperty,
-      {String propertyName, bool indexed = true})
-      : super(propertyName: propertyName, required: true, indexed: indexed);
-
-  bool validate(ModelDB db, Object value) {
-    if (!super.validate(db, value) || value is! List) return false;
-
-    for (var entry in value) {
-      if (!subProperty.validate(db, entry)) return false;
-    }
-    return true;
-  }
-
-  Object encodeValue(ModelDB db, Object value, {bool forComparison = false}) {
-    if (forComparison) {
-      // If we have comparison of list properties (i.e. repeated property names)
-      // the comparison object must not be a list, but the value itself.
-      // i.e.
-      //
-      //   class Article {
-      //      ...
-      //      @ListProperty(StringProperty())
-      //      List<String> tags;
-      //      ...
-      //   }
-      //
-      // should be queried via
-      //
-      //   await (db.query<Article>()..filter('tags =', 'Dart')).run().toList();
-      //
-      // So the [value] for the comparison is of type `String` and not
-      // `List<String>`!
-      return subProperty.encodeValue(db, value, forComparison: true);
-    }
-
-    if (value == null) return null;
-    var list = value as List;
-    if (list.isEmpty) return null;
-    if (list.length == 1) return subProperty.encodeValue(db, list[0]);
-    return list.map((value) => subProperty.encodeValue(db, value)).toList();
-  }
-
-  Object decodePrimitiveValue(ModelDB db, Object value) {
-    if (value == null) return [];
-    if (value is! List) return [subProperty.decodePrimitiveValue(db, value)];
-    return (value as List)
-        .map((entry) => subProperty.decodePrimitiveValue(db, entry))
-        .toList();
-  }
-}
-
-/// A convenience [Property] for list of strings.
-class StringListProperty extends ListProperty {
-  const StringListProperty({String propertyName, bool indexed = true})
-      : super(const StringProperty(),
-            propertyName: propertyName, indexed: indexed);
-
-  @override
-  Object decodePrimitiveValue(ModelDB db, Object value) {
-    return (super.decodePrimitiveValue(db, value) as core.List).cast<String>();
-  }
-}
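A hedged end-to-end sketch of these annotations in use (the kind, field names and tag value are invented). Note in particular that a repeated (list) property is filtered with a single element value, as described in `ListProperty.encodeValue` above.

    import 'dart:async';

    import 'package:gcloud/db.dart' as db;

    @db.Kind(name: 'Article', idType: db.IdType.Integer)
    class Article extends db.Model {
      @db.StringProperty(required: true)
      String title;

      @db.IntProperty(indexed: false)
      int wordCount;

      @db.StringListProperty()
      List<String> tags;
    }

    // Query articles by a single tag value; the list property is compared
    // against the element, not against a List.
    Future<List<Article>> articlesTaggedDart(db.DatastoreDB dbService) {
      var query = dbService.query<Article>()..filter('tags =', 'dart');
      return query.run().toList();
    }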
diff --git a/gcloud/lib/src/db/db.dart b/gcloud/lib/src/db/db.dart
deleted file mode 100644
index 6f98ef0..0000000
--- a/gcloud/lib/src/db/db.dart
+++ /dev/null
@@ -1,371 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.db;
-
-/// A function definition for transactional functions.
-///
-/// The function will be given a [Transaction] object which can be used to make
-/// lookups/queries and queue modifications (inserts/updates/deletes).
-typedef TransactionHandler = Future Function(Transaction transaction);
-
-/// A datastore transaction.
-///
-/// It can be used for making lookups/queries and queue modifications
-/// (inserts/updates/deletes). Finally the transaction can be either committed
-/// or rolled back.
-class Transaction {
-  static const int _TRANSACTION_STARTED = 0;
-  static const int _TRANSACTION_ROLLED_BACK = 1;
-  static const int _TRANSACTION_COMMITTED = 2;
-
-  final DatastoreDB db;
-  final ds.Transaction _datastoreTransaction;
-
-  final List<Model> _inserts = [];
-  final List<Key> _deletes = [];
-
-  int _transactionState = _TRANSACTION_STARTED;
-
-  Transaction(this.db, this._datastoreTransaction);
-
-  /// Looks up [keys] within this transaction.
-  Future<List<T>> lookup<T extends Model>(List<Key> keys) {
-    return _lookupHelper<T>(db, keys,
-        datastoreTransaction: _datastoreTransaction);
-  }
-
-  /// Enqueues [inserts] and [deletes] which should be committed at commit time.
-  void queueMutations({List<Model> inserts, List<Key> deletes}) {
-    _checkSealed();
-    if (inserts != null) {
-      _inserts.addAll(inserts);
-    }
-    if (deletes != null) {
-      _deletes.addAll(deletes);
-    }
-  }
-
-  /// Query for models of type [T] with [ancestorKey].
-  ///
-  /// Note that [ancestorKey] is required, since a transaction is not allowed to
-  /// touch/look at an arbitrary number of rows.
-  Query<T> query<T extends Model>(Key ancestorKey, {Partition partition}) {
-    // TODO(#25): The `partition` element is redundant and should be removed.
-    if (partition == null) {
-      partition = ancestorKey.partition;
-    } else if (ancestorKey.partition != partition) {
-      throw ArgumentError(
-          'Ancestor queries must have the same partition in the ancestor key '
-          'as the partition where the query executes in.');
-    }
-    _checkSealed();
-    return Query<T>(db,
-        partition: partition,
-        ancestorKey: ancestorKey,
-        datastoreTransaction: _datastoreTransaction);
-  }
-
-  /// Rolls this transaction back.
-  Future rollback() {
-    _checkSealed(changeState: _TRANSACTION_ROLLED_BACK);
-    return db.datastore.rollback(_datastoreTransaction);
-  }
-
-  /// Commits this transaction including all of the queued mutations.
-  Future commit() {
-    _checkSealed(changeState: _TRANSACTION_COMMITTED);
-    return _commitHelper(db,
-        inserts: _inserts,
-        deletes: _deletes,
-        datastoreTransaction: _datastoreTransaction);
-  }
-
-  _checkSealed({int changeState}) {
-    if (_transactionState == _TRANSACTION_COMMITTED) {
-      throw StateError('The transaction has already been committed.');
-    } else if (_transactionState == _TRANSACTION_ROLLED_BACK) {
-      throw StateError('The transaction has already been rolled back.');
-    }
-    if (changeState != null) {
-      _transactionState = changeState;
-    }
-  }
-}
-
-class Query<T extends Model> {
-  final _relationMapping = const <String, ds.FilterRelation>{
-    '<': ds.FilterRelation.LessThan,
-    '<=': ds.FilterRelation.LessThanOrEqual,
-    '>': ds.FilterRelation.GreatherThan,
-    '>=': ds.FilterRelation.GreatherThanOrEqual,
-    '=': ds.FilterRelation.Equal,
-  };
-
-  final DatastoreDB _db;
-  final ds.Transaction _transaction;
-  final String _kind;
-
-  final Partition _partition;
-  final Key _ancestorKey;
-
-  final List<ds.Filter> _filters = [];
-  final List<ds.Order> _orders = [];
-  int _offset;
-  int _limit;
-
-  Query(DatastoreDB dbImpl,
-      {Partition partition,
-      Key ancestorKey,
-      ds.Transaction datastoreTransaction})
-      : _db = dbImpl,
-        _kind = dbImpl.modelDB.kindName(T),
-        _partition = partition,
-        _ancestorKey = ancestorKey,
-        _transaction = datastoreTransaction;
-
-  /// Adds a filter to this [Query].
-  ///
-  /// [filterString] has the form "name OP" where 'name' is a fieldName of the
-  /// model and OP is an operator. The following operators are supported:
-  ///
-  ///   * '<' (less than)
-  ///   * '<=' (less than or equal)
-  ///   * '>' (greater than)
-  ///   * '>=' (greater than or equal)
-  ///   * '=' (equal)
-  ///
-  /// [comparisonObject] is the object for comparison.
-  void filter(String filterString, Object comparisonObject) {
-    var parts = filterString.split(' ');
-    if (parts.length != 2 || !_relationMapping.containsKey(parts[1])) {
-      throw ArgumentError("Invalid filter string '$filterString'.");
-    }
-
-    var name = parts[0];
-    var comparison = parts[1];
-    var propertyName = _convertToDatastoreName(name);
-
-    // This is for backwards compatibility: We allow [datastore.Key]s for now.
-    // TODO: We should remove the condition in a major version update of
-    // `package:gcloud`.
-    if (comparisonObject is! ds.Key) {
-      comparisonObject = _db.modelDB
-          .toDatastoreValue(_kind, name, comparisonObject, forComparison: true);
-    }
-    _filters.add(ds.Filter(
-        _relationMapping[comparison], propertyName, comparisonObject));
-  }
-
-  /// Adds an order to this [Query].
-  ///
-  /// [orderString] has the form "-name" where 'name' is a fieldName of the model
-  /// and the optional '-' says whether the order is descending or ascending.
-  void order(String orderString) {
-    // TODO: validate [orderString] (e.g. is name valid)
-    if (orderString.startsWith('-')) {
-      _orders.add(ds.Order(ds.OrderDirection.Decending,
-          _convertToDatastoreName(orderString.substring(1))));
-    } else {
-      _orders.add(ds.Order(
-          ds.OrderDirection.Ascending, _convertToDatastoreName(orderString)));
-    }
-  }
-
-  /// Sets the [offset] of this [Query].
-  ///
-  /// When running this query, [offset] results will be skipped.
-  void offset(int offset) {
-    _offset = offset;
-  }
-
-  /// Sets the [limit] of this [Query].
-  ///
-  /// When running this query, a maximum of [limit] results will be returned.
-  void limit(int limit) {
-    _limit = limit;
-  }
-
-  /// Execute this [Query] on the datastore.
-  ///
-  /// Outside of transactions this method might return stale data or may not
-  /// return the newest updates performed on the datastore since updates
-  /// will be reflected in the indices in an eventually consistent way.
-  Stream<T> run() {
-    ds.Key ancestorKey;
-    if (_ancestorKey != null) {
-      ancestorKey = _db.modelDB.toDatastoreKey(_ancestorKey);
-    }
-    var query = ds.Query(
-        ancestorKey: ancestorKey,
-        kind: _kind,
-        filters: _filters,
-        orders: _orders,
-        offset: _offset,
-        limit: _limit);
-
-    ds.Partition partition;
-    if (_partition != null) {
-      partition = ds.Partition(_partition.namespace);
-    }
-
-    return StreamFromPages<ds.Entity>((int pageSize) {
-      return _db.datastore
-          .query(query, transaction: _transaction, partition: partition);
-    }).stream.map<T>(_db.modelDB.fromDatastoreEntity);
-  }
-
-  // TODO:
-  // - add runPaged() returning Page<Model>
-  // - add run*() method once we have EntityResult{Entity,Cursor} in low-level
-  //   API.
-
-  String _convertToDatastoreName(String name) {
-    var propertyName = _db.modelDB.fieldNameToPropertyName(_kind, name);
-    if (propertyName == null) {
-      throw ArgumentError("Field $name is not available for kind $_kind");
-    }
-    return propertyName;
-  }
-}
-
-class DatastoreDB {
-  final ds.Datastore datastore;
-  final ModelDB _modelDB;
-  Partition _defaultPartition;
-
-  DatastoreDB(this.datastore, {ModelDB modelDB, Partition defaultPartition})
-      : _modelDB = modelDB != null ? modelDB : ModelDBImpl() {
-    _defaultPartition =
-        defaultPartition != null ? defaultPartition : Partition(null);
-  }
-
-  /// The [ModelDB] used to serialize/deserialize objects.
-  ModelDB get modelDB => _modelDB;
-
-  /// Gets the empty key using the default [Partition].
-  ///
-  /// Model keys with parent set to [emptyKey] will create their own entity
-  /// groups.
-  Key get emptyKey => defaultPartition.emptyKey;
-
-  /// Gets the default [Partition].
-  Partition get defaultPartition => _defaultPartition;
-
-  /// Creates a new [Partition] with namespace [namespace].
-  Partition newPartition(String namespace) {
-    return Partition(namespace);
-  }
-
-  /// Begins a new transaction.
-  ///
-  /// A transaction can touch only a limited number of entity groups. This limit
-  /// is currently 5.
-  // TODO: Add retries and/or auto commit/rollback.
-  Future withTransaction(TransactionHandler transactionHandler) {
-    return datastore
-        .beginTransaction(crossEntityGroup: true)
-        .then((datastoreTransaction) {
-      var transaction = Transaction(this, datastoreTransaction);
-      return transactionHandler(transaction);
-    });
-  }
-
-  /// Build a query for models of type [T].
-  Query<T> query<T extends Model>({Partition partition, Key ancestorKey}) {
-    // TODO(#26): There is only one case where `partition` is not redundant
-    // Namely if `ancestorKey == null` and `partition != null`. We could
-    // say we get rid of `partition` and enforce `ancestorKey` to
-    // be `Partition.emptyKey`?
-    if (partition == null) {
-      if (ancestorKey != null) {
-        partition = ancestorKey.partition;
-      } else {
-        partition = defaultPartition;
-      }
-    } else if (ancestorKey != null && partition != ancestorKey.partition) {
-      throw ArgumentError(
-          'Ancestor queries must have the same partition in the ancestor key '
-          'as the partition where the query executes in.');
-    }
-    return Query<T>(this, partition: partition, ancestorKey: ancestorKey);
-  }
-
-  /// Looks up [keys] in the datastore and returns a list of [Model] objects.
-  ///
-  /// For transactions, please use [withTransaction] and call the [lookup]
-  /// method on the [Transaction] object passed to the handler.
-  Future<List<T>> lookup<T extends Model>(List<Key> keys) {
-    return _lookupHelper<T>(this, keys);
-  }
-
-  /// Add [inserts] to the datastore and remove [deletes] from it.
-  ///
-  /// The order of inserts and deletes is not specified. When the commit is done
-  /// direct lookups will see the effect but non-ancestor queries will see the
-  /// change in an eventually consistent way.
-  ///
-  /// For transactions, please use [withTransaction] and the [Transaction]
-  /// object passed to the handler.
-  Future commit({List<Model> inserts, List<Key> deletes}) {
-    return _commitHelper(this, inserts: inserts, deletes: deletes);
-  }
-}
-
-Future _commitHelper(DatastoreDB db,
-    {List<Model> inserts,
-    List<Key> deletes,
-    ds.Transaction datastoreTransaction}) {
-  List<ds.Entity> entityInserts, entityAutoIdInserts;
-  List<ds.Key> entityDeletes;
-  var autoIdModelInserts;
-  if (inserts != null) {
-    entityInserts = <ds.Entity>[];
-    entityAutoIdInserts = <ds.Entity>[];
-    autoIdModelInserts = <Model>[];
-
-    for (var model in inserts) {
-      // If parent was not explicitly set, we assume this model will map to
-      // its own entity group.
-      if (model.parentKey == null) {
-        model.parentKey = db.defaultPartition.emptyKey;
-      }
-      if (model.id == null) {
-        autoIdModelInserts.add(model);
-        entityAutoIdInserts.add(db.modelDB.toDatastoreEntity(model));
-      } else {
-        entityInserts.add(db.modelDB.toDatastoreEntity(model));
-      }
-    }
-  }
-  if (deletes != null) {
-    entityDeletes = deletes.map(db.modelDB.toDatastoreKey).toList();
-  }
-
-  return db.datastore
-      .commit(
-          inserts: entityInserts,
-          autoIdInserts: entityAutoIdInserts,
-          deletes: entityDeletes,
-          transaction: datastoreTransaction)
-      .then((ds.CommitResult result) {
-    if (entityAutoIdInserts != null && entityAutoIdInserts.isNotEmpty) {
-      for (var i = 0; i < result.autoIdInsertKeys.length; i++) {
-        var key = db.modelDB.fromDatastoreKey(result.autoIdInsertKeys[i]);
-        autoIdModelInserts[i].parentKey = key.parent;
-        autoIdModelInserts[i].id = key.id;
-      }
-    }
-  });
-}
-
-Future<List<T>> _lookupHelper<T extends Model>(DatastoreDB db, List<Key> keys,
-    {ds.Transaction datastoreTransaction}) {
-  var entityKeys = keys.map(db.modelDB.toDatastoreKey).toList();
-  return db.datastore
-      .lookup(entityKeys, transaction: datastoreTransaction)
-      .then((List<ds.Entity> entities) {
-    return entities.map<T>(db.modelDB.fromDatastoreEntity).toList();
-  });
-}
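
For reference, a minimal usage sketch of the `DatastoreDB` API removed above. This is illustrative only: `Person` stands for any Model subclass registered via a `Kind` annotation (see model_db_impl.dart below), and `db` is assumed to be an already configured `DatastoreDB` instance.

    Future<void> datastoreDbExample(DatastoreDB db) async {
      // Parent not set explicitly: the model gets its own entity group and,
      // because id is null, an id is auto-allocated at commit time.
      final person = Person()..parentKey = db.emptyKey;
      await db.commit(inserts: [person]);

      // Direct lookups by key are strongly consistent; entries are null for
      // keys that do not exist.
      final fetched = await db.lookup<Person>([person.key]);
      print(fetched.length);

      // Transactions may touch at most 5 entity groups; lookups go through
      // the Transaction object passed to the handler.
      await db.withTransaction((Transaction tx) async {
        final inTx = await tx.lookup<Person>([person.key]);
        print(inTx.length);
        // ... queue mutations on `tx` and commit it here ...
      });
    }
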
diff --git a/gcloud/lib/src/db/model_db.dart b/gcloud/lib/src/db/model_db.dart
deleted file mode 100644
index 4d7c44e..0000000
--- a/gcloud/lib/src/db/model_db.dart
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.db;
-
-/// A database of all registered models.
-///
-/// Responsible for converting between Dart model objects and datastore entities.
-abstract class ModelDB {
-  /// Converts a [ds.Key] to a [Key].
-  Key fromDatastoreKey(ds.Key datastoreKey);
-
-  /// Converts a [Key] to a [ds.Key].
-  ds.Key toDatastoreKey(Key dbKey);
-
-  /// Converts a [Model] instance to a [ds.Entity].
-  ds.Entity toDatastoreEntity(Model model);
-
-  /// Converts a [ds.Entity] to a [Model] instance.
-  T fromDatastoreEntity<T extends Model>(ds.Entity entity);
-
-  /// Returns the kind name for instances of [type].
-  String kindName(Type type);
-
-  /// Returns the property name used for [fieldName] in [kind].
-  // TODO: Get rid of this eventually.
-  String fieldNameToPropertyName(String kind, String fieldName);
-
-  /// Converts [value] according to the [Property] named [fieldName] in [kind].
-  Object toDatastoreValue(String kind, String fieldName, Object value,
-      {bool forComparison = false});
-}
diff --git a/gcloud/lib/src/db/model_db_impl.dart b/gcloud/lib/src/db/model_db_impl.dart
deleted file mode 100644
index 3218561..0000000
--- a/gcloud/lib/src/db/model_db_impl.dart
+++ /dev/null
@@ -1,547 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.db;
-
-/// An implementation of [ModelDB] based on model class annotations.
-///
-/// The two constructors will scan loaded dart libraries for classes with a
-/// [Kind] annotation.
-///
-/// An example of how to write a model class is:
-///     @Kind()
-///     class Person extends db.Model {
-///       @StringProperty()
-///       String name;
-///
-///       @IntProperty()
-///       int age;
-///
-///       @DateTimeProperty()
-///       DateTime dateOfBirth;
-///     }
-///
-/// These classes must either extend [Model] or [ExpandoModel]. Furthermore
-/// they must have an empty default constructor which can be used to construct
-/// model objects when doing lookups/queries from datastore.
-class ModelDBImpl implements ModelDB {
-  final Map<_ModelDescription, Map<String, Property>> _modelDesc2Properties =
-      {};
-  final Map<String, _ModelDescription> _kind2ModelDesc = {};
-  final Map<_ModelDescription, mirrors.ClassMirror> _modelDesc2ClassMirror = {};
-  final Map<_ModelDescription, Type> _type2ModelDesc = {};
-  final Map<Type, _ModelDescription> _modelDesc2Type = {};
-
-  /// Initializes a new [ModelDB] from all libraries.
-  ///
-  /// This will scan all libraries for classes with a [Kind] annotation.
-  ///
-  /// In case an error is encountered (e.g. two model classes with the same kind
-  /// name) a [StateError] will be thrown.
-  ModelDBImpl() {
-    // WARNING: This is O(n) of the source code, which is very bad!
-    // Would be nice to have: `currentMirrorSystem().subclassesOf(Model)`
-    _initialize(mirrors.currentMirrorSystem().libraries.values);
-  }
-
-  /// Initializes a new [ModelDB] from all libraries.
-  ///
-  /// This will scan the given [librarySymbol] for classes with a [Kind]
-  /// annotation.
-  ///
-  /// In case an error is encountered (e.g. two model classes with the same kind
-  /// name) a [StateError] will be thrown.
-  ModelDBImpl.fromLibrary(Symbol librarySymbol) {
-    _initialize([mirrors.currentMirrorSystem().findLibrary(librarySymbol)]);
-  }
-
-  /// Converts a [ds.Key] to a [Key].
-  Key fromDatastoreKey(ds.Key datastoreKey) {
-    var namespace = Partition(datastoreKey.partition.namespace);
-    Key key = namespace.emptyKey;
-    for (var element in datastoreKey.elements) {
-      var type = _type2ModelDesc[_kind2ModelDesc[element.kind]];
-      if (type == null) {
-        throw StateError(
-            'Could not find a model associated with kind "${element.kind}". '
-            'Please ensure a model class was annotated with '
-            '`@Kind(name: "${element.kind}")`.');
-      }
-      key = key.append(type, id: element.id);
-    }
-    return key;
-  }
-
-  /// Converts a [Key] to a [ds.Key].
-  ds.Key toDatastoreKey(Key dbKey) {
-    List<ds.KeyElement> elements = [];
-    var currentKey = dbKey;
-    while (!currentKey.isEmpty) {
-      var id = currentKey.id;
-
-      var modelDescription = _modelDescriptionForType(currentKey.type);
-      var kind = modelDescription.kindName(this);
-
-      bool useIntegerId = modelDescription.useIntegerId;
-
-      if (useIntegerId && id != null && id is! int) {
-        throw ArgumentError('Expected an integer id property but '
-            'id was of type ${id.runtimeType}');
-      }
-      if (!useIntegerId && (id != null && id is! String)) {
-        throw ArgumentError('Expected a string id property but '
-            'id was of type ${id.runtimeType}');
-      }
-
-      elements.add(ds.KeyElement(kind, id));
-      currentKey = currentKey.parent;
-    }
-    Partition partition = currentKey._parent as Partition;
-    return ds.Key(elements.reversed.toList(),
-        partition: ds.Partition(partition.namespace));
-  }
-
-  /// Converts a [Model] instance to a [ds.Entity].
-  ds.Entity toDatastoreEntity(Model model) {
-    try {
-      var modelDescription = _modelDescriptionForType(model.runtimeType);
-      return modelDescription.encodeModel(this, model);
-    } catch (error, stack) {
-      throw ArgumentError('Error while encoding entity ($error, $stack).');
-    }
-  }
-
-  /// Converts a [ds.Entity] to a [Model] instance.
-  T fromDatastoreEntity<T extends Model>(ds.Entity entity) {
-    if (entity == null) return null;
-
-    Key key = fromDatastoreKey(entity.key);
-    var kind = entity.key.elements.last.kind;
-    var modelDescription = _kind2ModelDesc[kind];
-    if (modelDescription == null) {
-      throw StateError('Trying to deserialize entity of kind '
-          '$kind, but no Model class available for it.');
-    }
-
-    try {
-      return modelDescription.decodeEntity<T>(this, key, entity);
-    } catch (error, stack) {
-      throw StateError('Error while decoding entity ($error, $stack).');
-    }
-  }
-
-  /// Returns the string representation of the kind of model class [type].
-  ///
-  /// If the model class [type] is not found, an [ArgumentError] is thrown.
-  String kindName(Type type) {
-    var kind = _modelDesc2Type[type]?.kind;
-    if (kind == null) {
-      throw ArgumentError('The class $type was not associated with a kind.');
-    }
-    return kind;
-  }
-
-  /// Returns the name of the property corresponding to the kind [kind] and
-  /// [fieldName].
-  String fieldNameToPropertyName(String kind, String fieldName) {
-    var modelDescription = _kind2ModelDesc[kind];
-    if (modelDescription == null) {
-      throw ArgumentError('The kind "$kind" is unknown.');
-    }
-    return modelDescription.fieldNameToPropertyName(fieldName);
-  }
-
-  /// Converts [value] according to the [Property] named [name] in [type].
-  Object toDatastoreValue(String kind, String fieldName, Object value,
-      {bool forComparison = false}) {
-    var modelDescription = _kind2ModelDesc[kind];
-    if (modelDescription == null) {
-      throw ArgumentError('The kind "$kind" is unknown.');
-    }
-    return modelDescription.encodeField(this, fieldName, value,
-        forComparison: forComparison);
-  }
-
-  Iterable<_ModelDescription> get _modelDescriptions {
-    return _modelDesc2Type.values;
-  }
-
-  Map<String, Property> _propertiesForModel(
-      _ModelDescription modelDescription) {
-    return _modelDesc2Properties[modelDescription];
-  }
-
-  _ModelDescription _modelDescriptionForType(Type type) {
-    return _modelDesc2Type[type];
-  }
-
-  mirrors.ClassMirror _modelClass(_ModelDescription md) {
-    return _modelDesc2ClassMirror[md];
-  }
-
-  void _initialize(Iterable<mirrors.LibraryMirror> libraries) {
-    libraries.forEach((mirrors.LibraryMirror lm) {
-      lm.declarations.values
-          .whereType<mirrors.ClassMirror>()
-          .where((d) => d.hasReflectedType)
-          .forEach((declaration) {
-        _tryLoadNewModelClass(declaration);
-      });
-    });
-
-    // Ask every [ModelDescription] to compute whatever global state it wants
-    // to have.
-    for (var modelDescription in _modelDescriptions) {
-      modelDescription.initialize(this);
-    }
-
-    // Ask every [ModelDescription] whether we should register it with a given
-    // kind name.
-    for (var modelDescription in _modelDescriptions) {
-      var kindName = modelDescription.kindName(this);
-      if (_kind2ModelDesc.containsKey(kindName)) {
-        throw StateError('Cannot have two ModelDescriptions '
-            'with the same kind ($kindName)');
-      }
-      _kind2ModelDesc[kindName] = modelDescription;
-    }
-  }
-
-  void _tryLoadNewModelClass(mirrors.ClassMirror classMirror) {
-    Kind kindAnnotation;
-    for (mirrors.InstanceMirror instance in classMirror.metadata) {
-      if (instance.reflectee.runtimeType == Kind) {
-        if (kindAnnotation != null) {
-          throw StateError(
-              'Cannot have more than one Kind() annotation '
-              'on a Model class');
-        }
-        kindAnnotation = instance.reflectee as Kind;
-      }
-    }
-
-    if (kindAnnotation != null) {
-      var name = kindAnnotation.name;
-      var integerId = kindAnnotation.idType == IdType.Integer;
-      var stringId = kindAnnotation.idType == IdType.String;
-
-      // Fall back to the class name.
-      if (name == null) {
-        name = mirrors.MirrorSystem.getName(classMirror.simpleName);
-      }
-
-      // This constraint should be guaranteed by the Kind() const constructor.
-      assert((integerId && !stringId) || (!integerId && stringId));
-
-      _tryLoadNewModelClassFull(classMirror, name, integerId);
-    }
-  }
-
-  void _tryLoadNewModelClassFull(
-      mirrors.ClassMirror modelClass, String name, bool useIntegerId) {
-    assert(!_modelDesc2Type.containsKey(modelClass.reflectedType));
-
-    _ModelDescription modelDesc;
-    if (_isExpandoClass(modelClass)) {
-      modelDesc = _ExpandoModelDescription(name, useIntegerId);
-    } else {
-      modelDesc = _ModelDescription(name, useIntegerId);
-    }
-
-    _type2ModelDesc[modelDesc] = modelClass.reflectedType;
-    _modelDesc2Type[modelClass.reflectedType] = modelDesc;
-    _modelDesc2ClassMirror[modelDesc] = modelClass;
-    _modelDesc2Properties[modelDesc] =
-        _propertiesFromModelDescription(modelClass);
-
-    // Ensure we have an empty constructor.
-    bool defaultConstructorFound = false;
-    for (var declaration in modelClass.declarations.values) {
-      if (declaration is mirrors.MethodMirror) {
-        if (declaration.isConstructor &&
-            declaration.constructorName == const Symbol('') &&
-            declaration.parameters.isEmpty) {
-          defaultConstructorFound = true;
-          break;
-        }
-      }
-    }
-    if (!defaultConstructorFound) {
-      throw StateError('Class ${modelClass.simpleName} does not have a default '
-          'constructor.');
-    }
-  }
-
-  Map<String, Property> _propertiesFromModelDescription(
-      mirrors.ClassMirror modelClassMirror) {
-    var properties = Map<String, Property>();
-    var propertyNames = Set<String>();
-
-    // Loop over all classes in the inheritance path up to the Object class.
-    while (modelClassMirror.superclass != null) {
-      var memberMap = modelClassMirror.instanceMembers;
-      // Loop over all declarations (which includes fields)
-      modelClassMirror.declarations
-          .forEach((Symbol fieldSymbol, mirrors.DeclarationMirror decl) {
-        // Look if the symbol is a getter and we have metadata attached to it.
-        if (memberMap.containsKey(fieldSymbol) &&
-            memberMap[fieldSymbol].isGetter &&
-            decl.metadata != null) {
-          final propertyAnnotations = decl.metadata
-              .map((mirrors.InstanceMirror mirror) => mirror.reflectee)
-              .whereType<Property>()
-              .toList();
-
-          if (propertyAnnotations.length > 1) {
-            throw StateError(
-                'Cannot have more than one Property annotation on a model '
-                'field.');
-          } else if (propertyAnnotations.length == 1) {
-            var property = propertyAnnotations.first;
-
-            // Get a String representation of the field and the value.
-            var fieldName = mirrors.MirrorSystem.getName(fieldSymbol);
-
-            // Determine the name to use for the property in datastore.
-            var propertyName = property.propertyName;
-            if (propertyName == null) propertyName = fieldName;
-
-            if (properties.containsKey(fieldName)) {
-              throw StateError(
-                  'Cannot have two Property objects describing the same field '
-                  'in a model object class hierarchy.');
-            }
-
-            if (propertyNames.contains(propertyName)) {
-              throw StateError(
-                  'Cannot have two Property objects mapping to the same '
-                  'datastore property name "$propertyName".');
-            }
-            properties[fieldName] = property;
-            propertyNames.add(propertyName);
-          }
-        }
-      });
-      modelClassMirror = modelClassMirror.superclass;
-    }
-
-    return properties;
-  }
-
-  bool _isExpandoClass(mirrors.ClassMirror modelClass) {
-    while (modelClass.superclass != modelClass) {
-      if (modelClass.reflectedType == ExpandoModel) {
-        return true;
-      } else if (modelClass.reflectedType == Model) {
-        return false;
-      }
-      modelClass = modelClass.superclass;
-    }
-    throw StateError('This should be unreachable.');
-  }
-}
-
-class _ModelDescription<T extends Model> {
-  final HashMap<String, String> _property2FieldName = HashMap<String, String>();
-  final HashMap<String, String> _field2PropertyName = HashMap<String, String>();
-  final Set<String> _indexedProperties = Set<String>();
-  final Set<String> _unIndexedProperties = Set<String>();
-
-  final String kind;
-  final bool useIntegerId;
-
-  _ModelDescription(this.kind, this.useIntegerId);
-
-  void initialize(ModelDBImpl db) {
-    // Compute propertyName -> fieldName mapping.
-    db._propertiesForModel(this).forEach((String fieldName, Property prop) {
-      // The default of a datastore property name is the fieldName.
-      // It can be overridden with [Property.propertyName].
-      String propertyName = prop.propertyName;
-      if (propertyName == null) propertyName = fieldName;
-
-      _property2FieldName[propertyName] = fieldName;
-      _field2PropertyName[fieldName] = propertyName;
-    });
-
-    // Compute properties & unindexed properties
-    db._propertiesForModel(this).forEach((String fieldName, Property prop) {
-      String propertyName = prop.propertyName;
-      if (propertyName == null) propertyName = fieldName;
-
-      if (prop.indexed) {
-        _indexedProperties.add(propertyName);
-      } else {
-        _unIndexedProperties.add(propertyName);
-      }
-    });
-  }
-
-  String kindName(ModelDBImpl db) => kind;
-
-  ds.Entity encodeModel(ModelDBImpl db, T model) {
-    var key = db.toDatastoreKey(model.key);
-
-    var properties = <String, Object>{};
-    var mirror = mirrors.reflect(model);
-
-    db._propertiesForModel(this).forEach((String fieldName, Property prop) {
-      _encodeProperty(db, model, mirror, properties, fieldName, prop);
-    });
-
-    return ds.Entity(key, properties,
-        unIndexedProperties: _unIndexedProperties);
-  }
-
-  _encodeProperty(ModelDBImpl db, Model model, mirrors.InstanceMirror mirror,
-      Map properties, String fieldName, Property prop) {
-    String propertyName = prop.propertyName;
-    if (propertyName == null) propertyName = fieldName;
-
-    var value =
-        mirror.getField(mirrors.MirrorSystem.getSymbol(fieldName)).reflectee;
-    if (!prop.validate(db, value)) {
-      throw StateError('Property validation failed for '
-          'property $fieldName while trying to serialize entity of kind '
-          '${model.runtimeType}. ');
-    }
-    properties[propertyName] = prop.encodeValue(db, value);
-  }
-
-  H decodeEntity<H extends Model>(ModelDBImpl db, Key key, ds.Entity entity) {
-    if (entity == null) return null;
-
-    // NOTE: this assumes a default constructor for the model classes!
-    var classMirror = db._modelClass(this);
-    var mirror = classMirror.newInstance(const Symbol(''), []);
-
-    // Set the id and the parent key
-    mirror.reflectee.id = key.id;
-    mirror.reflectee.parentKey = key.parent;
-
-    db._propertiesForModel(this).forEach((String fieldName, Property prop) {
-      _decodeProperty(db, entity, mirror, fieldName, prop);
-    });
-    return mirror.reflectee as H;
-  }
-
-  _decodeProperty(ModelDBImpl db, ds.Entity entity,
-      mirrors.InstanceMirror mirror, String fieldName, Property prop) {
-    String propertyName = fieldNameToPropertyName(fieldName);
-
-    var rawValue = entity.properties[propertyName];
-    var value = prop.decodePrimitiveValue(db, rawValue);
-
-    if (!prop.validate(db, value)) {
-      throw StateError('Property validation failed while '
-          'trying to deserialize entity of kind '
-          '${entity.key.elements.last.kind} (property name: $propertyName)');
-    }
-
-    mirror.setField(mirrors.MirrorSystem.getSymbol(fieldName), value);
-  }
-
-  String fieldNameToPropertyName(String fieldName) {
-    return _field2PropertyName[fieldName];
-  }
-
-  String propertyNameToFieldName(ModelDBImpl db, String propertySearchName) {
-    return _property2FieldName[propertySearchName];
-  }
-
-  Object encodeField(ModelDBImpl db, String fieldName, Object value,
-      {bool enforceFieldExists = true, bool forComparison = false}) {
-    Property property = db._propertiesForModel(this)[fieldName];
-    if (property != null) {
-      return property.encodeValue(db, value, forComparison: forComparison);
-    }
-    if (enforceFieldExists) {
-      throw ArgumentError(
-          'A field named "$fieldName" does not exist in kind "$kind".');
-    }
-    return null;
-  }
-}
-
-// NOTE/TODO:
-// Currently expanded properties are only
-//   * decoded if there are no clashes in [usedNames]
-//   * encoded if there are no clashes in [usedNames]
-// We might want to throw an error if there are clashes, because otherwise
-//   - we may end up removing properties after a read-write cycle
-//   - we may end up dropping added properties in a write
-// ([usedNames] := [realFieldNames] + [realPropertyNames])
-class _ExpandoModelDescription extends _ModelDescription<ExpandoModel> {
-  Set<String> realFieldNames;
-  Set<String> realPropertyNames;
-  Set<String> usedNames;
-
-  _ExpandoModelDescription(String kind, bool useIntegerId)
-      : super(kind, useIntegerId);
-
-  void initialize(ModelDBImpl db) {
-    super.initialize(db);
-
-    realFieldNames = Set<String>.from(_field2PropertyName.keys);
-    realPropertyNames = Set<String>.from(_property2FieldName.keys);
-    usedNames = Set()..addAll(realFieldNames)..addAll(realPropertyNames);
-  }
-
-  ds.Entity encodeModel(ModelDBImpl db, ExpandoModel model) {
-    var entity = super.encodeModel(db, model);
-    var properties = entity.properties;
-    model.additionalProperties.forEach((String key, Object value) {
-      // NOTE: All expanded properties will be indexed.
-      if (!usedNames.contains(key)) {
-        properties[key] = value;
-      }
-    });
-    return entity;
-  }
-
-  T decodeEntity<T extends Model>(ModelDBImpl db, Key key, ds.Entity entity) {
-    if (entity == null) return null;
-
-    ExpandoModel model = super.decodeEntity(db, key, entity);
-    var properties = entity.properties;
-    properties.forEach((String key, Object value) {
-      if (!usedNames.contains(key)) {
-        model.additionalProperties[key] = value;
-      }
-    });
-    // TODO: check if there is a more elegant solution than this
-    return model as T;
-  }
-
-  String fieldNameToPropertyName(String fieldName) {
-    String propertyName = super.fieldNameToPropertyName(fieldName);
-    // If the ModelDescription doesn't know about [fieldName], it's an
-    // expanded property, where propertyName == fieldName.
-    if (propertyName == null) propertyName = fieldName;
-    return propertyName;
-  }
-
-  String propertyNameToFieldName(ModelDBImpl db, String propertyName) {
-    String fieldName = super.propertyNameToFieldName(db, propertyName);
-    // If the ModelDescription doesn't know about [propertyName], it's an
-    // expanded property, where propertyName == fieldName.
-    if (fieldName == null) fieldName = propertyName;
-    return fieldName;
-  }
-
-  Object encodeField(ModelDBImpl db, String fieldName, Object value,
-      {bool enforceFieldExists = true, bool forComparison = false}) {
-    // The [enforceFieldExists] argument is intentionally ignored.
-
-    Object primitiveValue = super.encodeField(db, fieldName, value,
-        enforceFieldExists: false, forComparison: forComparison);
-    // If superclass can't encode field, we return value here (and assume
-    // it's primitive)
-    // NOTE: Implicit assumption:
-    // If value != null then superclass will return != null.
-    // TODO: Ensure [value] is primitive in this case.
-    if (primitiveValue == null) primitiveValue = value;
-    return primitiveValue;
-  }
-}
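
A minimal sketch of a model class that the mirror-based `ModelDBImpl` above would pick up. The class and field names are illustrative; `Kind`, `IdType`, and the property annotations are the package's annotation classes referenced by the implementation.

    @Kind(name: 'Person', idType: IdType.Integer)
    class Person extends Model {
      @StringProperty()
      String name;

      @IntProperty()
      int age;

      // ModelDBImpl requires an empty default constructor so it can
      // instantiate the class when decoding datastore entities.
      Person();
    }

    // Scanning a single library avoids the O(source code) scan performed by
    // the default constructor (the library symbol here is hypothetical):
    // final modelDb = ModelDBImpl.fromLibrary(#my_app.models);
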
diff --git a/gcloud/lib/src/db/models.dart b/gcloud/lib/src/db/models.dart
deleted file mode 100644
index 95f645a..0000000
--- a/gcloud/lib/src/db/models.dart
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.db;
-
-/// Represents a unique identifier for a [Model] stored in a datastore.
-///
-/// The [Key] can be incomplete if it's id is `null`. In this case the id will
-/// be automatically allocated and set at commit time.
-class Key {
-  // Either KeyImpl or PartitionImpl
-  final Object _parent;
-
-  final Type type;
-  final Object id;
-
-  Key(Key parent, this.type, this.id) : _parent = parent {
-    if (type == null) {
-      throw ArgumentError('The type argument must not be null.');
-    }
-    if (id != null && id is! String && id is! int) {
-      throw ArgumentError('The id argument must be an integer or a String.');
-    }
-  }
-
-  Key.emptyKey(Partition partition)
-      : _parent = partition,
-        type = null,
-        id = null;
-
-  /// Parent of this [Key].
-  Key get parent {
-    if (_parent is Key) {
-      return _parent as Key;
-    }
-    return null;
-  }
-
-  /// The partition of this [Key].
-  Partition get partition {
-    var obj = _parent;
-    while (obj is! Partition) {
-      obj = (obj as Key)._parent;
-    }
-    return obj as Partition;
-  }
-
-  Key append(Type modelType, {Object id}) {
-    return Key(this, modelType, id);
-  }
-
-  bool get isEmpty => _parent is Partition;
-
-  operator ==(Object other) {
-    return other is Key &&
-        _parent == other._parent &&
-        type == other.type &&
-        id == other.id;
-  }
-
-  int get hashCode => _parent.hashCode ^ type.hashCode ^ id.hashCode;
-}
-
-/// Represents a datastore partition.
-///
-/// A datastore is partitioned into namespaces. The default namespace is
-/// `null`.
-class Partition {
-  final String namespace;
-
-  Partition(this.namespace) {
-    if (namespace == '') {
-      throw ArgumentError('The namespace must not be an empty string');
-    }
-  }
-
-  /// Returns an empty [Key].
-  ///
-  /// Entities where the parent [Key] is empty will create their own entity
-  /// group.
-  Key get emptyKey => Key.emptyKey(this);
-
-  operator ==(Object other) {
-    return other is Partition && namespace == other.namespace;
-  }
-
-  int get hashCode => namespace.hashCode;
-}
-
-/// Superclass for all model classes.
-///
-/// Every model class has an [id] -- which must be an integer or a string --
-/// and a [parentKey]. The [key] getter returns the key for the model object.
-abstract class Model {
-  Object id;
-  Key parentKey;
-
-  Key get key => parentKey.append(this.runtimeType, id: id);
-}
-
-/// Superclass for all expanded model classes.
-///
-/// The [ExpandoModel] class adds support for having dynamic properties. You can
-/// set arbitrary fields on these models. The expanded values must be values
-/// accepted by the [RawDatastore] implementation.
-abstract class ExpandoModel extends Model {
-  final Map<String, Object> additionalProperties = {};
-
-  Object noSuchMethod(Invocation invocation) {
-    var name = mirrors.MirrorSystem.getName(invocation.memberName);
-    if (name.endsWith('=')) name = name.substring(0, name.length - 1);
-    if (invocation.isGetter) {
-      return additionalProperties[name];
-    } else if (invocation.isSetter) {
-      var value = invocation.positionalArguments[0];
-      additionalProperties[name] = value;
-      return value;
-    } else {
-      throw ArgumentError('Unsupported noSuchMethod call on ExpandoModel');
-    }
-  }
-}
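
A short sketch of how the `Key` and `Partition` classes above compose. The `Person` and `Address` model types are illustrative placeholders.

    void keyExample() {
      // Named partitions must be non-empty strings; the default namespace
      // is represented by null.
      final partition = Partition('test-namespace');
      final root = partition.emptyKey;                     // root.isEmpty == true
      final person = root.append(Person, id: 42);          // integer id
      final address = person.append(Address, id: 'home');  // string id
      assert(address.parent == person);
      assert(address.partition == partition);
    }
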
diff --git a/gcloud/lib/src/pubsub_impl.dart b/gcloud/lib/src/pubsub_impl.dart
deleted file mode 100644
index aba0de4..0000000
--- a/gcloud/lib/src/pubsub_impl.dart
+++ /dev/null
@@ -1,474 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.pubsub;
-
-class _PubSubImpl implements PubSub {
-  final String project;
-  final pubsub.PubsubApi _api;
-  final String _topicPrefix;
-  final String _subscriptionPrefix;
-
-  _PubSubImpl(http.Client client, this.project)
-      : _api = pubsub.PubsubApi(client),
-        _topicPrefix = 'projects/$project/topics/',
-        _subscriptionPrefix = 'projects/$project/subscriptions/';
-
-  _PubSubImpl.rootUrl(http.Client client, this.project, String rootUrl)
-      : _api = pubsub.PubsubApi(client, rootUrl: rootUrl),
-        _topicPrefix = 'projects/$project/topics/',
-        _subscriptionPrefix = 'projects/$project/subscriptions/';
-
-  String _fullTopicName(String name) {
-    return name.startsWith('projects/') ? name : '$_topicPrefix$name';
-  }
-
-  String _fullSubscriptionName(String name) {
-    return name.startsWith('projects/') ? name : '$_subscriptionPrefix$name';
-  }
-
-  Future<pubsub.Topic> _createTopic(String name) {
-    return _api.projects.topics.create(null, name);
-  }
-
-  Future _deleteTopic(String name) {
-    // The Pub/Sub delete API returns an instance of Empty.
-    return _api.projects.topics.delete(name).then((_) => null);
-  }
-
-  Future<pubsub.Topic> _getTopic(String name) {
-    return _api.projects.topics.get(name);
-  }
-
-  Future<pubsub.ListTopicsResponse> _listTopics(
-      int pageSize, String nextPageToken) {
-    return _api.projects.topics.list('projects/$project',
-        pageSize: pageSize, pageToken: nextPageToken);
-  }
-
-  Future<pubsub.Subscription> _createSubscription(
-      String name, String topic, Uri endpoint) {
-    var subscription = pubsub.Subscription()..topic = topic;
-    if (endpoint != null) {
-      var pushConfig = pubsub.PushConfig()..pushEndpoint = endpoint.toString();
-      subscription.pushConfig = pushConfig;
-    }
-    return _api.projects.subscriptions.create(subscription, name);
-  }
-
-  Future _deleteSubscription(String name) {
-    // The Pub/Sub delete API returns an instance of Empty.
-    return _api.projects.subscriptions
-        .delete(_fullSubscriptionName(name))
-        .then((_) => null);
-  }
-
-  Future<pubsub.Subscription> _getSubscription(String name) {
-    return _api.projects.subscriptions.get(name);
-  }
-
-  Future<pubsub.ListSubscriptionsResponse> _listSubscriptions(
-      String topic, int pageSize, String nextPageToken) {
-    return _api.projects.subscriptions.list('projects/$project',
-        pageSize: pageSize, pageToken: nextPageToken);
-  }
-
-  Future _modifyPushConfig(String subscription, Uri endpoint) {
-    var pushConfig = pubsub.PushConfig()
-      ..pushEndpoint = endpoint != null ? endpoint.toString() : null;
-    var request = pubsub.ModifyPushConfigRequest()..pushConfig = pushConfig;
-    return _api.projects.subscriptions.modifyPushConfig(request, subscription);
-  }
-
-  Future _publish(
-      String topic, List<int> message, Map<String, String> attributes) {
-    var request = pubsub.PublishRequest()
-      ..messages = [
-        (pubsub.PubsubMessage()
-          ..dataAsBytes = message
-          ..attributes = attributes)
-      ];
-    // TODO(sgjesse): Handle PublishResponse containing message ids.
-    return _api.projects.topics.publish(request, topic).then((_) => null);
-  }
-
-  Future<pubsub.PullResponse> _pull(
-      String subscription, bool returnImmediately) {
-    var request = pubsub.PullRequest()
-      ..maxMessages = 1
-      ..returnImmediately = returnImmediately;
-    return _api.projects.subscriptions.pull(request, subscription);
-  }
-
-  Future _ack(String ackId, String subscription) {
-    var request = pubsub.AcknowledgeRequest()..ackIds = [ackId];
-    // The Pub/Sub acknowledge API returns an instance of Empty.
-    return _api.projects.subscriptions
-        .acknowledge(request, subscription)
-        .then((_) => null);
-  }
-
-  void _checkTopicName(String name) {
-    if (name.startsWith('projects/') && !name.contains('/topics/')) {
-      throw ArgumentError(
-          "Illegal topic name. Absolute topic names must have the form "
-          "'projects/[project-id]/topics/[topic-name]");
-    }
-    if (name.endsWith('/topics/')) {
-      throw ArgumentError(
-          'Illegal topic name. Relative part of the name cannot be empty');
-    }
-  }
-
-  void _checkSubscriptionName(String name) {
-    if (name.startsWith('projects/') && !name.contains('/subscriptions/')) {
-      throw ArgumentError(
-          "Illegal subscription name. Absolute subscription names must have "
-          "the form 'projects/[project-id]/subscriptions/[subscription-name]");
-    }
-    if (name.endsWith('/subscriptions/')) {
-      throw ArgumentError(
-          'Illegal subscription name. Relative part of the name cannot be '
-          'empty');
-    }
-  }
-
-  Future<Topic> createTopic(String name) {
-    _checkTopicName(name);
-    return _createTopic(_fullTopicName(name))
-        .then((top) => _TopicImpl(this, top));
-  }
-
-  Future deleteTopic(String name) {
-    _checkTopicName(name);
-    return _deleteTopic(_fullTopicName(name));
-  }
-
-  Future<Topic> lookupTopic(String name) {
-    _checkTopicName(name);
-    return _getTopic(_fullTopicName(name)).then((top) => _TopicImpl(this, top));
-  }
-
-  Stream<Topic> listTopics() {
-    Future<Page<Topic>> firstPage(int pageSize) {
-      return _listTopics(pageSize, null)
-          .then((response) => _TopicPageImpl(this, pageSize, response));
-    }
-
-    return StreamFromPages<Topic>(firstPage).stream;
-  }
-
-  Future<Page<Topic>> pageTopics({int pageSize = 50}) {
-    return _listTopics(pageSize, null).then((response) {
-      return _TopicPageImpl(this, pageSize, response);
-    });
-  }
-
-  Future<Subscription> createSubscription(String name, String topic,
-      {Uri endpoint}) {
-    _checkSubscriptionName(name);
-    _checkTopicName(topic);
-    return _createSubscription(
-            _fullSubscriptionName(name), _fullTopicName(topic), endpoint)
-        .then((sub) => _SubscriptionImpl(this, sub));
-  }
-
-  Future deleteSubscription(String name) {
-    _checkSubscriptionName(name);
-    return _deleteSubscription(_fullSubscriptionName(name));
-  }
-
-  Future<Subscription> lookupSubscription(String name) {
-    _checkSubscriptionName(name);
-    return _getSubscription(_fullSubscriptionName(name))
-        .then((sub) => _SubscriptionImpl(this, sub));
-  }
-
-  Stream<Subscription> listSubscriptions([String query]) {
-    Future<Page<Subscription>> firstPage(int pageSize) {
-      return _listSubscriptions(query, pageSize, null).then(
-          (response) => _SubscriptionPageImpl(this, query, pageSize, response));
-    }
-
-    return StreamFromPages<Subscription>(firstPage).stream;
-  }
-
-  Future<Page<Subscription>> pageSubscriptions(
-      {String topic, int pageSize = 50}) {
-    return _listSubscriptions(topic, pageSize, null).then((response) {
-      return _SubscriptionPageImpl(this, topic, pageSize, response);
-    });
-  }
-}
-
-/// Message class for messages constructed through 'new Message()'. It stores
-/// the user-supplied body as either a String or bytes.
-class _MessageImpl implements Message {
-  // The message body, if it is a `String`. In that case, [bytesMessage] is
-  // null.
-  final String _stringMessage;
-
-  // The message body, if it is a byte list. In that case, [stringMessage] is
-  // null.
-  final List<int> _bytesMessage;
-
-  final Map<String, String> attributes;
-
-  _MessageImpl.withString(this._stringMessage, {this.attributes})
-      : _bytesMessage = null;
-
-  _MessageImpl.withBytes(this._bytesMessage, {this.attributes})
-      : _stringMessage = null;
-
-  List<int> get asBytes =>
-      _bytesMessage != null ? _bytesMessage : utf8.encode(_stringMessage);
-
-  String get asString =>
-      _stringMessage != null ? _stringMessage : utf8.decode(_bytesMessage);
-}
-
-/// Message received using [Subscription.pull].
-///
-/// Contains the [pubsub.PubsubMessage] received from Pub/Sub, and
-/// makes the message body and labels available on request.
-///
-/// The labels map is lazily created when first accessed.
-class _PullMessage implements Message {
-  final pubsub.PubsubMessage _message;
-  List<int> _bytes;
-  String _string;
-
-  _PullMessage(this._message);
-
-  List<int> get asBytes {
-    if (_bytes == null) _bytes = _message.dataAsBytes;
-    return _bytes;
-  }
-
-  String get asString {
-    if (_string == null) _string = utf8.decode(_message.dataAsBytes);
-    return _string;
-  }
-
-  Map<String, String> get attributes => _message.attributes;
-}
-
-/// Message received through Pub/Sub push delivery.
-///
-/// Stores the message body received from Pub/Sub as the Base64 encoded string
-/// from the wire protocol.
-///
-/// The labels have been decoded into a Map.
-class _PushMessage implements Message {
-  final String _base64Message;
-  final Map<String, String> attributes;
-
-  _PushMessage(this._base64Message, this.attributes);
-
-  List<int> get asBytes => base64.decode(_base64Message);
-
-  String get asString => utf8.decode(asBytes);
-}
-
-/// Pull event received from Pub/Sub pull delivery.
-///
-/// Stores the pull response received from Pub/Sub.
-class _PullEventImpl implements PullEvent {
-  /// Pub/Sub API object.
-  final _PubSubImpl _api;
-
-  /// Subscription this was received from.
-  final String _subscriptionName;
-
-  /// Low level response received from Pub/Sub.
-  final pubsub.PullResponse _response;
-  final Message message;
-
-  _PullEventImpl(
-      this._api, this._subscriptionName, pubsub.PullResponse response)
-      : this._response = response,
-        message = _PullMessage(response.receivedMessages[0].message);
-
-  Future acknowledge() {
-    return _api._ack(_response.receivedMessages[0].ackId, _subscriptionName);
-  }
-}
-
-/// Push event received from Pub/Sub push delivery.
-///
-/// Decoded from the JSON-encoded push HTTP request body.
-class _PushEventImpl implements PushEvent {
-  static const PREFIX = '/subscriptions/';
-  final Message _message;
-  final String _subscriptionName;
-
-  Message get message => _message;
-
-  String get subscriptionName => _subscriptionName;
-
-  _PushEventImpl(this._message, this._subscriptionName);
-
-  factory _PushEventImpl.fromJson(String json) {
-    Map body = jsonDecode(json) as Map<String, dynamic>;
-    String data = body['message']['data'] as String;
-    Map<String, String> labels = HashMap();
-    body['message']['labels'].forEach((label) {
-      String key = label['key'] as String;
-      var value = label['strValue'];
-      if (value == null) value = label['numValue'];
-      labels[key] = value.toString();
-    });
-    String subscription = body['subscription'] as String;
-    // TODO(#1): Remove this when the push event subscription name is prefixed
-    // with '/subscriptions/'.
-    if (!subscription.startsWith(PREFIX)) {
-      subscription = PREFIX + subscription;
-    }
-    return _PushEventImpl(_PushMessage(data, labels), subscription);
-  }
-}
-
-class _TopicImpl implements Topic {
-  final _PubSubImpl _api;
-  final pubsub.Topic _topic;
-
-  _TopicImpl(this._api, this._topic);
-
-  String get name {
-    assert(_topic.name.startsWith(_api._topicPrefix));
-    return _topic.name.substring(_api._topicPrefix.length);
-  }
-
-  String get project {
-    assert(_topic.name.startsWith(_api._topicPrefix));
-    return _api.project;
-  }
-
-  String get absoluteName => _topic.name;
-
-  Future publish(Message message) {
-    return _api._publish(_topic.name, message.asBytes, message.attributes);
-  }
-
-  Future delete() => _api._deleteTopic(_topic.name);
-
-  Future publishString(String message, {Map<String, String> attributes}) {
-    return _api._publish(_topic.name, utf8.encode(message), attributes);
-  }
-
-  Future publishBytes(List<int> message, {Map<String, String> attributes}) {
-    return _api._publish(_topic.name, message, attributes);
-  }
-}
-
-class _SubscriptionImpl implements Subscription {
-  final _PubSubImpl _api;
-  final pubsub.Subscription _subscription;
-
-  _SubscriptionImpl(this._api, this._subscription);
-
-  String get name {
-    assert(_subscription.name.startsWith(_api._subscriptionPrefix));
-    return _subscription.name.substring(_api._subscriptionPrefix.length);
-  }
-
-  String get project {
-    assert(_subscription.name.startsWith(_api._subscriptionPrefix));
-    return _api.project;
-  }
-
-  String get absoluteName => _subscription.name;
-
-  Topic get topic {
-    var topic = pubsub.Topic()..name = _subscription.topic;
-    return _TopicImpl(_api, topic);
-  }
-
-  Future delete() => _api._deleteSubscription(_subscription.name);
-
-  Future<PullEvent> pull({bool wait = true}) {
-    return _api._pull(_subscription.name, !wait).then((response) {
-      // The documentation says 'Returns an empty list if there are no
-      // messages available in the backlog'. However the receivedMessages
-      // property can also be null in that case.
-      if (response.receivedMessages == null ||
-          response.receivedMessages.isEmpty) {
-        return null;
-      }
-      return _PullEventImpl(_api, _subscription.name, response);
-    }).catchError((e) => null,
-        test: (e) => e is pubsub.DetailedApiRequestError && e.status == 400);
-  }
-
-  Uri get endpoint => null;
-
-  bool get isPull => endpoint == null;
-
-  bool get isPush => endpoint != null;
-
-  Future updatePushConfiguration(Uri endpoint) {
-    return _api._modifyPushConfig(_subscription.name, endpoint);
-  }
-}
-
-class _TopicPageImpl implements Page<Topic> {
-  final _PubSubImpl _api;
-  final int _pageSize;
-  final String _nextPageToken;
-  final List<Topic> items;
-
-  _TopicPageImpl(this._api, this._pageSize, pubsub.ListTopicsResponse response)
-      : items = List(response.topics != null ? response.topics.length : 0),
-        _nextPageToken = response.nextPageToken {
-    if (response.topics != null) {
-      for (int i = 0; i < response.topics.length; i++) {
-        items[i] = _TopicImpl(_api, response.topics[i]);
-      }
-    }
-  }
-
-  bool get isLast => _nextPageToken == null;
-
-  Future<Page<Topic>> next({int pageSize}) {
-    if (isLast) return Future.value(null);
-    if (pageSize == null) pageSize = this._pageSize;
-
-    return _api._listTopics(pageSize, _nextPageToken).then((response) {
-      return _TopicPageImpl(_api, pageSize, response);
-    });
-  }
-}
-
-class _SubscriptionPageImpl implements Page<Subscription> {
-  final _PubSubImpl _api;
-  final String _topic;
-  final int _pageSize;
-  final String _nextPageToken;
-  final List<Subscription> items;
-
-  _SubscriptionPageImpl(this._api, this._topic, this._pageSize,
-      pubsub.ListSubscriptionsResponse response)
-      : items = List(
-            response.subscriptions != null ? response.subscriptions.length : 0),
-        _nextPageToken = response.nextPageToken {
-    if (response.subscriptions != null) {
-      for (int i = 0; i < response.subscriptions.length; i++) {
-        items[i] = _SubscriptionImpl(_api, response.subscriptions[i]);
-      }
-    }
-  }
-
-  bool get isLast => _nextPageToken == null;
-
-  Future<Page<Subscription>> next({int pageSize}) {
-    if (_nextPageToken == null) return Future.value(null);
-    if (pageSize == null) pageSize = this._pageSize;
-
-    return _api
-        ._listSubscriptions(_topic, pageSize, _nextPageToken)
-        .then((response) {
-      return _SubscriptionPageImpl(_api, _topic, pageSize, response);
-    });
-  }
-}
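
For reference, a small usage sketch of the Pub/Sub surface implemented above. It assumes `pubSub` is an authenticated `PubSub` instance (constructed elsewhere in the package from an `http.Client` and a project id); the topic and subscription names are illustrative.

    Future<void> pubSubExample(PubSub pubSub) async {
      final topic = await pubSub.createTopic('my-topic');
      await topic.publishString('hello', attributes: {'source': 'example'});

      final subscription =
          await pubSub.createSubscription('my-subscription', 'my-topic');

      // With wait == false the pull returns null when the backlog is empty.
      final event = await subscription.pull(wait: false);
      if (event != null) {
        print(event.message.asString);
        await event.acknowledge();
      }
    }
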
diff --git a/gcloud/lib/src/storage_impl.dart b/gcloud/lib/src/storage_impl.dart
deleted file mode 100644
index d7f340e..0000000
--- a/gcloud/lib/src/storage_impl.dart
+++ /dev/null
@@ -1,624 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of gcloud.storage;
-
-const String _ABSOLUTE_PREFIX = 'gs://';
-const String _DIRECTORY_DELIMITER = 'gs://';
-
-/// Representation of an absolute name consisting of bucket name and object
-/// name.
-class _AbsoluteName {
-  String bucketName;
-  String objectName;
-
-  _AbsoluteName.parse(String absoluteName) {
-    if (!absoluteName.startsWith(_ABSOLUTE_PREFIX)) {
-      throw FormatException("Absolute name '$absoluteName' does not start "
-          "with '$_ABSOLUTE_PREFIX'");
-    }
-    int index = absoluteName.indexOf('/', _ABSOLUTE_PREFIX.length);
-    if (index == -1 || index == _ABSOLUTE_PREFIX.length) {
-      throw FormatException("Absolute name '$absoluteName' does not have "
-          "a bucket name");
-    }
-    if (index == absoluteName.length - 1) {
-      throw FormatException("Absolute name '$absoluteName' does not have "
-          "an object name");
-    }
-    bucketName = absoluteName.substring(_ABSOLUTE_PREFIX.length, index);
-    objectName = absoluteName.substring(index + 1);
-  }
-}
-
-/// Storage API implementation providing access to buckets.
-class _StorageImpl implements Storage {
-  final String project;
-  final storage_api.StorageApi _api;
-
-  _StorageImpl(http.Client client, this.project)
-      : _api = storage_api.StorageApi(client);
-
-  Future createBucket(String bucketName,
-      {PredefinedAcl predefinedAcl, Acl acl}) {
-    var bucket = storage_api.Bucket()..name = bucketName;
-    var predefinedName = predefinedAcl != null ? predefinedAcl._name : null;
-    if (acl != null) {
-      bucket.acl = acl._toBucketAccessControlList();
-    }
-    return _api.buckets
-        .insert(bucket, project, predefinedAcl: predefinedName)
-        .then((bucket) => null);
-  }
-
-  Future deleteBucket(String bucketName) {
-    return _api.buckets.delete(bucketName);
-  }
-
-  Bucket bucket(String bucketName,
-      {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl}) {
-    return _BucketImpl(
-        this, bucketName, defaultPredefinedObjectAcl, defaultObjectAcl);
-  }
-
-  Future<bool> bucketExists(String bucketName) {
-    notFoundError(e) {
-      return e is storage_api.DetailedApiRequestError && e.status == 404;
-    }
-
-    return _api.buckets
-        .get(bucketName)
-        .then((_) => true)
-        .catchError((e) => false, test: notFoundError);
-  }
-
-  Future<BucketInfo> bucketInfo(String bucketName) {
-    return _api.buckets
-        .get(bucketName, projection: 'full')
-        .then((bucket) => _BucketInfoImpl(bucket));
-  }
-
-  Stream<String> listBucketNames() {
-    Future<_BucketPageImpl> firstPage(int pageSize) {
-      return _listBuckets(pageSize, null)
-          .then((response) => _BucketPageImpl(this, pageSize, response));
-    }
-
-    return StreamFromPages<String>(firstPage).stream;
-  }
-
-  Future<Page<String>> pageBucketNames({int pageSize = 50}) {
-    return _listBuckets(pageSize, null).then((response) {
-      return _BucketPageImpl(this, pageSize, response);
-    });
-  }
-
-  Future copyObject(String src, String dest) {
-    var srcName = _AbsoluteName.parse(src);
-    var destName = _AbsoluteName.parse(dest);
-    return _api.objects
-        .copy(null, srcName.bucketName, srcName.objectName, destName.bucketName,
-            destName.objectName)
-        .then((_) => null);
-  }
-
-  Future<storage_api.Buckets> _listBuckets(int pageSize, String nextPageToken) {
-    return _api.buckets
-        .list(project, maxResults: pageSize, pageToken: nextPageToken);
-  }
-}
-
-class _BucketInfoImpl implements BucketInfo {
-  final storage_api.Bucket _bucket;
-
-  _BucketInfoImpl(this._bucket);
-
-  String get bucketName => _bucket.name;
-
-  String get etag => _bucket.etag;
-
-  DateTime get created => _bucket.timeCreated;
-
-  String get id => _bucket.id;
-
-  Acl get acl => Acl._fromBucketAcl(_bucket);
-}
-
-/// Bucket API implementation providing access to objects.
-class _BucketImpl implements Bucket {
-  final storage_api.StorageApi _api;
-  PredefinedAcl _defaultPredefinedObjectAcl;
-  Acl _defaultObjectAcl;
-  final String bucketName;
-
-  _BucketImpl(_StorageImpl storage, this.bucketName,
-      this._defaultPredefinedObjectAcl, this._defaultObjectAcl)
-      : this._api = storage._api;
-
-  String absoluteObjectName(String objectName) {
-    return '$_ABSOLUTE_PREFIX$bucketName/$objectName';
-  }
-
-  StreamSink<List<int>> write(String objectName,
-      {int length,
-      ObjectMetadata metadata,
-      Acl acl,
-      PredefinedAcl predefinedAcl,
-      String contentType}) {
-    storage_api.Object object;
-    if (metadata == null) {
-      metadata = _ObjectMetadata(acl: acl, contentType: contentType);
-    } else {
-      if (acl != null) {
-        metadata = metadata.replace(acl: acl);
-      }
-      if (contentType != null) {
-        metadata = metadata.replace(contentType: contentType);
-      }
-    }
-    _ObjectMetadata objectMetadata = metadata as _ObjectMetadata;
-    object = objectMetadata._object;
-
-    // If no predefined ACL is passed use the default (if any).
-    String predefinedName;
-    if (predefinedAcl != null || _defaultPredefinedObjectAcl != null) {
-      var predefined =
-          predefinedAcl != null ? predefinedAcl : _defaultPredefinedObjectAcl;
-      predefinedName = predefined._name;
-    }
-
-    // If no ACL is passed use the default (if any).
-    if (object.acl == null && _defaultObjectAcl != null) {
-      object.acl = _defaultObjectAcl._toObjectAccessControlList();
-    }
-
-    // Fill properties not passed in metadata.
-    object.name = objectName;
-
-    var sink = _MediaUploadStreamSink(
-        _api, bucketName, objectName, object, predefinedName, length);
-    return sink;
-  }
-
-  Future<ObjectInfo> writeBytes(String objectName, List<int> bytes,
-      {ObjectMetadata metadata,
-      Acl acl,
-      PredefinedAcl predefinedAcl,
-      String contentType}) {
-    _MediaUploadStreamSink sink = write(objectName,
-        length: bytes.length,
-        metadata: metadata,
-        acl: acl,
-        predefinedAcl: predefinedAcl,
-        contentType: contentType) as _MediaUploadStreamSink;
-    sink.add(bytes);
-    return sink.close();
-  }
-
-  Stream<List<int>> read(String objectName, {int offset, int length}) async* {
-    if (offset == null) {
-      offset = 0;
-    }
-
-    if (offset != 0 && length == null) {
-      throw ArgumentError('length must have a value if offset is non-zero.');
-    }
-
-    var options = storage_api.DownloadOptions.FullMedia;
-
-    if (length != null) {
-      if (length <= 0) {
-        throw ArgumentError.value(
-            length, 'length', 'If provided, length must be greater than zero.');
-      }
-      // For ByteRange, end is *inclusive*.
-      var end = offset + length - 1;
-      var range = storage_api.ByteRange(offset, end);
-      assert(range.length == length);
-      options = storage_api.PartialDownloadOptions(range);
-    }
-
-    commons.Media media = (await _api.objects.get(bucketName, objectName,
-        downloadOptions: options)) as commons.Media;
-
-    yield* media.stream;
-  }
-
-  Future<ObjectInfo> info(String objectName) {
-    return _api.objects
-        .get(bucketName, objectName, projection: 'full')
-        .then((object) => _ObjectInfoImpl(object as storage_api.Object));
-  }
-
-  Future delete(String objectName) {
-    return _api.objects.delete(bucketName, objectName);
-  }
-
-  Stream<BucketEntry> list({String prefix}) {
-    Future<_ObjectPageImpl> firstPage(int pageSize) {
-      return _listObjects(bucketName, prefix, _DIRECTORY_DELIMITER, 50, null)
-          .then(
-              (response) => _ObjectPageImpl(this, prefix, pageSize, response));
-    }
-
-    return StreamFromPages<BucketEntry>(firstPage).stream;
-  }
-
-  Future<Page<BucketEntry>> page({String prefix, int pageSize = 50}) {
-    return _listObjects(
-            bucketName, prefix, _DIRECTORY_DELIMITER, pageSize, null)
-        .then((response) {
-      return _ObjectPageImpl(this, prefix, pageSize, response);
-    });
-  }
-
-  Future updateMetadata(String objectName, ObjectMetadata metadata) {
-    // TODO: support other ObjectMetadata implementations?
-    _ObjectMetadata md = metadata as _ObjectMetadata;
-    var object = md._object;
-    if (md._object.acl == null && _defaultObjectAcl == null) {
-      throw ArgumentError('ACL is required for update');
-    }
-    if (md.contentType == null) {
-      throw ArgumentError('Content-Type is required for update');
-    }
-    if (md._object.acl == null) {
-      md._object.acl = _defaultObjectAcl._toObjectAccessControlList();
-    }
-    return _api.objects.update(object, bucketName, objectName);
-  }
-
-  Future<storage_api.Objects> _listObjects(String bucketName, String prefix,
-      String delimiter, int pageSize, String nextPageToken) {
-    return _api.objects.list(bucketName,
-        prefix: prefix,
-        delimiter: delimiter,
-        maxResults: pageSize,
-        pageToken: nextPageToken);
-  }
-}
-
-class _BucketPageImpl implements Page<String> {
-  final _StorageImpl _storage;
-  final int _pageSize;
-  final String _nextPageToken;
-  final List<String> items;
-
-  _BucketPageImpl(this._storage, this._pageSize, storage_api.Buckets response)
-      : items = List(response.items != null ? response.items.length : 0),
-        _nextPageToken = response.nextPageToken {
-    for (int i = 0; i < items.length; i++) {
-      items[i] = response.items[i].name;
-    }
-  }
-
-  bool get isLast => _nextPageToken == null;
-
-  Future<Page<String>> next({int pageSize}) {
-    if (isLast) return Future.value(null);
-    if (pageSize == null) pageSize = this._pageSize;
-
-    return _storage._listBuckets(pageSize, _nextPageToken).then((response) {
-      return _BucketPageImpl(_storage, pageSize, response);
-    });
-  }
-}
-
-class _ObjectPageImpl implements Page<BucketEntry> {
-  final _BucketImpl _bucket;
-  final String _prefix;
-  final int _pageSize;
-  final String _nextPageToken;
-  final List<BucketEntry> items;
-
-  _ObjectPageImpl(
-      this._bucket, this._prefix, this._pageSize, storage_api.Objects response)
-      : items = List((response.items != null ? response.items.length : 0) +
-            (response.prefixes != null ? response.prefixes.length : 0)),
-        _nextPageToken = response.nextPageToken {
-    var prefixes = 0;
-    if (response.prefixes != null) {
-      for (int i = 0; i < response.prefixes.length; i++) {
-        items[i] = BucketEntry._directory(response.prefixes[i]);
-      }
-      prefixes = response.prefixes.length;
-    }
-    if (response.items != null) {
-      for (int i = 0; i < response.items.length; i++) {
-        items[prefixes + i] = BucketEntry._object(response.items[i].name);
-      }
-    }
-  }
-
-  bool get isLast => _nextPageToken == null;
-
-  Future<Page<BucketEntry>> next({int pageSize}) {
-    if (isLast) return Future.value(null);
-    if (pageSize == null) pageSize = this._pageSize;
-
-    return _bucket
-        ._listObjects(_bucket.bucketName, _prefix, _DIRECTORY_DELIMITER,
-            pageSize, _nextPageToken)
-        .then((response) {
-      return _ObjectPageImpl(_bucket, _prefix, pageSize, response);
-    });
-  }
-}
-
-class _ObjectGenerationImpl implements ObjectGeneration {
-  final String objectGeneration;
-  final int metaGeneration;
-
-  _ObjectGenerationImpl(this.objectGeneration, this.metaGeneration);
-}
-
-class _ObjectInfoImpl implements ObjectInfo {
-  final storage_api.Object _object;
-  final ObjectMetadata _metadata;
-  Uri _downloadLink;
-  ObjectGeneration _generation;
-
-  _ObjectInfoImpl(storage_api.Object object)
-      : _object = object,
-        _metadata = _ObjectMetadata._(object);
-
-  String get name => _object.name;
-
-  int get length => int.parse(_object.size);
-
-  DateTime get updated => _object.updated;
-
-  String get etag => _object.etag;
-
-  List<int> get md5Hash => base64.decode(_object.md5Hash);
-
-  int get crc32CChecksum {
-    var list = base64.decode(_object.crc32c);
-    return (list[3] << 24) | (list[2] << 16) | (list[1] << 8) | list[0];
-  }
-
-  Uri get downloadLink {
-    if (_downloadLink == null) {
-      _downloadLink = Uri.parse(_object.mediaLink);
-    }
-    return _downloadLink;
-  }
-
-  ObjectGeneration get generation {
-    if (_generation == null) {
-      _generation = _ObjectGenerationImpl(
-          _object.generation, int.parse(_object.metageneration));
-    }
-    return _generation;
-  }
-
-  /// Additional metadata.
-  ObjectMetadata get metadata => _metadata;
-}
-
-class _ObjectMetadata implements ObjectMetadata {
-  final storage_api.Object _object;
-  Acl _cachedAcl;
-  ObjectGeneration _cachedGeneration;
-  Map<String, String> _cachedCustom;
-
-  _ObjectMetadata(
-      {Acl acl,
-      String contentType,
-      String contentEncoding,
-      String cacheControl,
-      String contentDisposition,
-      String contentLanguage,
-      Map<String, String> custom})
-      : _object = storage_api.Object() {
-    _object.acl = acl != null ? acl._toObjectAccessControlList() : null;
-    _object.contentType = contentType;
-    _object.contentEncoding = contentEncoding;
-    _object.cacheControl = cacheControl;
-    _object.contentDisposition = contentDisposition;
-    _object.contentLanguage = contentLanguage;
-    if (custom != null) _object.metadata = custom;
-  }
-
-  _ObjectMetadata._(this._object);
-
-  Acl get acl {
-    if (_cachedAcl == null) {
-      _cachedAcl = Acl._fromObjectAcl(_object);
-    }
-    return _cachedAcl;
-  }
-
-  String get contentType => _object.contentType;
-
-  String get contentEncoding => _object.contentEncoding;
-
-  String get cacheControl => _object.cacheControl;
-
-  String get contentDisposition => _object.contentDisposition;
-
-  String get contentLanguage => _object.contentLanguage;
-
-  ObjectGeneration get generation {
-    if (_cachedGeneration == null) {
-      _cachedGeneration = ObjectGeneration(
-          _object.generation, int.parse(_object.metageneration));
-    }
-    return _cachedGeneration;
-  }
-
-  Map<String, String> get custom {
-    if (_object.metadata == null) return null;
-    if (_cachedCustom == null) {
-      _cachedCustom = UnmodifiableMapView<String, String>(_object.metadata);
-    }
-    return _cachedCustom;
-  }
-
-  ObjectMetadata replace(
-      {Acl acl,
-      String contentType,
-      String contentEncoding,
-      String cacheControl,
-      String contentDisposition,
-      String contentLanguage,
-      Map<String, String> custom}) {
-    return _ObjectMetadata(
-        acl: acl != null ? acl : this.acl,
-        contentType: contentType != null ? contentType : this.contentType,
-        contentEncoding:
-            contentEncoding != null ? contentEncoding : this.contentEncoding,
-        cacheControl: cacheControl != null ? cacheControl : this.cacheControl,
-        contentDisposition: contentDisposition != null
-            ? contentDisposition
-            : this.contentDisposition,
-        contentLanguage:
-            contentLanguage != null ? contentLanguage : this.contentLanguage,
-        custom: custom != null ? Map.from(custom) : this.custom);
-  }
-}
-
-/// Implementation of StreamSink which handles Google media upload.
-/// It provides a StreamSink and the logic which selects whether to use a
-/// normal media upload (multipart MIME) or a resumable media upload.
-class _MediaUploadStreamSink implements StreamSink<List<int>> {
-  static const int _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH = 1024 * 1024;
-  final storage_api.StorageApi _api;
-  final String _bucketName;
-  final String _objectName;
-  final storage_api.Object _object;
-  final String _predefinedAcl;
-  final int _length;
-  final int _maxNormalUploadLength;
-  int _bufferLength = 0;
-  final List<List<int>> buffer = <List<int>>[];
-  final _controller = StreamController<List<int>>(sync: true);
-  StreamSubscription _subscription;
-  StreamController<List<int>> _resumableController;
-  final _doneCompleter = Completer<ObjectInfo>();
-
-  static const int _STATE_LENGTH_KNOWN = 0;
-  static const int _STATE_PROBING_LENGTH = 1;
-  static const int _STATE_DECIDED_RESUMABLE = 2;
-  int _state;
-
-  _MediaUploadStreamSink(this._api, this._bucketName, this._objectName,
-      this._object, this._predefinedAcl, this._length,
-      [this._maxNormalUploadLength = _DEFAULT_MAX_NORMAL_UPLOAD_LENGTH]) {
-    if (_length != null) {
-      // If the length is known in advance decide on the upload strategy
-      // immediately
-      _state = _STATE_LENGTH_KNOWN;
-      if (_length <= _maxNormalUploadLength) {
-        _startNormalUpload(_controller.stream, _length);
-      } else {
-        _startResumableUpload(_controller.stream, _length);
-      }
-    } else {
-      _state = _STATE_PROBING_LENGTH;
-      // If the length is not known in advance decide on the upload strategy
-      // later. Start buffering until enough data has been read to decide.
-      _subscription = _controller.stream
-          .listen(_onData, onDone: _onDone, onError: _onError);
-    }
-  }
-
-  void add(List<int> event) {
-    _controller.add(event);
-  }
-
-  void addError(errorEvent, [StackTrace stackTrace]) {
-    _controller.addError(errorEvent, stackTrace);
-  }
-
-  Future addStream(Stream<List<int>> stream) {
-    return _controller.addStream(stream);
-  }
-
-  Future<ObjectInfo> close() {
-    _controller.close();
-    return _doneCompleter.future;
-  }
-
-  Future get done => _doneCompleter.future;
-
-  _onData(List<int> data) {
-    assert(_state != _STATE_LENGTH_KNOWN);
-    if (_state == _STATE_PROBING_LENGTH) {
-      buffer.add(data);
-      _bufferLength += data.length;
-      if (_bufferLength > _maxNormalUploadLength) {
-        // Start resumable upload.
-        // TODO: Avoid using another stream-controller.
-        _resumableController = StreamController<List<int>>(sync: true);
-        buffer.forEach(_resumableController.add);
-        _startResumableUpload(_resumableController.stream, _length);
-        _state = _STATE_DECIDED_RESUMABLE;
-      }
-    } else {
-      assert(_state == _STATE_DECIDED_RESUMABLE);
-      _resumableController.add(data);
-    }
-  }
-
-  _onDone() {
-    if (_state == _STATE_PROBING_LENGTH) {
-      // As the data is already cached don't bother to wait on somebody
-      // listening on the stream before adding the data.
-      _startNormalUpload(Stream<List<int>>.fromIterable(buffer), _bufferLength);
-    } else {
-      _resumableController.close();
-    }
-  }
-
-  _onError(e, StackTrace s) {
-    // If still deciding on the strategy complete with error. Otherwise
-    // forward the error for default processing.
-    if (_state == _STATE_PROBING_LENGTH) {
-      _completeError(e, s);
-    } else {
-      _resumableController.addError(e, s);
-    }
-  }
-
-  _completeError(e, StackTrace s) {
-    if (_state != _STATE_LENGTH_KNOWN) {
-      // Always cancel subscription on error.
-      _subscription.cancel();
-    }
-    _doneCompleter.completeError(e, s);
-  }
-
-  void _startNormalUpload(Stream<List<int>> stream, int length) {
-    var contentType = _object.contentType != null
-        ? _object.contentType
-        : 'application/octet-stream';
-    var media = storage_api.Media(stream, length, contentType: contentType);
-    _api.objects
-        .insert(_object, _bucketName,
-            name: _objectName,
-            predefinedAcl: _predefinedAcl,
-            uploadMedia: media,
-            uploadOptions: storage_api.UploadOptions.Default)
-        .then((response) {
-      _doneCompleter.complete(_ObjectInfoImpl(response));
-    }, onError: _completeError);
-  }
-
-  void _startResumableUpload(Stream<List<int>> stream, int length) {
-    var contentType = _object.contentType != null
-        ? _object.contentType
-        : 'application/octet-stream';
-    var media = storage_api.Media(stream, length, contentType: contentType);
-    _api.objects
-        .insert(_object, _bucketName,
-            name: _objectName,
-            predefinedAcl: _predefinedAcl,
-            uploadMedia: media,
-            uploadOptions: storage_api.UploadOptions.Resumable)
-        .then((response) {
-      _doneCompleter.complete(_ObjectInfoImpl(response));
-    }, onError: _completeError);
-  }
-}
diff --git a/gcloud/lib/storage.dart b/gcloud/lib/storage.dart
deleted file mode 100644
index 9ab01ce..0000000
--- a/gcloud/lib/storage.dart
+++ /dev/null
@@ -1,799 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// This library provides access to Google Cloud Storage.
-///
-/// Google Cloud Storage is an object store for binary objects. Each
-/// object has a set of metadata attached to it. For more information on
-/// Google Cloud Storage see https://developers.google.com/storage/.
-///
-/// There are two main concepts in Google Cloud Storage: Buckets and Objects.
-/// A bucket is a container for objects and objects are the actual binary
-/// objects.
-///
-/// The API has two main classes for dealing with buckets and objects.
-///
-/// The class `Storage` is the main API class providing access to working
-/// with buckets. This is the 'bucket service' interface.
-///
-/// The class `Bucket` provides access to working with objects in a specific
-/// bucket. This is the 'object service' interface.
-///
-/// Both buckets and objects have names. The bucket namespace is flat and
-/// global across all projects. This means that a bucket is always
-/// addressable using its name without requiring further context.
-///
-/// Within buckets the object namespace is also flat. Objects are *not*
-/// organized hierarchically. However, as object names allow the slash `/`
-/// character, it is often used to simulate a hierarchical structure
-/// based on common prefixes.
-///
-/// This package uses relative and absolute names to refer to objects. A
-/// relative name is just the object name within a bucket, and requires the
-/// context of a bucket to be used. A relative name looks like this:
-///
-///     object_name
-///
-/// An absolute name includes the bucket name and uses the `gs://` prefix
-/// also used by the `gsutil` tool. An absolute name looks like this:
-///
-///     gs://bucket_name/object_name
-///
-/// In most cases relative names are used. Absolute names are typically
-/// only used for operations involving objects in different buckets.
-///
-/// For most of the APIs in this library which take instances of other classes
-/// from this library, it is assumed that the actual implementations
-/// provided here are used.
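-///
-/// A minimal usage sketch. The project id, bucket and object names are
-/// placeholders, `utf8` is from `dart:convert`, and an authenticated
-/// [http.Client] (for example one obtained via `package:googleapis_auth`) is
-/// assumed to be available as `client`:
-///
-///     var storage = Storage(client, 'my-project');
-///     var bucket = storage.bucket('my-bucket');
-///     await bucket.writeBytes('greeting.txt', utf8.encode('hello, world'),
-///         contentType: 'text/plain');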
-library gcloud.storage;
-
-import 'dart:async';
-import 'dart:collection' show UnmodifiableListView, UnmodifiableMapView;
-import 'dart:convert';
-
-import 'package:http/http.dart' as http;
-
-import 'package:googleapis/storage/v1.dart' as storage_api;
-import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons;
-
-import 'common.dart';
-import 'service_scope.dart' as ss;
-
-export 'common.dart';
-
-part 'src/storage_impl.dart';
-
-const Symbol _storageKey = #gcloud.storage;
-
-/// Access the [Storage] object available in the current service scope.
-///
-/// The returned object will be the one which was previously registered with
-/// [registerStorageService] within the current (or a parent) service scope.
-///
-/// Accessing this getter outside of a service scope will result in an error.
-/// See the `package:gcloud/service_scope.dart` library for more information.
-Storage get storageService => ss.lookup(_storageKey) as Storage;
-
-/// Registers the [storage] object within the current service scope.
-///
-/// The provided `storage` object will be available via the top-level
-/// `storageService` getter.
-///
-/// Calling this function outside of a service scope will result in an error.
-/// Calling this function more than once inside the same service scope is not
-/// allowed.
-void registerStorageService(Storage storage) {
-  ss.register(_storageKey, storage);
-}
-
-int _jenkinsHash(List e) {
-  const _HASH_MASK = 0x3fffffff;
-  int hash = 0;
-  for (int i = 0; i < e.length; i++) {
-    int c = e[i].hashCode;
-    hash = (hash + c) & _HASH_MASK;
-    hash = (hash + (hash << 10)) & _HASH_MASK;
-    hash ^= (hash >> 6);
-  }
-  hash = (hash + (hash << 3)) & _HASH_MASK;
-  hash ^= (hash >> 11);
-  hash = (hash + (hash << 15)) & _HASH_MASK;
-  return hash;
-}
-
-/// An ACL (Access Control List) describes access rights to buckets and
-/// objects.
-///
-/// An ACL is a prioritized sequence of access control specifications,
-/// which individually prevent or grant access.
-/// The access controls are described by [AclEntry] objects.
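-///
-/// A small sketch of constructing an ACL (the email address is a
-/// placeholder):
-///
-///     var acl = Acl([
-///       AclEntry(AccountScope('user@example.com'), AclPermission.FULL_CONTROL),
-///       AclEntry(AclScope.allUsers, AclPermission.READ),
-///     ]);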
-class Acl {
-  final List<AclEntry> _entries;
-  int _cachedHashCode;
-
-  /// The entries in the ACL.
-  List<AclEntry> get entries => UnmodifiableListView<AclEntry>(_entries);
-
-  /// Create a new ACL with a list of ACL entries.
-  Acl(Iterable<AclEntry> entries) : _entries = List.from(entries);
-
-  Acl._fromBucketAcl(storage_api.Bucket bucket)
-      : _entries = List(bucket.acl == null ? 0 : bucket.acl.length) {
-    if (bucket.acl != null) {
-      for (int i = 0; i < bucket.acl.length; i++) {
-        _entries[i] = AclEntry(_aclScopeFromEntity(bucket.acl[i].entity),
-            _aclPermissionFromRole(bucket.acl[i].role));
-      }
-    }
-  }
-
-  Acl._fromObjectAcl(storage_api.Object object)
-      : _entries = List(object.acl == null ? 0 : object.acl.length) {
-    if (object.acl != null) {
-      for (int i = 0; i < object.acl.length; i++) {
-        _entries[i] = AclEntry(_aclScopeFromEntity(object.acl[i].entity),
-            _aclPermissionFromRole(object.acl[i].role));
-      }
-    }
-  }
-
-  AclScope _aclScopeFromEntity(String entity) {
-    if (entity.startsWith('user-')) {
-      String tmp = entity.substring(5);
-      int at = tmp.indexOf('@');
-      if (at != -1) {
-        return AccountScope(tmp);
-      } else {
-        return StorageIdScope(tmp);
-      }
-    } else if (entity.startsWith('group-')) {
-      return GroupScope(entity.substring(6));
-    } else if (entity.startsWith('domain-')) {
-      return DomainScope(entity.substring(7));
-    } else if (entity.startsWith('allAuthenticatedUsers-')) {
-      return AclScope.allAuthenticated;
-    } else if (entity.startsWith('allUsers-')) {
-      return AclScope.allUsers;
-    } else if (entity.startsWith('project-')) {
-      String tmp = entity.substring(8);
-      int dash = tmp.indexOf('-');
-      if (dash != -1) {
-        return ProjectScope(tmp.substring(dash + 1), tmp.substring(0, dash));
-      }
-    }
-    return OpaqueScope(entity);
-  }
-
-  AclPermission _aclPermissionFromRole(String role) {
-    if (role == 'READER') return AclPermission.READ;
-    if (role == 'WRITER') return AclPermission.WRITE;
-    if (role == 'OWNER') return AclPermission.FULL_CONTROL;
-    throw UnsupportedError(
-        "Server returned a unsupported permission role '$role'");
-  }
-
-  List<storage_api.BucketAccessControl> _toBucketAccessControlList() {
-    return _entries.map((entry) => entry._toBucketAccessControl()).toList();
-  }
-
-  List<storage_api.ObjectAccessControl> _toObjectAccessControlList() {
-    return _entries.map((entry) => entry._toObjectAccessControl()).toList();
-  }
-
-  int get hashCode {
-    return _cachedHashCode != null
-        ? _cachedHashCode
-        : _cachedHashCode = _jenkinsHash(_entries);
-  }
-
-  bool operator ==(Object other) {
-    if (other is Acl) {
-      List entries = _entries;
-      List otherEntries = other._entries;
-      if (entries.length != otherEntries.length) return false;
-      for (int i = 0; i < entries.length; i++) {
-        if (entries[i] != otherEntries[i]) return false;
-      }
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  String toString() => 'Acl($_entries)';
-}
-
-/// An ACL entry specifies that an entity has a specific access permission.
-///
-/// A permission grants a specific permission to the entity.
-class AclEntry {
-  final AclScope scope;
-  final AclPermission permission;
-  int _cachedHashCode;
-
-  AclEntry(this.scope, this.permission);
-
-  storage_api.BucketAccessControl _toBucketAccessControl() {
-    var acl = storage_api.BucketAccessControl();
-    acl.entity = scope._storageEntity;
-    acl.role = permission._storageBucketRole;
-    return acl;
-  }
-
-  storage_api.ObjectAccessControl _toObjectAccessControl() {
-    var acl = storage_api.ObjectAccessControl();
-    acl.entity = scope._storageEntity;
-    acl.role = permission._storageObjectRole;
-    return acl;
-  }
-
-  int get hashCode {
-    return _cachedHashCode != null
-        ? _cachedHashCode
-        : _cachedHashCode = _jenkinsHash([scope, permission]);
-  }
-
-  bool operator ==(Object other) {
-    return other is AclEntry &&
-        scope == other.scope &&
-        permission == other.permission;
-  }
-
-  String toString() => 'AclEntry($scope, $permission)';
-}
-
-/// An ACL scope specifies an entity for which a permission applies.
-///
-/// A scope can be one of:
-///
-///   * Google Storage ID
-///   * Google account email address
-///   * Google group email address
-///   * Google Apps domain
-///   * Special identifier for all Google account holders
-///   * Special identifier for all users
-///
-/// See https://cloud.google.com/storage/docs/accesscontrol for more details.
-abstract class AclScope {
-  int _cachedHashCode;
-
-  /// ACL type for scope representing a Google Storage id.
-  static const int _TYPE_STORAGE_ID = 0;
-
-  /// ACL type for scope representing a project entity.
-  static const int _TYPE_PROJECT = 1;
-
-  /// ACL type for scope representing an account holder.
-  static const int _TYPE_ACCOUNT = 2;
-
-  /// ACL type for scope representing a group.
-  static const int _TYPE_GROUP = 3;
-
-  /// ACL type for scope representing a domain.
-  static const int _TYPE_DOMAIN = 4;
-
-  /// ACL type for scope representing all authenticated users.
-  static const int _TYPE_ALL_AUTHENTICATED = 5;
-
-  /// ACL type for scope representing all users.
-  static const int _TYPE_ALL_USERS = 6;
-
-  /// ACL type for scope representing an unsupported scope.
-  static const int _TYPE_OPAQUE = 7;
-
-  /// The id of the actual entity this ACL scope represents. The actual values
-  /// are set in the different subclasses.
-  final String _id;
-
-  /// The type of scope this ACL scope represents.
-  final int _type;
-
-  /// ACL scope for all authenticated users.
-  static AllAuthenticatedScope allAuthenticated = AllAuthenticatedScope();
-
-  /// ACL scope for all users.
-  static AllUsersScope allUsers = AllUsersScope();
-
-  AclScope._(this._type, this._id);
-
-  int get hashCode {
-    return _cachedHashCode != null
-        ? _cachedHashCode
-        : _cachedHashCode = _jenkinsHash([_type, _id]);
-  }
-
-  bool operator ==(Object other) {
-    return other is AclScope && _type == other._type && _id == other._id;
-  }
-
-  String toString() => 'AclScope($_storageEntity)';
-
-  String get _storageEntity;
-}
-
-/// An ACL scope for an entity identified by a 'Google Storage ID'.
-///
-/// The [storageId] is a string of 64 hexadecimal digits that identifies a
-/// specific Google account holder or a specific Google group.
-class StorageIdScope extends AclScope {
-  StorageIdScope(String storageId)
-      : super._(AclScope._TYPE_STORAGE_ID, storageId);
-
-  /// Google Storage ID.
-  String get storageId => _id;
-
-  String get _storageEntity => 'user-$_id';
-}
-
-/// An ACL scope for an entity identified by an individual email address.
-class AccountScope extends AclScope {
-  AccountScope(String email) : super._(AclScope._TYPE_ACCOUNT, email);
-
-  /// Email address.
-  String get email => _id;
-
-  String get _storageEntity => 'user-$_id';
-}
-
-/// An ACL scope for an entity identified by a Google Groups email.
-class GroupScope extends AclScope {
-  GroupScope(String group) : super._(AclScope._TYPE_GROUP, group);
-
-  /// Group name.
-  String get group => _id;
-
-  String get _storageEntity => 'group-$_id';
-}
-
-/// An ACL scope for an entity identified by a domain name.
-class DomainScope extends AclScope {
-  DomainScope(String domain) : super._(AclScope._TYPE_DOMAIN, domain);
-
-  /// Domain name.
-  String get domain => _id;
-
-  String get _storageEntity => 'domain-$_id';
-}
-
-/// An ACL scope for a project-related entity.
-class ProjectScope extends AclScope {
-  /// Project role.
-  ///
-  /// Possible values are `owners`, `editors` and `viewers`.
-  final String role;
-
-  ProjectScope(String project, this.role)
-      : super._(AclScope._TYPE_PROJECT, project);
-
-  /// Project ID.
-  String get project => _id;
-
-  String get _storageEntity => 'project-$role-$_id';
-}
-
-/// An ACL scope for an unsupported scope.
-class OpaqueScope extends AclScope {
-  OpaqueScope(String id) : super._(AclScope._TYPE_OPAQUE, id);
-
-  String get _storageEntity => _id;
-}
-
-/// ACL scope for all authenticated users.
-class AllAuthenticatedScope extends AclScope {
-  AllAuthenticatedScope() : super._(AclScope._TYPE_ALL_AUTHENTICATED, null);
-
-  String get _storageEntity => 'allAuthenticatedUsers';
-}
-
-/// ACL scope for all users.
-class AllUsersScope extends AclScope {
-  AllUsersScope() : super._(AclScope._TYPE_ALL_USERS, null);
-
-  String get _storageEntity => 'allUsers';
-}
-
-/// Permissions for individual scopes in an ACL.
-class AclPermission {
-  /// Provide read access.
-  static const READ = AclPermission._('READER');
-
-  /// Provide write access.
-  ///
-  /// For objects this permission is the same as [FULL_CONTROL].
-  static const WRITE = AclPermission._('WRITER');
-
-  /// Provide full control.
-  ///
-  /// For objects this permission is the same as [WRITE].
-  static const FULL_CONTROL = AclPermission._('OWNER');
-
-  final String _id;
-
-  const AclPermission._(this._id);
-
-  String get _storageBucketRole => _id;
-
-  String get _storageObjectRole => this == WRITE ? FULL_CONTROL._id : _id;
-
-  int get hashCode => _id.hashCode;
-
-  bool operator ==(Object other) {
-    return other is AclPermission && _id == other._id;
-  }
-
-  String toString() => 'AclPermission($_id)';
-}
-
-/// Definition of predefined ACLs.
-///
-/// There is a convenient way of referring to a number of _predefined_ ACLs.
-/// These predefined ACLs have explicit names, and can _only_ be used to set
-/// an ACL when either creating or updating a bucket or object. This set of
-/// predefined ACLs is expanded on the server to its actual list of [AclEntry]
-/// objects. When information is retrieved on a bucket or object, this expanded
-/// list will be present. For a description of these predefined ACLs see:
-/// https://cloud.google.com/storage/docs/accesscontrol#extension.
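-///
-/// For example, when creating a bucket (assuming a `storage` instance and a
-/// placeholder bucket name):
-///
-///     await storage.createBucket('my-bucket',
-///         predefinedAcl: PredefinedAcl.publicRead);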
-class PredefinedAcl {
-  final String _name;
-  const PredefinedAcl._(this._name);
-
-  /// Predefined ACL for the 'authenticated-read' ACL. Applies to both buckets
-  /// and objects.
-  static const PredefinedAcl authenticatedRead =
-      PredefinedAcl._('authenticatedRead');
-
-  /// Predefined ACL for the 'private' ACL. Applies to both buckets
-  /// and objects.
-  static const PredefinedAcl private = PredefinedAcl._('private');
-
-  /// Predefined ACL for the 'project-private' ACL. Applies to both buckets
-  /// and objects.
-  static const PredefinedAcl projectPrivate = PredefinedAcl._('projectPrivate');
-
-  /// Predefined ACL for the 'public-read' ACL. Applies to both buckets
-  /// and objects.
-  static const PredefinedAcl publicRead = PredefinedAcl._('publicRead');
-
-  /// Predefined ACL for the 'public-read-write' ACL. Applies only to buckets.
-  static const PredefinedAcl publicReadWrite =
-      PredefinedAcl._('publicReadWrite');
-
-  /// Predefined ACL for the 'bucket-owner-full-control' ACL. Applies only to
-  /// objects.
-  static const PredefinedAcl bucketOwnerFullControl =
-      PredefinedAcl._('bucketOwnerFullControl');
-
-  /// Predefined ACL for the 'bucket-owner-read' ACL. Applies only to
-  /// objects.
-  static const PredefinedAcl bucketOwnerRead =
-      PredefinedAcl._('bucketOwnerRead');
-
-  String toString() => 'PredefinedAcl($_name)';
-}
-
-/// Information on a bucket.
-abstract class BucketInfo {
-  /// Name of the bucket.
-  String get bucketName;
-
-  /// Entity tag for the bucket.
-  String get etag;
-
-  /// When this bucket was created.
-  DateTime get created;
-
-  /// Bucket ID.
-  String get id;
-
-  /// Acl of the bucket.
-  Acl get acl;
-}
-
-/// Access to Cloud Storage.
-abstract class Storage {
-  /// List of required OAuth2 scopes for Cloud Storage operation.
-  static const List<String> SCOPES = <String>[
-    storage_api.StorageApi.DevstorageFullControlScope
-  ];
-
-  /// Initializes access to cloud storage.
-  factory Storage(http.Client client, String project) = _StorageImpl;
-
-  /// Create a cloud storage bucket.
-  ///
-  /// Creates a cloud storage bucket named [bucketName].
-  ///
-  /// The bucket ACL can be set by passing [predefinedAcl] or [acl]. If both
-  /// are passed the entries from [acl] will be followed by the expansion of
-  /// [predefinedAcl].
-  ///
-  /// Returns a [Future] which completes when the bucket has been created.
-  Future createBucket(String bucketName,
-      {PredefinedAcl predefinedAcl, Acl acl});
-
-  /// Delete a cloud storage bucket.
-  ///
-  /// Deletes the cloud storage bucket named [bucketName].
-  ///
-  /// If the bucket is not empty the operation will fail.
-  ///
-  /// The returned [Future] completes when the operation is finished.
-  Future deleteBucket(String bucketName);
-
-  /// Access bucket object operations.
-  ///
-  /// Instantiates a `Bucket` object referring to the bucket named [bucketName].
-  ///
-  /// When an object is created using the resulting `Bucket` an ACL will always
-  /// be set. If the object creation does not pass any explicit ACL information
-  /// a default ACL will be used.
-  ///
-  /// If the arguments [defaultPredefinedObjectAcl] or [defaultObjectAcl] are
-  /// passed they define the default ACL. If both are passed the entries from
-  /// [defaultObjectAcl] will be followed by the expansion of
-  /// [defaultPredefinedObjectAcl] when an object is created.
-  ///
-  /// Otherwise the default object ACL attached to the bucket will be used.
-  ///
-  /// Returns a `Bucket` instance.
-  Bucket bucket(String bucketName,
-      {PredefinedAcl defaultPredefinedObjectAcl, Acl defaultObjectAcl});
-
-  /// Check whether a cloud storage bucket exists.
-  ///
-  /// Checks whether the bucket named [bucketName] exists.
-  ///
-  /// Returns a [Future] which completes with `true` if the bucket exists.
-  Future<bool> bucketExists(String bucketName);
-
-  /// Get information on a bucket.
-  ///
-  /// Provides metadata information for the bucket named [bucketName].
-  ///
-  /// Returns a [Future] which completes with a `BucketInfo` object.
-  Future<BucketInfo> bucketInfo(String bucketName);
-
-  /// List names of all buckets.
-  ///
-  /// Returns a [Stream] of bucket names.
-  Stream<String> listBucketNames();
-
-  /// Start paging through names of all buckets.
-  ///
-  /// The maximum number of buckets in each page is specified in [pageSize].
-  ///
-  /// Returns a [Future] which completes with a `Page` object holding the
-  /// first page. Use the `Page` object to move to the next page of buckets.
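-  ///
-  /// A sketch of walking through all pages (assuming a `storage` instance):
-  ///
-  ///     var page = await storage.pageBucketNames(pageSize: 10);
-  ///     while (true) {
-  ///       page.items.forEach(print);
-  ///       if (page.isLast) break;
-  ///       page = await page.next();
-  ///     }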
-  Future<Page<String>> pageBucketNames({int pageSize = 50});
-
-  /// Copy an object.
-  ///
-  /// Copy object [src] to object [dest].
-  ///
-  /// The names of [src] and [dest] must be absolute.
-  Future copyObject(String src, String dest);
-}
-
-/// Information on a specific object.
-///
-/// This class provides access to information on an object. This includes
-/// both the properties which are provided by Cloud Storage (such as the
-/// MD5 hash) and the properties which can be changed (such as content type).
-///
-/// The properties provided by Cloud Storage are direct properties on this
-/// object.
-///
-/// The mutable properties are properties on the `metadata` property.
-abstract class ObjectInfo {
-  /// Name of the object.
-  String get name;
-
-  /// Length of the data.
-  int get length;
-
-  /// When this object was updated.
-  DateTime get updated;
-
-  /// Entity tag for the object.
-  String get etag;
-
-  /// MD5 hash of the object.
-  List<int> get md5Hash;
-
-  /// CRC32c checksum, as described in RFC 4960.
-  int get crc32CChecksum;
-
-  /// URL for direct download.
-  Uri get downloadLink;
-
-  /// Object generation.
-  ObjectGeneration get generation;
-
-  /// Additional metadata.
-  ObjectMetadata get metadata;
-}
-
-/// Generational information on an object.
-class ObjectGeneration {
-  /// Object generation.
-  final String objectGeneration;
-
-  /// Metadata generation.
-  final int metaGeneration;
-
-  const ObjectGeneration(this.objectGeneration, this.metaGeneration);
-}
-
-/// Access to object metadata.
-abstract class ObjectMetadata {
-  factory ObjectMetadata(
-      {Acl acl,
-      String contentType,
-      String contentEncoding,
-      String cacheControl,
-      String contentDisposition,
-      String contentLanguage,
-      Map<String, String> custom}) = _ObjectMetadata;
-
-  /// ACL.
-  Acl get acl;
-
-  /// `Content-Type` for this object.
-  String get contentType;
-
-  /// `Content-Encoding` for this object.
-  String get contentEncoding;
-
-  /// `Cache-Control` for this object.
-  String get cacheControl;
-
-  /// `Content-Disposition` for this object.
-  String get contentDisposition;
-
-  /// `Content-Language` for this object.
-  ///
-  /// The value of this field must conform to RFC 3282.
-  String get contentLanguage;
-
-  /// Custom metadata.
-  Map<String, String> get custom;
-
-  /// Create a copy of this object with some values replaced.
-  ///
-  // TODO: This cannot be used to set values to null.
-  ObjectMetadata replace(
-      {Acl acl,
-      String contentType,
-      String contentEncoding,
-      String cacheControl,
-      String contentDisposition,
-      String contentLanguage,
-      Map<String, String> custom});
-}
-
-/// Result from listing objects in a bucket.
-///
-/// Listing operates like a directory listing, despite the object
-/// namespace being flat.
-///
-/// See [Bucket.list] for information on how the hierarchical structure
-/// is determined.
-class BucketEntry {
-  /// Whether this is information on an object.
-  final bool isObject;
-
-  /// Name of object or directory.
-  final String name;
-
-  BucketEntry._object(this.name) : isObject = true;
-
-  BucketEntry._directory(this.name) : isObject = false;
-
-  /// Whether this is a prefix.
-  bool get isDirectory => !isObject;
-}
-
-/// Access to operations on a specific cloud storage bucket.
-abstract class Bucket {
-  /// Name of this bucket.
-  String get bucketName;
-
-  /// Absolute name of an object in this bucket. This includes the gs:// prefix.
-  String absoluteObjectName(String objectName);
-
-  /// Create a new object.
-  ///
-  /// Create an object named [objectName] in the bucket.
-  ///
-  /// If an object named [objectName] already exists this object will be
-  /// replaced.
-  ///
-  /// If the length of the data to write is known in advance this can be passed
-  /// as [length]. This can help to optimize the upload process.
-  ///
-  /// Additional metadata on the object can be passed either through the
-  /// `metadata` argument or through the specific named arguments
-  /// (such as `contentType`). Values passed through the specific named
-  /// arguments take precedence over the values in `metadata`.
-  ///
-  /// If [contentType] is not passed the default value of
-  /// `application/octet-stream` will be used.
-  ///
-  /// It is possible to set one of the predefined ACLs on the created object
-  /// using the [predefinedAcl] argument. If the [metadata] argument contains
-  /// an ACL as well, this ACL will be followed by the expansion of
-  /// [predefinedAcl].
-  ///
-  /// Returns a `StreamSink` where the object content can be written. When
-  /// the object content has been written the `StreamSink` completes with
-  /// an `ObjectInfo` instance with the information on the created object.
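-  ///
-  /// A sketch of writing string content (hypothetical object name; `utf8` is
-  /// from `dart:convert`):
-  ///
-  ///     var sink = bucket.write('hello.txt', contentType: 'text/plain');
-  ///     sink.add(utf8.encode('hello, world'));
-  ///     var info = await sink.close();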
-  StreamSink<List<int>> write(String objectName,
-      {int length,
-      ObjectMetadata metadata,
-      Acl acl,
-      PredefinedAcl predefinedAcl,
-      String contentType});
-
-  /// Create a new object in the bucket with the specified content.
-  ///
-  /// Writes [bytes] to the created object.
-  ///
-  /// See [write] for more information on the additional arguments.
-  ///
-  /// Returns a `Future` which completes with an `ObjectInfo` instance when
-  /// the object is written.
-  Future<ObjectInfo> writeBytes(String name, List<int> bytes,
-      {ObjectMetadata metadata,
-      Acl acl,
-      PredefinedAcl predefinedAcl,
-      String contentType});
-
-  /// Read object content as byte stream.
-  ///
-  /// If [offset] is provided, [length] must also be provided.
-  ///
-  /// If [length] is provided, it must be greater than `0`.
-  ///
-  /// If there is a problem accessing the file, a [DetailedApiRequestError] is
-  /// thrown.
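-  ///
-  /// A sketch collecting the whole object into memory (hypothetical object
-  /// name):
-  ///
-  ///     var bytes = <int>[];
-  ///     await for (var chunk in bucket.read('hello.txt')) {
-  ///       bytes.addAll(chunk);
-  ///     }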
-  Stream<List<int>> read(String objectName, {int offset, int length});
-
-  /// Look up object metadata.
-  ///
-  // TODO: More documentation
-  Future<ObjectInfo> info(String name);
-
-  /// Delete an object.
-  ///
-  // TODO: More documentation
-  Future delete(String name);
-
-  /// Update object metadata.
-  ///
-  // TODO: More documentation
-  Future updateMetadata(String objectName, ObjectMetadata metadata);
-
-  /// List objects in the bucket.
-  ///
-  /// Listing operates like a directory listing, despite the object
-  /// namespace being flat. The character `/` is used to separate
-  /// object names into directory components.
-  ///
-  /// Retrieves a list of objects and directory components starting
-  /// with [prefix].
-  ///
-  /// Returns a [Stream] of [BucketEntry]. Each element of the stream
-  /// represents either an object or a directory component.
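-  ///
-  /// A sketch printing the entries below a placeholder prefix:
-  ///
-  ///     await for (var entry in bucket.list(prefix: 'photos/')) {
-  ///       var kind = entry.isDirectory ? 'directory' : 'object';
-  ///       print('$kind: ${entry.name}');
-  ///     }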
-  Stream<BucketEntry> list({String prefix});
-
-  /// Start paging through objects in the bucket.
-  ///
-  /// The maximum number of objects in each page is specified in [pageSize].
-  ///
-  /// See [list] for more information on the other arguments.
-  ///
-  /// Returns a `Future` which completes with a `Page` object holding the
-  /// first page. Use the `Page` object to move to the next page.
-  Future<Page<BucketEntry>> page({String prefix, int pageSize = 50});
-}
diff --git a/gcloud/pubspec.yaml b/gcloud/pubspec.yaml
deleted file mode 100644
index a249319..0000000
--- a/gcloud/pubspec.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: gcloud
-version: 0.6.0+4
-author: Dart Team <misc@dartlang.org>
-description: |
-  High level idiomatic Dart API for Google Cloud Storage, Pub-Sub and Datastore.
-homepage: https://github.com/dart-lang/gcloud
-
-environment:
-  sdk: '>=2.0.0 <3.0.0'
-
-dependencies:
-  _discoveryapis_commons: ^0.1.6+1
-  googleapis: '>=0.50.2 <1.0.0'
-  http: '>=0.11.0 <0.13.0'
-
-dev_dependencies:
-  googleapis_auth: '>=0.2.3 <0.3.0'
-  http_parser: '>=2.0.0 <4.0.0'
-  mime: '>=0.9.0+3 <0.10.0'
-  pedantic: ^1.4.0
-  test: ^1.5.1