Merge pull request #2272 from weiznich/feature/pg_range_ops

Add a `contains` method for postgres range types
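
Before the diff itself, a brief hedged sketch of what the new `contains` support enables. The table, column, and function below are illustrative assumptions rather than code from this PR; the method is expected to translate to PostgreSQL's `@>` range operator.

```rust
use diesel::prelude::*;

// Hypothetical table with an `int4range` column, for illustration only
// (assumes the `postgres` feature is enabled).
diesel::table! {
    events (id) {
        id -> Integer,
        period -> Range<Integer>,
    }
}

// Finds all events whose `period` range contains the given point.
// `period.contains(point)` is expected to render as `"events"."period" @> $1`.
fn events_containing(conn: &PgConnection, point: i32) -> QueryResult<Vec<i32>> {
    use self::events::dsl::*;
    events.filter(period.contains(point)).select(id).load(conn)
}
```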
diff --git a/.editorconfig b/.editorconfig
index 3c1f41b..d5188ec 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -23,3 +23,7 @@
 
 [*.md]
 trim_trailing_whitespace = false
+
+[*.{yml,yaml}]
+indent_style = space
+indent_size = 2
diff --git a/.env.sample b/.env.sample
index 8e4662a..c3a6e36 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,7 +1,7 @@
 # The database to use when testing against Postgres.
-PG_DATABASE_URL=postgresql://postgres@localhost:5432/diesel_test
+PG_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/diesel_test
 # The database to use when running the Postgres examples during testing.
-PG_EXAMPLE_DATABASE_URL=postgresql://postgres@localhost:5432/diesel_example
+PG_EXAMPLE_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/diesel_example
 
 # The database to use when testing against MySQL.
 MYSQL_DATABASE_URL=mysql://root@127.0.0.1:3306/diesel_test
diff --git a/.travis.yml b/.travis.yml
index 030239d..ea37022 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,11 +12,44 @@
   - rm -rf /home/travis/.cargo/registry
 addons:
   postgresql: '9.5'
-  apt:
-    packages:
-      - libsqlite3-dev
 services:
   - mysql
+before_install:
+  # build sqlite3 3.31.1 from source (>= 3.24.0 is needed for upsert support)
+  - |
+    if [[ "$BACKEND" == sqlite ]]; then
+      (sudo apt-get update) &&
+      (wget --quiet -c https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz) &&
+      (tar zxf sqlite-autoconf-3310100.tar.gz;) &&
+      (cd sqlite-autoconf-3310100; \
+       CFLAGS="$CFLAGS -O2 -fno-strict-aliasing \
+                   -DSQLITE_DEFAULT_FOREIGN_KEYS=1 \
+                   -DSQLITE_SECURE_DELETE \
+                   -DSQLITE_ENABLE_COLUMN_METADATA \
+                   -DSQLITE_ENABLE_FTS3_PARENTHESIS \
+                   -DSQLITE_ENABLE_RTREE=1 \
+                   -DSQLITE_SOUNDEX=1 \
+                   -DSQLITE_ENABLE_UNLOCK_NOTIFY \
+                   -DSQLITE_OMIT_LOOKASIDE=1 \
+                   -DSQLITE_ENABLE_DBSTAT_VTAB \
+                   -DSQLITE_ENABLE_UPDATE_DELETE_LIMIT=1 \
+                   -DSQLITE_ENABLE_LOAD_EXTENSION \
+                   -DSQLITE_ENABLE_JSON1 \
+                   -DSQLITE_LIKE_DOESNT_MATCH_BLOBS \
+                   -DSQLITE_THREADSAFE=1 \
+                   -DSQLITE_ENABLE_FTS3_TOKENIZER=1 \
+                   -DSQLITE_MAX_SCHEMA_RETRY=25 \
+                   -DSQLITE_ENABLE_PREUPDATE_HOOK \
+                   -DSQLITE_ENABLE_SESSION \
+                   -DSQLITE_ENABLE_STMTVTAB \
+                   -DSQLITE_MAX_VARIABLE_NUMBER=250000" \
+       ./configure --prefix=/usr \
+                   --enable-threadsafe \
+                   --enable-dynamic-extensions \
+                   --libdir=/usr/lib/x86_64-linux-gnu \
+                   --libexecdir=/usr/lib/x86_64-linux-gnu/sqlite3) &&
+      (cd sqlite-autoconf-3310100; sudo make; sudo make install)
+    fi
 before_script:
   - pip install 'travis-cargo<0.2' --user
   - export PATH=$HOME/.local/bin:$PATH
@@ -49,12 +82,12 @@
   allow_failures:
     - rust: nightly
   include:
-  - rust: nightly-2019-08-01
+  - rust: nightly-2020-05-01
     name: "Compile tests"
     env: RUSTFLAGS="--cap-lints=warn"
     script:
     - (cd diesel_compile_tests && cargo test)
-  - rust: 1.37.0
+  - rust: 1.40.0
     name: "Rustfmt && Clippy"
     script:
     - rustup component add rustfmt clippy
@@ -66,8 +99,8 @@
     - SQLITE_DATABASE_URL=/tmp/test.db
     script:
     - (cd diesel_cli && cargo test --no-default-features --features "sqlite-bundled")
-  - rust: 1.37.0
-     name: "Minimal supported rust version == 1.37.0"
+  - rust: 1.40.0
+    name: "Minimal supported rust version == 1.40.0"
     script:
     - cargo check --all
 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f335e0..b89b0f5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,8 +8,6 @@
 
 ### Added
 
-* `NonAggregate` can now be derived for simple cases.
-
 * `Connection` and `SimpleConnection` traits are implemented for a broader range
   of `r2d2::PooledConnection<M>` types when the `r2d2` feature is enabled.
 
@@ -27,10 +25,25 @@
 * The `MacAddr` SQL type can now be used without enabling the `network-address`
   feature.
 
+* Added support for SQLite's `UPSERT`.
+  This feature requires SQLite 3.24.0 or newer.
+
+
+* Added the ability to create custom aggregate functions in SQLite.
+
+* Multiple aggregate expressions can now appear together in the same select
+  clause. See [the upgrade notes](#2-0-0-upgrade-non-aggregate) for details.
+
+* `ValidGrouping` has been added to represent whether an expression is valid for
+  a given group by clause, and whether or not it's aggregate. It replaces the
+  functionality of `NonAggregate`. See [the upgrade
+  notes](#2-0-0-upgrade-non-aggregate) for details.
+
+
 ### Removed
 
 * All previously deprecated items have been removed.
-* Support for uuid version < 0.7.0 has been removed
+* Support for uuid version < 0.7.0 has been removed.
 
 ### Changed
 
@@ -48,37 +61,125 @@
   `Mysql::TypeMetadata`, you will need to take the new struct
   `MysqlTypeMetadata` instead.
 
-* The minimal officially supported rustc version is now 1.36.0
+* The minimal officially supported rustc version is now 1.40.0
 
 * The `RawValue` types for the `Mysql` and `Postgresql` backends were changed
   from `[u8]` to distinct opaque types. If you used the concrete `RawValue` type
   somewhere, you need to change it to `mysql::MysqlValue` or `pg::PgValue`.
   For the postgres backend, type information was additionally added to the
   `RawValue` type. This allows container types to deserialize `RawValues` dynamically.
+
 * The uuidv07 feature was renamed to uuid, due to the removal of support for older uuid versions.
 
+* Boxed queries (constructed from `.into_boxed()`) are now `Send`.
+
+* The handling of mixed aggregate values is more robust. Invalid queries such as
+  `.select(max(id) + other_column)` are now correctly rejected, and valid
+  queries such as `.select((count_star(), max(other_column)))` are now correctly
+  accepted. For more details, see [the upgrade notes](#2-0-0-upgrade-non-aggregate).
+
+* `NonAggregate` is now a trait alias for `ValidGrouping<()>` for expressions
+  that are not aggregate. On stable this is a normal trait with a blanket impl,
+  but it should never be implemented directly. With the `unstable` feature, it
+  will use trait aliases which prevent manual implementations.
+
+  Due to language limitations, we cannot make the new trait alias by itself
+  represent everything it used to, so in some rare cases code changes may be
+  required. See [the upgrade notes](#2-0-0-upgrade-non-aggregate) for details.
+
+* Various `__NonExhaustive` variants in different (error) enums have been replaced
+  with `#[non_exhaustive]`. If you matched on one of those variants explicitly, you
+  need to introduce a wildcard match instead.
+
 ### Fixed
 
 * Many types were incorrectly considered non-aggregate when they should not
   have been. All types in Diesel are now correctly only considered
   non-aggregate if their parts are.
 
+* Offset clauses without limit clauses resulted in invalid SQL on the MySQL and
+  SQLite backends, as neither supports an offset clause without a preceding
+  limit clause. For those backends Diesel now generates a fake limit clause when
+  no explicit limit clause is given. As a consequence of this change, generic
+  query code may require additional trait bounds, as requested by the compiler.
+  Third-party backends are now required to explicitly provide `QueryFragment`
+  impls for `LimitOffsetClause<L, O>`.
+
 * Nullability requirements are now properly enforced for nested joins.
   Previously, only the rules for the outer-most join were considered. For
   example, `users.left_join(posts).left_join(comments)` would allow selecting
   any columns from `posts`. That will now fail to compile, and any selections
   from `posts` will need to be made explicitly nullable.
 
+* Diesel CLI will now look for `diesel.toml` to determine the project root
+  before looking for `Cargo.toml`.
+
+* Any relative paths in `diesel.toml` are now treated as relative to the
+  project root (the directory containing either `diesel.toml` or `Cargo.toml`),
+  rather than to the current working directory, for all directories inside
+  the same project.
+
 ### Deprecated
 
 * `diesel_(prefix|postfix|infix)_operator!` have been deprecated. These macros
   are now available without the `diesel_` prefix. With Rust 2018 they can be
   invoked as `diesel::infix_operator!` instead.
 
+* `diesel::pg::upsert` has been deprecated to support upsert queries on more than one backend.
+  Please use `diesel::upsert` instead.
 
 
+### Upgrade Notes
+
+#### Replacement of `NonAggregate` with `ValidGrouping`
+<a name="2-0-0-upgrade-non-aggregate"></a>
+
+FIXME: This should probably be on the website, but I wanted to document it in
+the PR adding the changes.
+
+Key points:
+
+- Rules for aggregation are now correctly enforced. They match the semantics of
+  PG or MySQL with `ONLY_FULL_GROUP_BY` enabled.
+  - As before, `sql` is the escape hatch if needed.
+  - MySQL users can use `ANY_VALUE`, PG users can use `DISTINCT ON`. Also
+    consider using max/min/etc to get deterministic values.
+- Any `impl NonAggregate` must be replaced with `impl ValidGrouping`
+- For most code, `T: NonAggregate` should continue to work. Unless you're
+  getting a compiler error, you most likely don't need to change it.
+- The full equivalent of what `T: NonAggregate` used to mean is:
+
+      where
+          T: ValidGrouping<()>,
+          T::IsAggregate: MixedAggregates<is_aggregate::No, Output = is_aggregate::No>,
+          is_aggregate::No: MixedAggregates<T::IsAggregate, Output = is_aggregate::No>,
+
+- With `feature = "unstable"`, `T: NonAggregate` implies the first two bounds,
+  but not the third. On stable only the first bound is implied. This is a
+  language limitation.
+- `T: NonAggregate` can still be passed everywhere it could before, but `T:
+  NonAggregate` no longer implies `(OtherType, T): NonAggregate`.
+  - With `feature = "unstable"`, `(T, OtherType): NonAggregate` is still implied.
+
 [2-0-migration]: FIXME write a migration guide
 
+## [1.4.4] - 2020-03-22
+
+### Fixed
+
+* Updated several dependencies
+* Fixed a bug with printing embedded migrations
+
+## [1.4.3] - 2019-10-11
+
+### Fixed
+
+* Updated several dependencies
+* Fixed an issue where the PostgreSQL backend relied on implementation-defined behaviour
+* Fixed an issue where rustdoc failed to build the documentation
+* `diesel_derives` and `diesel_migrations` have been updated to syn 1.0
+
+
 ## [1.4.2] - 2019-03-19
 
 ### Fixed
@@ -1685,3 +1786,5 @@
 [1.4.0]: https://github.com/diesel-rs/diesel/compare/v1.3.0...v1.4.0
 [1.4.1]: https://github.com/diesel-rs/diesel/compare/v1.4.0...v1.4.1
 [1.4.2]: https://github.com/diesel-rs/diesel/compare/v1.4.1...v1.4.2
+[1.4.3]: https://github.com/diesel-rs/diesel/compare/v1.4.2...v1.4.3
+[1.4.4]: https://github.com/diesel-rs/diesel/compare/v1.4.3...v1.4.4
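
As a rough illustration of the upsert support and the `diesel::pg::upsert` to `diesel::upsert` move mentioned in the changelog above, here is a hedged sketch with hypothetical table and column names. On SQLite this requires version 3.24.0 or newer, which is why the CI changes above build a recent SQLite from source.

```rust
use diesel::prelude::*;
use diesel::upsert::excluded;

// Hypothetical table used only for this sketch.
diesel::table! {
    users (id) {
        id -> Integer,
        name -> Text,
    }
}

// Inserts a row, or updates `name` if a row with the same `id` already exists.
fn upsert_user(conn: &SqliteConnection, user_id: i32, user_name: &str) -> QueryResult<usize> {
    use self::users::dsl::*;

    diesel::insert_into(users)
        .values((id.eq(user_id), name.eq(user_name)))
        .on_conflict(id)
        .do_update()
        .set(name.eq(excluded(name)))
        .execute(conn)
}
```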
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2709ae9..c0d35c2 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -93,7 +93,7 @@
      ! echo 'CREATE DATABASE diesel_test; CREATE DATABASE diesel_unit_test;' | docker exec -i diesel.mysql mysql
    do sleep 1; done
 
-   docker run -d --name diesel.postgres -p 5432:5432 postgres
+   docker run -d --name diesel.postgres -p 5432:5432 -e POSTGRES_PASSWORD=postgres postgres
    while
      sleep 1;
      ! echo 'CREATE DATABASE diesel_test;' | docker exec -i diesel.postgres psql -U postgres
diff --git a/Cargo.toml b/Cargo.toml
index 8331314..705ec6b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -24,10 +24,3 @@
     "examples/sqlite/getting_started_step_2",
     "examples/sqlite/getting_started_step_3",
 ]
-
-[replace]
-"diesel:1.4.3" = { path = "diesel" }
-"diesel_derives:1.4.1" = { path = "diesel_derives" }
-"diesel_migrations:1.4.0" = { path = "diesel_migrations" }
-"migrations_internals:1.4.0" = { path = "diesel_migrations/migrations_internals" }
-"migrations_macros:1.4.1" = { path = "diesel_migrations/migrations_macros" }
diff --git a/README.md b/README.md
index c9d05a8..b855929 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,17 @@
 Rust's type system to create a low overhead query builder that "feels like
 Rust."
 
+Supported databases:
+1. [PostgreSQL](https://docs.diesel.rs/diesel/pg/index.html)
+2. [MySQL](https://docs.diesel.rs/diesel/mysql/index.html)
+3. [SQLite](https://docs.diesel.rs/diesel/sqlite/index.html)
+
+You can configure the database backend in `Cargo.toml`:
+```toml
+[dependencies]
+diesel = { version = "<version>", features = ["<postgres|mysql|sqlite>"] }
+```
+
 ## Getting Started
 
 Find our extensive Getting Started tutorial at
diff --git a/_build/azure-pipelines-template.yml b/_build/azure-pipelines-template.yml
index 068376c..23625c5 100644
--- a/_build/azure-pipelines-template.yml
+++ b/_build/azure-pipelines-template.yml
@@ -16,6 +16,9 @@
         rustup_toolchain: nightly
   steps:
   - ${{ parameters.setup }}
+  - template: install-sqlite3.yml
+    parameters:
+      name: ${{parameters.name}}
   - template: install-rust.yml
     parameters:
       platform: ${{parameters.name}}
diff --git a/_build/install-sqlite3.yml b/_build/install-sqlite3.yml
new file mode 100644
index 0000000..4c7ecec
--- /dev/null
+++ b/_build/install-sqlite3.yml
@@ -0,0 +1,58 @@
+steps:
+  - ${{ if eq(parameters.name, 'macOS_sqlite') }}:
+    - script: |
+        brew update
+        brew reinstall sqlite pkg-config
+        echo "##vso[task.setvariable variable=PATH;]/usr/local/opt/sqlite/bin:$PATH"
+        echo "##vso[task.setvariable variable=LDFLAGS;]-L/usr/local/opt/sqlite/lib"
+        echo "##vso[task.setvariable variable=CPPFLAGS;]-I/usr/local/opt/sqlite/include"
+        echo "##vso[task.setvariable variable=PKG_CONFIG_PATH;]/usr/local/opt/sqlite/lib/pkgconfig"
+      displayName: Install sqlite3 (macOS)
+  - ${{ if eq(parameters.name, 'Linux_sqlite') }}:
+    - script: |
+        sudo apt-get update
+        sudo apt-get install curl libpq-dev libmysqlclient-dev
+        curl -fsS --retry 3 -o sqlite-autoconf-3310100.tar.gz https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz
+        tar zxf sqlite-autoconf-3310100.tar.gz
+        cd sqlite-autoconf-3310100
+        CFLAGS="$CFLAGS -O2 -fno-strict-aliasing \
+                    -DSQLITE_DEFAULT_FOREIGN_KEYS=1 \
+                    -DSQLITE_SECURE_DELETE \
+                    -DSQLITE_ENABLE_COLUMN_METADATA \
+                    -DSQLITE_ENABLE_FTS3_PARENTHESIS \
+                    -DSQLITE_ENABLE_RTREE=1 \
+                    -DSQLITE_SOUNDEX=1 \
+                    -DSQLITE_ENABLE_UNLOCK_NOTIFY \
+                    -DSQLITE_OMIT_LOOKASIDE=1 \
+                    -DSQLITE_ENABLE_DBSTAT_VTAB \
+                    -DSQLITE_ENABLE_UPDATE_DELETE_LIMIT=1 \
+                    -DSQLITE_ENABLE_LOAD_EXTENSION \
+                    -DSQLITE_ENABLE_JSON1 \
+                    -DSQLITE_LIKE_DOESNT_MATCH_BLOBS \
+                    -DSQLITE_THREADSAFE=1 \
+                    -DSQLITE_ENABLE_FTS3_TOKENIZER=1 \
+                    -DSQLITE_MAX_SCHEMA_RETRY=25 \
+                    -DSQLITE_ENABLE_PREUPDATE_HOOK \
+                    -DSQLITE_ENABLE_SESSION \
+                    -DSQLITE_ENABLE_STMTVTAB \
+                    -DSQLITE_MAX_VARIABLE_NUMBER=250000" \
+        ./configure --prefix=/usr \
+                    --enable-threadsafe \
+                    --enable-dynamic-extensions \
+                    --libdir=/usr/lib/x86_64-linux-gnu \
+                    --libexecdir=/usr/lib/x86_64-linux-gnu/sqlite3
+        sudo make
+        sudo make install
+      displayName: Install sqlite3 (Linux)
+  - ${{ if eq(parameters.name, 'Windows_sqlite') }}:
+    - script: |
+        choco install 7zip
+        mkdir C:\sqlite
+        CD /D C:\sqlite
+        curl -fsS --retry 3 --retry-connrefused -o sqlite3.zip https://sqlite.org/2020/sqlite-dll-win64-x64-3310100.zip
+        7z e sqlite3.zip -y
+        call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
+        lib /machine:x64 /def:sqlite3.def /out:sqlite3.lib
+        set PATH=%PATH%;C:\sqlite
+        echo "##vso[task.setvariable variable=PATH;]%PATH%;C:\sqlite"
+      displayName: Install sqlite3 (Windows)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 60a9ab9..0185f04 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -7,31 +7,31 @@
   parameters:
     name: macOS_sqlite
     displayName: macOS SQLite
-    vmImage: macOS-10.13
+    vmImage: macOS-10.15
     variables:
       BACKEND: sqlite
       SQLITE_DATABASE_URL: /tmp/test.db
     setup:
-      - bash: |
-          brew update &&
-          brew install sqlite
-        displayName: Install sqlite
+      - script: |
+          echo "Skip this step"
 
 - template: _build/azure-pipelines-template.yml
   parameters:
     name: macOS_postgres
     displayName: macOS PostgreSQL
-    vmImage: macOS-10.13
+    vmImage: macOS-10.15
     variables:
       BACKEND: postgres
       PG_DATABASE_URL: postgres://postgres@localhost/
       PG_EXAMPLE_DATABASE_URL: postgres://postgres@localhost/diesel_example
     setup:
-      - bash: |
-          brew update &&
-          brew install postgres &&
-          brew services start postgres &&
-          sleep 3 &&
+      - script: |
+          brew update
+          brew uninstall --ignore-dependencies libpq
+          brew install postgres
+          /usr/local/Cellar/postgresql/12.2/bin/initdb --locale=C -E UTF-8 /usr/local/var/postgres
+          brew services start postgresql
+          sleep 3
           /usr/local/opt/postgres/bin/createuser -s postgres
         displayName: Install postgresql
 
@@ -39,7 +39,7 @@
   parameters:
     name: macOS_mysql
     displayName: macOS MySQL
-    vmImage: macOS-10.13
+    vmImage: macOS-10.15
     variables:
       BACKEND: mysql
       MYSQL_DATABASE_URL: mysql://root@localhost/diesel_test
@@ -48,7 +48,7 @@
       RUST_TEST_THREADS: 1
       MYSQLCLIENT_LIB_DIR: /usr/local/Cellar/mysql/8.0.19/lib
     setup:
-      - bash: |
+      - script: |
           brew update &&
           brew install mysql &&
           brew services start mysql &&
@@ -66,10 +66,8 @@
       BACKEND: sqlite
       SQLITE_DATABASE_URL: /tmp/test.db
     setup:
-      - bash: |
-          sudo apt-get update &&
-          sudo apt-get -y install sqlite3 libsqlite3-dev
-        displayName: Install sqlite
+      - script: |
+          echo "Skip this step"
 
 - template: _build/azure-pipelines-template.yml
   parameters:
@@ -81,7 +79,7 @@
       PG_DATABASE_URL: postgres://postgres:postgres@localhost/
       PG_EXAMPLE_DATABASE_URL: postgres://postgres:postgres@localhost/diesel_example
     setup:
-      - bash: |
+      - script: |
           sudo apt-get update &&
           sudo apt-get -y install postgresql libpq-dev &&
           echo "host    all             all             127.0.0.1/32            md5" > sudo tee -a /etc/postgresql/9.5/main/pg_hba.conf &&
@@ -102,9 +100,8 @@
       MYSQL_UNIT_TEST_DATABASE_URL: mysql://root:root@localhost/diesel_unit_test
       RUST_TEST_THREADS: 1
     setup:
-      - bash: |
-          sudo apt-get update &&
-          sudo apt-get -y install mysql-server libmysqlclient-dev &&
+      - script: |
+          sudo systemctl start mysql.service &&
           mysql -e "create database diesel_test; create database diesel_unit_test; grant all on \`diesel_%\`.* to 'root'@'localhost';" -uroot -proot
         displayName: Install mysql
 
@@ -119,16 +116,7 @@
       SQLITE3_LIB_DIR: C:\sqlite
     setup:
       - script: |
-          choco install 7zip
-          mkdir C:\sqlite
-          CD /D C:\sqlite
-          curl -fsS --retry 3 --retry-connrefused -o sqlite3.zip https://sqlite.org/2017/sqlite-dll-win64-x64-3160200.zip
-          7z e sqlite3.zip -y
-          call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
-          lib /machine:x64 /def:sqlite3.def /out:sqlite3.lib
-          set PATH=%PATH%;C:\sqlite
-          echo "##vso[task.setvariable variable=PATH;]%PATH%;C:\sqlite"
-        displayName: Install sqlite
+          echo "Skip this step"
 
 - template: _build/azure-pipelines-template.yml
   parameters:
@@ -179,7 +167,7 @@
     - template: _build/install-rust.yml
       parameters:
         platform: Linux
-        rust_version: nightly-2019-08-01
+        rust_version: nightly-2020-05-01
     - bash: |
         sudo apt-get update &&
         sudo apt-get -y install libsqlite3-dev libpq-dev libmysqlclient-dev
@@ -198,7 +186,7 @@
     - template: _build/install-rust.yml
       parameters:
         platform: Linux
-        rust_version: 1.36.0
+        rust_version: 1.40.0
     - bash: |
         sudo apt-get update &&
         sudo apt-get -y install libsqlite3-dev libpq-dev libmysqlclient-dev
@@ -239,14 +227,17 @@
   pool:
     vmImage: ubuntu-16.04
   steps:
+    - template: _build/install-sqlite3.yml
+      parameters:
+          name: Linux_sqlite
     - template: _build/install-rust.yml
       parameters:
         platform: Linux
-        rust_version: 1.36.0
+        rust_version: 1.40.0
     - bash: |
         sudo apt-get update &&
-        sudo apt-get -y install libsqlite3-dev libpq-dev libmysqlclient-dev
+        sudo apt-get install libpq-dev libmysqlclient-dev
       displayName: Install build dependencies
     - bash: |
         cargo check --all
-      displayName: Check building with rust 1.36.0
+      displayName: Check building with rust 1.40.0
diff --git a/bin/test b/bin/test
index 809e17d..97ad1f0 100755
--- a/bin/test
+++ b/bin/test
@@ -40,5 +40,6 @@
   (cd diesel_compile_tests && cargo test $*)
   (cd diesel_migrations/migrations_internals && cargo test $*)
   (cd diesel_migrations/migrations_macros && cargo test $*)
+  export BACKEND="mysql sqlite postgres"
   (cd examples && ./test_all $*)
 fi;
diff --git a/diesel/Cargo.toml b/diesel/Cargo.toml
index 5400b18..45679e8 100644
--- a/diesel/Cargo.toml
+++ b/diesel/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "diesel"
-version = "1.4.3"
+version = "2.0.0"
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 license = "MIT OR Apache-2.0"
 description = "A safe, extensible ORM and Query Builder for PostgreSQL, SQLite, and MySQL"
@@ -14,10 +14,9 @@
 
 [dependencies]
 byteorder = "1.0"
-diesel_derives = "~1.4.0"
 chrono = { version = "0.4", optional = true }
 libc = { version = "0.2.0", optional = true }
-libsqlite3-sys = { version = ">=0.8.0, <0.17.0", optional = true, features = ["min_sqlite_version_3_7_16"] }
+libsqlite3-sys = { version = ">=0.8.0, <0.18.0", optional = true, features = ["min_sqlite_version_3_7_16"] }
 mysqlclient-sys = { version = ">=0.1.0, <0.3.0", optional = true }
 pq-sys = { version = ">=0.3.0, <0.5.0", optional = true }
 quickcheck = { version = "0.4", optional = true }
@@ -34,6 +33,10 @@
 bitflags = { version = "1.0", optional = true }
 r2d2 = { version = ">= 0.8, < 0.9", optional = true }
 
+[dependencies.diesel_derives]
+version = "~2.0.0"
+path = "../diesel_derives"
+
 [dev-dependencies]
 cfg-if = "0.1.0"
 dotenv = ">=0.8, <0.11"
diff --git a/diesel/src/associations/belongs_to.rs b/diesel/src/associations/belongs_to.rs
index 269fa00..d6e76e4 100644
--- a/diesel/src/associations/belongs_to.rs
+++ b/diesel/src/associations/belongs_to.rs
@@ -46,7 +46,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::{posts, users};
 /// #
diff --git a/diesel/src/associations/mod.rs b/diesel/src/associations/mod.rs
index 947af39..24850b4 100644
--- a/diesel/src/associations/mod.rs
+++ b/diesel/src/associations/mod.rs
@@ -5,7 +5,6 @@
 //! Unlike other ORMs, Diesel has no concept of `#[has_many]`
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! use schema::{posts, users};
 //!
@@ -54,7 +53,6 @@
 //! `unrestricted_attribute_tokens` is stable.
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! # use schema::{posts, users};
 //! # use std::borrow::Cow;
@@ -98,7 +96,6 @@
 //! [`belonging_to`]: ../query_dsl/trait.BelongingToDsl.html#tymethod.belonging_to
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! # use schema::users;
 //! # use schema::posts;
@@ -153,7 +150,6 @@
 //! [`belonging_to`]: ../query_dsl/trait.BelongingToDsl.html#tymethod.belonging_to
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! # use schema::{posts, users};
 //! #
@@ -208,7 +204,6 @@
 //! and it will be combined with its parent.
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! # use schema::{posts, users};
 //! #
@@ -270,7 +265,6 @@
 //! to make each line a bit more clear.
 //!
 //! ```rust
-//! # #[macro_use] extern crate diesel;
 //! # include!("../doctest_setup.rs");
 //! # use schema::{users, posts, comments};
 //! #
@@ -361,6 +355,9 @@
 
 pub use self::belongs_to::{BelongsTo, GroupedBy};
 
+#[doc(inline)]
+pub use diesel_derives::Associations;
+
 /// This trait indicates that a struct is associated with a single database table.
 ///
 /// This trait is implemented by structs which implement `Identifiable`,
@@ -389,25 +386,8 @@
 /// `update(YourStruct::table().find(&your_struct.primary_key())`).
 ///
 /// This trait is usually implemented on a reference to a struct,
-/// not the struct itself.
+/// not on the struct itself. It can be [derived](derive.Identifiable.html).
 ///
-/// ### Deriving
-///
-/// This trait can be automatically derived by adding `#[derive(Identifiable)]`
-/// to your struct.
-/// By default, the "id" field is assumed to be a single field called `id`.
-/// If it's not, you can put `#[primary_key(your_id)]` on your struct.
-/// If you have a composite primary key, the syntax is `#[primary_key(id1, id2)]`.
-///
-/// By default, `#[derive(Identifiable)]` will assume that your table
-/// name is the plural form of your struct name.
-/// Diesel uses very simple pluralization rules.
-/// It only adds an `s` to the end, and converts `CamelCase` to `snake_case`.
-/// If your table name does not follow this convention
-/// or the plural form isn't just an `s`,
-/// you can specify the table name with `#[table_name = "some_table_name"]`.
-/// Our rules for inferring table names is considered public API.
-/// It will never change without a major version bump.
 pub trait Identifiable: HasTable {
     /// The type of this struct's identifier.
     ///
@@ -428,3 +408,6 @@
     /// so that we have a lifetime to use for `Id`.
     fn id(self) -> Self::Id;
 }
+
+#[doc(inline)]
+pub use diesel_derives::Identifiable;
diff --git a/diesel/src/backend.rs b/diesel/src/backend.rs
index d5c4293..a08a0db 100644
--- a/diesel/src/backend.rs
+++ b/diesel/src/backend.rs
@@ -72,6 +72,8 @@
 
 /// Does this backend support `RETURNING` clauses?
 pub trait SupportsReturningClause {}
+/// Does this backend support the `ON CONFLICT` clause?
+pub trait SupportsOnConflictClause {}
 /// Does this backend support the bare `DEFAULT` keyword?
 pub trait SupportsDefaultKeyword {}
 /// Does this backend use the standard `SAVEPOINT` syntax?
diff --git a/diesel/src/connection/mod.rs b/diesel/src/connection/mod.rs
index 70bf15d..328b813 100644
--- a/diesel/src/connection/mod.rs
+++ b/diesel/src/connection/mod.rs
@@ -45,10 +45,14 @@
     /// If there is already an open transaction,
     /// savepoints will be used instead.
     ///
+    /// If the transaction fails to commit due to a `SerializationFailure` or a
+    /// `ReadOnlyTransaction`, a rollback will be attempted. If the rollback succeeds,
+    /// the original error will be returned; otherwise, the error generated by the rollback
+    /// will be returned. In the second case the connection should be considered broken,
+    /// as it contains an uncommitted, unabortable open transaction.
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// use diesel::result::Error;
     ///
@@ -121,7 +125,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// use diesel::result::Error;
     ///
diff --git a/diesel/src/connection/transaction_manager.rs b/diesel/src/connection/transaction_manager.rs
index 0857eb7..c9538c9 100644
--- a/diesel/src/connection/transaction_manager.rs
+++ b/diesel/src/connection/transaction_manager.rs
@@ -1,6 +1,6 @@
 use crate::backend::UsesAnsiSavepointSyntax;
 use crate::connection::{Connection, SimpleConnection};
-use crate::result::QueryResult;
+use crate::result::{DatabaseErrorKind, Error, QueryResult};
 
 /// Manages the internal transaction state for a connection.
 ///
@@ -110,12 +110,28 @@
         )
     }
 
+    /// If the transaction fails to commit due to a `SerializationFailure` or a
+    /// `ReadOnlyTransaction`, a rollback will be attempted. If the rollback succeeds,
+    /// the original error will be returned; otherwise, the error generated by the rollback
+    /// will be returned. In the second case the connection should be considered broken,
+    /// as it contains an uncommitted, unabortable open transaction.
     fn commit_transaction(&self, conn: &Conn) -> QueryResult<()> {
         let transaction_depth = self.transaction_depth.get();
         self.change_transaction_depth(
             -1,
             if transaction_depth <= 1 {
-                conn.batch_execute("COMMIT")
+                conn.batch_execute("COMMIT").or_else(|err| {
+                    // When any of these kinds of error happen on `COMMIT`, it is expected
+                    // that a `ROLLBACK` would succeed, leaving the transaction in a non-broken state.
+                    // If there are other such errors, it is fine to add them here.
+                    match err {
+                        Error::DatabaseError(DatabaseErrorKind::SerializationFailure, _)
+                        | Error::DatabaseError(DatabaseErrorKind::ReadOnlyTransaction, _) => {
+                            conn.batch_execute("ROLLBACK").and(Err(err))
+                        }
+                        other_err => Err(other_err),
+                    }
+                })
             } else {
                 conn.batch_execute(&format!(
                     "RELEASE SAVEPOINT diesel_savepoint_{}",
diff --git a/diesel/src/deserialize.rs b/diesel/src/deserialize.rs
index bc6327b..27bbf21 100644
--- a/diesel/src/deserialize.rs
+++ b/diesel/src/deserialize.rs
@@ -19,28 +19,13 @@
 /// trait is to convert from a tuple of Rust values that have been deserialized
 /// into your struct.
 ///
-/// # Deriving
-///
-/// This trait can be derived automatically using `#[derive(Queryable)]`. This
-/// trait can only be derived for structs, not enums.
-///
-/// When this trait is derived, it will assume that the order of fields on your
-/// struct match the order of the fields in the query. This means that field
-/// order is significant if you are using `#[derive(Queryable)]`. Field name has
-/// no effect.
-///
-/// To provide custom deserialization behavior for a field, you can use
-/// `#[diesel(deserialize_as = "Type")]`. If this attribute is present, Diesel
-/// will deserialize into that type, rather than the type on your struct and
-/// call `.into` to convert it. This can be used to add custom behavior for a
-/// single field, or use types that are otherwise unsupported by Diesel.
+/// This trait can be [derived](derive.Queryable.html).
 ///
 /// # Examples
 ///
 /// If we just want to map a query to our struct, we can use `derive`.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("doctest_setup.rs");
 /// #
 /// #[derive(Queryable, PartialEq, Debug)]
@@ -67,7 +52,6 @@
 /// `deserialize_as` to use a different implementation.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("doctest_setup.rs");
 /// #
 /// # use schema::users;
@@ -118,7 +102,6 @@
 /// Alternatively, we can implement the trait for our struct manually.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("doctest_setup.rs");
 /// #
 /// use schema::users;
@@ -171,43 +154,20 @@
     fn build(row: Self::Row) -> Self;
 }
 
+#[doc(inline)]
+pub use diesel_derives::Queryable;
+
 /// Deserializes the result of a query constructed with [`sql_query`].
 ///
-/// # Deriving
-///
-/// To derive this trait, Diesel needs to know the SQL type of each field. You
-/// can do this by either annotating your struct with `#[table_name =
-/// "some_table"]` (in which case the SQL type will be
-/// `diesel::dsl::SqlTypeOf<table_name::column_name>`), or by annotating each
-/// field with `#[sql_type = "SomeType"]`.
-///
-/// If you are using `#[table_name]`, the module for that table must be in
-/// scope. For example, to derive this for a struct called `User`, you will
-/// likely need a line such as `use schema::users;`
-///
-/// If the name of a field on your struct is different than the column in your
-/// `table!` declaration, or if you are deriving this trait on a tuple struct,
-/// you can annotate the field with `#[column_name = "some_column"]`. For tuple
-/// structs, all fields must have this annotation.
-///
-/// If a field is another struct which implements `QueryableByName`, instead of
-/// a column, you can annotate that struct with `#[diesel(embed)]`
-///
-/// To provide custom deserialization behavior for a field, you can use
-/// `#[diesel(deserialize_as = "Type")]`. If this attribute is present, Diesel
-/// will deserialize into that type, rather than the type on your struct and
-/// call `.into` to convert it. This can be used to add custom behavior for a
-/// single field, or use types that are otherwise unsupported by Diesel.
+/// This trait can be [derived](derive.QueryableByName.html).
 ///
 /// [`sql_query`]: ../fn.sql_query.html
 ///
 /// # Examples
 ///
-///
 /// If we just want to map a query to our struct, we can use `derive`.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("doctest_setup.rs");
 /// # use schema::users;
 /// # use diesel::sql_query;
@@ -237,7 +197,6 @@
 /// `deserialize_as` to use a different implementation.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("doctest_setup.rs");
 /// # use diesel::sql_query;
 /// # use schema::users;
@@ -293,6 +252,9 @@
     fn build<R: NamedRow<DB>>(row: &R) -> Result<Self>;
 }
 
+#[doc(inline)]
+pub use diesel_derives::QueryableByName;
+
 /// Deserialize a single field of a given SQL type.
 ///
 /// When possible, implementations of this trait should prefer to use an
@@ -361,12 +323,7 @@
 /// for all types which implement `FromSql`. However, as of Diesel 1.0, such an
 /// impl would conflict with our impl for tuples.
 ///
-/// ## Deriving
-///
-/// This trait can be automatically derived by Diesel
-/// for any type which implements `FromSql`.
-/// There are no options or special considerations needed for this derive.
-/// Note that `#[derive(FromSqlRow)]` will also generate a `Queryable` implementation.
+/// This trait can be [derived](derive.FromSqlRow.html).
 pub trait FromSqlRow<A, DB: Backend>: Sized {
     /// The number of fields that this type will consume. Must be equal to
     /// the number of times you would call `row.take()` in `build_from_row`
@@ -376,6 +333,9 @@
     fn build_from_row<T: Row<DB>>(row: &mut T) -> Result<Self>;
 }
 
+#[doc(inline)]
+pub use diesel_derives::FromSqlRow;
+
 // Reasons we can't write this:
 //
 // impl<T, ST, DB> FromSqlRow<ST, DB> for T
diff --git a/diesel/src/doctest_setup.rs b/diesel/src/doctest_setup.rs
index c3c98c1..597ca9e 100644
--- a/diesel/src/doctest_setup.rs
+++ b/diesel/src/doctest_setup.rs
@@ -197,6 +197,8 @@
 
 
 mod schema {
+    use diesel::prelude::*;
+
     table! {
         animals {
             id -> Integer,
diff --git a/diesel/src/expression/array_comparison.rs b/diesel/src/expression/array_comparison.rs
index 7f85d33..1eb5d1d 100644
--- a/diesel/src/expression/array_comparison.rs
+++ b/diesel/src/expression/array_comparison.rs
@@ -5,13 +5,13 @@
 use crate::result::QueryResult;
 use crate::sql_types::Bool;
 
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct In<T, U> {
     left: T,
     values: U,
 }
 
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct NotIn<T, U> {
     left: T,
     values: U,
@@ -140,7 +140,7 @@
     }
 }
 
-#[derive(Debug, Clone, NonAggregate)]
+#[derive(Debug, Clone, ValidGrouping)]
 pub struct Many<T>(Vec<T>);
 
 impl<T: Expression> Expression for Many<T> {
diff --git a/diesel/src/expression/bound.rs b/diesel/src/expression/bound.rs
index 8751281..4dbb605 100644
--- a/diesel/src/expression/bound.rs
+++ b/diesel/src/expression/bound.rs
@@ -5,7 +5,7 @@
 use crate::query_builder::*;
 use crate::result::QueryResult;
 use crate::serialize::ToSql;
-use crate::sql_types::HasSqlType;
+use crate::sql_types::{DieselNumericOps, HasSqlType};
 
 #[derive(Debug, Clone, Copy, DieselNumericOps)]
 pub struct Bound<T, U> {
@@ -47,4 +47,6 @@
 
 impl<T, U, QS> AppearsOnTable<QS> for Bound<T, U> where Bound<T, U>: Expression {}
 
-impl<T, U> NonAggregate for Bound<T, U> {}
+impl<T, U, GB> ValidGrouping<GB> for Bound<T, U> {
+    type IsAggregate = is_aggregate::Never;
+}
diff --git a/diesel/src/expression/coerce.rs b/diesel/src/expression/coerce.rs
index 0d83ffa..3328a3f 100644
--- a/diesel/src/expression/coerce.rs
+++ b/diesel/src/expression/coerce.rs
@@ -4,6 +4,7 @@
 use crate::expression::*;
 use crate::query_builder::*;
 use crate::result::QueryResult;
+use crate::sql_types::DieselNumericOps;
 
 #[derive(Debug, Copy, Clone, QueryId, DieselNumericOps)]
 #[doc(hidden)]
@@ -53,4 +54,9 @@
     }
 }
 
-impl<T, ST> NonAggregate for Coerce<T, ST> where T: NonAggregate {}
+impl<T, ST, GB> ValidGrouping<GB> for Coerce<T, ST>
+where
+    T: ValidGrouping<GB>,
+{
+    type IsAggregate = T::IsAggregate;
+}
diff --git a/diesel/src/expression/count.rs b/diesel/src/expression/count.rs
index c2bef2b..c75f2cb 100644
--- a/diesel/src/expression/count.rs
+++ b/diesel/src/expression/count.rs
@@ -1,8 +1,9 @@
-use super::Expression;
+use super::functions::sql_function;
+use super::{Expression, ValidGrouping};
 use crate::backend::Backend;
 use crate::query_builder::*;
 use crate::result::QueryResult;
-use crate::sql_types::BigInt;
+use crate::sql_types::{BigInt, DieselNumericOps};
 
 sql_function! {
     /// Creates a SQL `COUNT` expression
@@ -14,7 +15,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use diesel::dsl::*;
     /// #
@@ -41,7 +41,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use diesel::dsl::*;
 /// #
@@ -55,7 +54,8 @@
     CountStar
 }
 
-#[derive(Debug, Clone, Copy, QueryId, DieselNumericOps)]
+#[derive(Debug, Clone, Copy, QueryId, DieselNumericOps, ValidGrouping)]
+#[diesel(aggregate)]
 #[doc(hidden)]
 pub struct CountStar;
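
The `ValidGrouping` derive with `#[diesel(aggregate)]` above marks `COUNT(*)` as an aggregate expression. As a hedged illustration of the aggregate rules described in the changelog (hypothetical table, assuming the `postgres` feature; not code from this diff): several aggregate expressions may now be selected together, while mixing an aggregate with a bare column and no matching group by clause is rejected at compile time.

```rust
use diesel::dsl::{count_star, max};
use diesel::prelude::*;

diesel::table! {
    users (id) {
        id -> Integer,
        name -> Text,
    }
}

fn aggregate_examples(conn: &PgConnection) -> QueryResult<()> {
    use self::users::dsl::*;

    // Accepted: both selected expressions are aggregate.
    let _stats: Vec<(i64, Option<String>)> =
        users.select((count_star(), max(name))).load(conn)?;

    // Rejected by the new `ValidGrouping` checks (aggregate mixed with a
    // non-aggregate column and no group by clause):
    // users.select((max(name), id)).load::<(Option<String>, i32)>(conn)?;

    Ok(())
}
```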
 
diff --git a/diesel/src/expression/exists.rs b/diesel/src/expression/exists.rs
index 982aa71..8b4d83e 100644
--- a/diesel/src/expression/exists.rs
+++ b/diesel/src/expression/exists.rs
@@ -1,6 +1,6 @@
 use crate::backend::Backend;
 use crate::expression::subselect::Subselect;
-use crate::expression::{AppearsOnTable, Expression, NonAggregate, SelectableExpression};
+use crate::expression::{AppearsOnTable, Expression, SelectableExpression, ValidGrouping};
 use crate::query_builder::*;
 use crate::result::QueryResult;
 use crate::sql_types::Bool;
@@ -13,7 +13,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -43,7 +42,12 @@
     type SqlType = Bool;
 }
 
-impl<T> NonAggregate for Exists<T> where Subselect<T, ()>: NonAggregate {}
+impl<T, GB> ValidGrouping<GB> for Exists<T>
+where
+    Subselect<T, ()>: ValidGrouping<GB>,
+{
+    type IsAggregate = <Subselect<T, ()> as ValidGrouping<GB>>::IsAggregate;
+}
 
 #[cfg(not(feature = "unstable"))]
 impl<T, DB> QueryFragment<DB> for Exists<T>
diff --git a/diesel/src/expression/functions/aggregate_folding.rs b/diesel/src/expression/functions/aggregate_folding.rs
index bf6f044..b544c39 100644
--- a/diesel/src/expression/functions/aggregate_folding.rs
+++ b/diesel/src/expression/functions/aggregate_folding.rs
@@ -1,3 +1,4 @@
+use crate::expression::functions::sql_function;
 use crate::sql_types::Foldable;
 
 sql_function! {
@@ -7,7 +8,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// # use diesel::dsl::*;
     /// #
@@ -28,7 +28,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// # use diesel::dsl::*;
     /// # #[cfg(feature = "bigdecimal")]
diff --git a/diesel/src/expression/functions/aggregate_ordering.rs b/diesel/src/expression/functions/aggregate_ordering.rs
index 4b48445..6048d11 100644
--- a/diesel/src/expression/functions/aggregate_ordering.rs
+++ b/diesel/src/expression/functions/aggregate_ordering.rs
@@ -1,3 +1,4 @@
+use crate::expression::functions::sql_function;
 use crate::sql_types::{IntoNullable, SqlOrd};
 
 sql_function! {
@@ -7,7 +8,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// # use diesel::dsl::*;
     /// #
@@ -27,7 +27,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// # use diesel::dsl::*;
     /// #
diff --git a/diesel/src/expression/functions/date_and_time.rs b/diesel/src/expression/functions/date_and_time.rs
index ff57ee2..22b6225 100644
--- a/diesel/src/expression/functions/date_and_time.rs
+++ b/diesel/src/expression/functions/date_and_time.rs
@@ -1,6 +1,7 @@
 use crate::backend::Backend;
 use crate::expression::coerce::Coerce;
-use crate::expression::{AsExpression, Expression};
+use crate::expression::functions::sql_function;
+use crate::expression::{AsExpression, Expression, ValidGrouping};
 use crate::query_builder::*;
 use crate::result::QueryResult;
 use crate::sql_types::*;
@@ -8,7 +9,7 @@
 /// Represents the SQL `CURRENT_TIMESTAMP` constant. This is equivalent to the
 /// `NOW()` function on backends that support it.
 #[allow(non_camel_case_types)]
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct now;
 
 impl Expression for now {
@@ -33,7 +34,6 @@
     /// # Examples
 
     /// ```ignore
-    /// # #[macro_use] extern crate diesel;
     /// # extern crate chrono;
     /// # include!("../../doctest_setup.rs");
     /// # use diesel::dsl::*;
diff --git a/diesel/src/expression/functions/mod.rs b/diesel/src/expression/functions/mod.rs
index 45095f6..fe378ac 100644
--- a/diesel/src/expression/functions/mod.rs
+++ b/diesel/src/expression/functions/mod.rs
@@ -1,172 +1,7 @@
-#[macro_export]
-/// Declare a sql function for use in your code.
-///
-/// Diesel only provides support for a very small number of SQL functions.
-/// This macro enables you to add additional functions from the SQL standard,
-/// as well as any custom functions your application might have.
-///
-/// The syntax for this macro is very similar to that of a normal Rust function,
-/// except the argument and return types will be the SQL types being used.
-/// Typically these types will come from [`diesel::sql_types`].
-///
-/// This macro will generate two items. A function with the name that you've
-/// given, and a module with a helper type representing the return type of your
-/// function. For example, this invocation:
-///
-/// ```ignore
-/// sql_function!(fn lower(x: Text) -> Text);
-/// ```
-///
-/// will generate this code:
-///
-/// ```ignore
-/// pub fn lower<X>(x: X) -> lower::HelperType<X> {
-///     ...
-/// }
-///
-/// pub(crate) mod lower {
-///     pub type HelperType<X> = ...;
-/// }
-/// ```
-///
-/// If you are using this macro for part of a library, where the function is
-/// part of your public API, it is highly recommended that you re-export this
-/// helper type with the same name as your function. This is the standard
-/// structure:
-///
-/// ```ignore
-/// pub mod functions {
-///     use super::types::*;
-///     use diesel::sql_types::*;
-///
-///     sql_function! {
-///         /// Represents the Pg `LENGTH` function used with `tsvector`s.
-///         fn length(x: TsVector) -> Integer;
-///     }
-/// }
-///
-/// pub mod helper_types {
-///     /// The return type of `length(expr)`
-///     pub type Length<Expr> = functions::length::HelperType<Expr>;
-/// }
-///
-/// pub mod dsl {
-///     pub use functions::*;
-///     pub use helper_types::*;
-/// }
-/// ```
-///
-/// Most attributes given to this macro will be put on the generated function
-/// (including doc comments).
-///
-/// # Adding Doc Comments
-///
-/// ```no_run
-/// # #[macro_use] extern crate diesel;
-/// # use diesel::*;
-/// #
-/// # table! { crates { id -> Integer, name -> VarChar, } }
-/// #
-/// use diesel::sql_types::Text;
-///
-/// sql_function! {
-///     /// Represents the `canon_crate_name` SQL function, created in
-///     /// migration ....
-///     fn canon_crate_name(a: Text) -> Text;
-/// }
-///
-/// # fn main() {
-/// # use self::crates::dsl::*;
-/// let target_name = "diesel";
-/// crates.filter(canon_crate_name(name).eq(canon_crate_name(target_name)));
-/// // This will generate the following SQL
-/// // SELECT * FROM crates WHERE canon_crate_name(crates.name) = canon_crate_name($1)
-/// # }
-/// ```
-///
-/// # Special Attributes
-///
-/// There are a handful of special attributes that Diesel will recognize. They
-/// are:
-///
-/// - `#[aggregate]`
-///   - Indicates that this is an aggregate function, and that `NonAggregate`
-///     should not be implemented.
-/// - `#[sql_name="name"]`
-///   - The SQL to be generated is different than the Rust name of the function.
-///     This can be used to represent functions which can take many argument
-///     types, or to capitalize function names.
-///
-/// Functions can also be generic. Take the definition of `sum` for an example
-/// of all of this:
-///
-/// ```no_run
-/// # #[macro_use] extern crate diesel;
-/// # use diesel::*;
-/// #
-/// # table! { crates { id -> Integer, name -> VarChar, } }
-/// #
-/// use diesel::sql_types::Foldable;
-///
-/// sql_function! {
-///     #[aggregate]
-///     #[sql_name = "SUM"]
-///     fn sum<ST: Foldable>(expr: ST) -> ST::Sum;
-/// }
-///
-/// # fn main() {
-/// # use self::crates::dsl::*;
-/// crates.select(sum(id));
-/// # }
-/// ```
-///
-/// # Use with SQLite
-///
-/// On most backends, the implementation of the function is defined in a
-/// migration using `CREATE FUNCTION`. On SQLite, the function is implemented in
-/// Rust instead. You must call `register_impl` or
-/// `register_nondeterministic_impl` with every connection before you can use
-/// the function.
-///
-/// These functions will only be generated if the `sqlite` feature is enabled,
-/// and the function is not generic. Generic functions and variadic functions
-/// are not supported on SQLite.
-///
-/// ```rust
-/// # #[macro_use] extern crate diesel;
-/// # use diesel::*;
-/// #
-/// # #[cfg(feature = "sqlite")]
-/// # fn main() {
-/// #     run_test().unwrap();
-/// # }
-/// #
-/// # #[cfg(not(feature = "sqlite"))]
-/// # fn main() {
-/// # }
-/// #
-/// use diesel::sql_types::{Integer, Double};
-/// sql_function!(fn add_mul(x: Integer, y: Integer, z: Double) -> Double);
-///
-/// # #[cfg(feature = "sqlite")]
-/// # fn run_test() -> Result<(), Box<::std::error::Error>> {
-/// let connection = SqliteConnection::establish(":memory:")?;
-///
-/// add_mul::register_impl(&connection, |x: i32, y: i32, z: f64| {
-///     (x + y) as f64 * z
-/// })?;
-///
-/// let result = select(add_mul(1, 2, 1.5))
-///     .get_result::<f64>(&connection)?;
-/// assert_eq!(4.5, result);
-/// #     Ok(())
-/// # }
-/// ```
-macro_rules! sql_function {
-    ($($args:tt)*) => {
-        sql_function_proc! { $($args)* }
-    }
-}
+//! Helper macros to define custom SQL functions
+
+#[doc(inline)]
+pub use diesel_derives::sql_function_proc as sql_function;
 
 #[macro_export]
 #[doc(hidden)]
@@ -174,7 +9,13 @@
     ($type_name:ident, $return_type:ty, $docs:expr) => {
         #[allow(non_camel_case_types)]
         #[doc=$docs]
-        #[derive(Debug, Clone, Copy, QueryId, NonAggregate)]
+        #[derive(
+            Debug,
+            Clone,
+            Copy,
+            $crate::query_builder::QueryId,
+            $crate::expression::ValidGrouping
+        )]
         pub struct $type_name;
 
         impl $crate::expression::Expression for $type_name {
@@ -224,7 +65,6 @@
 /// generated using:
 ///
 /// ```no_run
-/// # #[macro_use] extern crate diesel;
 /// # pub use diesel::*;
 /// no_arg_sql_function!(now, sql_types::Timestamp, "Represents the SQL NOW() function");
 /// # fn main() {}
@@ -246,7 +86,11 @@
     };
 }
 
+#[doc(hidden)]
 pub mod aggregate_folding;
+#[doc(hidden)]
 pub mod aggregate_ordering;
+#[doc(hidden)]
 pub mod date_and_time;
+#[doc(hidden)]
 pub mod helper_types;
diff --git a/diesel/src/expression/grouped.rs b/diesel/src/expression/grouped.rs
index 122e998..bd77582 100644
--- a/diesel/src/expression/grouped.rs
+++ b/diesel/src/expression/grouped.rs
@@ -1,9 +1,10 @@
 use crate::backend::Backend;
-use crate::expression::Expression;
+use crate::expression::{Expression, ValidGrouping};
 use crate::query_builder::*;
 use crate::result::QueryResult;
+use crate::sql_types::DieselNumericOps;
 
-#[derive(Debug, Copy, Clone, QueryId, Default, DieselNumericOps, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, Default, DieselNumericOps, ValidGrouping)]
 pub struct Grouped<T>(pub T);
 
 impl<T: Expression> Expression for Grouped<T> {
diff --git a/diesel/src/expression/mod.rs b/diesel/src/expression/mod.rs
index 490aa0f..07d0681 100644
--- a/diesel/src/expression/mod.rs
+++ b/diesel/src/expression/mod.rs
@@ -17,8 +17,6 @@
 #[macro_use]
 #[doc(hidden)]
 pub mod ops;
-#[doc(hidden)]
-#[macro_use]
 pub mod functions;
 
 #[doc(hidden)]
@@ -124,25 +122,8 @@
 ///   [`Timestamptz`]: ../pg/types/sql_types/struct.Timestamptz.html
 ///   [`ToSql`]: ../serialize/trait.ToSql.html
 ///
-/// ## Deriving
-///
-/// This trait can be automatically derived for any type which implements `ToSql`.
-/// The type must be annotated with `#[sql_type = "SomeType"]`.
-/// If that annotation appears multiple times,
-/// implementations will be generated for each one of them.
-///
-/// This will generate the following impls:
-///
-/// - `impl AsExpression<SqlType> for YourType`
-/// - `impl AsExpression<Nullable<SqlType>> for YourType`
-/// - `impl AsExpression<SqlType> for &'a YourType`
-/// - `impl AsExpression<Nullable<SqlType>> for &'a YourType`
-/// - `impl AsExpression<SqlType> for &'a &'b YourType`
-/// - `impl AsExpression<Nullable<SqlType>> for &'a &'b YourType`
-///
-/// If your type is unsized,
-/// you can specify this by adding the annotation `#[diesel(not_sized)]`.
-/// This will skip the impls for non-reference types.
+/// This trait can be [derived](derive.AsExpression.html).
+
 pub trait AsExpression<T> {
     /// The expression being returned
     type Expression: Expression<SqlType = T>;
@@ -151,6 +132,9 @@
     fn as_expression(self) -> Self::Expression;
 }
 
+#[doc(inline)]
+pub use diesel_derives::AsExpression;
+
 impl<T: Expression> AsExpression<T::SqlType> for T {
     type Expression = Self;
 
@@ -169,7 +153,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::users;
 /// #
@@ -260,36 +243,158 @@
 {
 }
 
-/// Marker trait to indicate that an expression does not include any aggregate
-/// functions.
+/// Is this expression valid for a given group by clause?
 ///
-/// Used to ensure that aggregate expressions aren't mixed with
-/// non-aggregate expressions in a select clause, and that they're never
-/// included in a where clause.
+/// Implementations of this trait must ensure that aggregate expressions are
+/// not mixed with non-aggregate expressions.
 ///
-/// ## Deriving
+/// For generic types, you can determine whether your sub-expressions can appear
+/// together using the [`MixedAggregates`] trait.
 ///
-/// This trait can be automatically derived for structs with no type parameters
-/// which are not aggregate, as well as for structs which are `NonAggregate`
-/// when all type parameters are `NonAggregate`. For example:
+/// `GroupByClause` will be a tuple containing the set of expressions appearing
+/// in the `GROUP BY` portion of the query. If there is no `GROUP BY`, it will
+/// be `()`.
 ///
-/// ```ignore
-/// #[derive(NonAggregate)]
-/// struct Plus<Lhs, Rhs>(Lhs, Rhs);
+/// This trait can be [derived]
 ///
-/// // The following impl will be generated:
-/// impl<Lhs, Rhs> NonAggregate for Plus<Lhs, Rhs>
-/// where
-///     Lhs: NonAggregate,
-///     Rhs: NonAggregate,
-/// {
-/// }
-/// ```
-pub trait NonAggregate {}
+/// [derived]: derive.ValidGrouping.html
+/// [`MixedAggregates`]: trait.MixedAggregates.html
+pub trait ValidGrouping<GroupByClause> {
+    /// Is this expression aggregate?
+    ///
+    /// This type should always be one of the structs in the [`is_aggregate`]
+    /// module. See the documentation of those structs for more details.
+    ///
+    /// [`is_aggregate`]: is_aggregate/index.html
+    type IsAggregate;
+}
 
-impl<T: NonAggregate + ?Sized> NonAggregate for Box<T> {}
+impl<T: ValidGrouping<GB> + ?Sized, GB> ValidGrouping<GB> for Box<T> {
+    type IsAggregate = T::IsAggregate;
+}
 
-impl<'a, T: NonAggregate + ?Sized> NonAggregate for &'a T {}
+impl<'a, T: ValidGrouping<GB> + ?Sized, GB> ValidGrouping<GB> for &'a T {
+    type IsAggregate = T::IsAggregate;
+}
+
+#[doc(inline)]
+pub use diesel_derives::ValidGrouping;
+
+/// Can two `IsAggregate` types appear in the same expression?
+///
+/// You should never implement this trait. It will eventually become a trait
+/// alias.
+///
+/// [`is_aggregate::Yes`] and [`is_aggregate::No`] can only appear with
+/// themselves or [`is_aggregate::Never`]. [`is_aggregate::Never`] can appear
+/// with anything.
+///
+/// [`is_aggregate::Yes`]: is_aggregate/struct.Yes.html
+/// [`is_aggregate::No`]: is_aggregate/struct.No.html
+/// [`is_aggregate::Never`]: is_aggregate/struct.Never.html
+pub trait MixedAggregates<Other> {
+    /// What is the resulting `IsAggregate` type?
+    type Output;
+}
+
+#[allow(missing_debug_implementations, missing_copy_implementations)]
+/// Possible values for `ValidGrouping::IsAggregate`
+pub mod is_aggregate {
+    use super::MixedAggregates;
+
+    /// Yes, this expression is aggregate for the given group by clause.
+    pub struct Yes;
+
+    /// No, this expression is not aggregate with the given group by clause,
+    /// but it might be aggregate with a different group by clause.
+    pub struct No;
+
+    /// This expression is never aggregate, and can appear with any other
+    /// expression, regardless of whether it is aggregate.
+    ///
+    /// Examples of this are literals. `1` does not care about aggregation.
+    /// `foo + 1` is always valid, regardless of whether `foo` appears in the
+    /// group by clause or not.
+    pub struct Never;
+
+    impl MixedAggregates<Yes> for Yes {
+        type Output = Yes;
+    }
+
+    impl MixedAggregates<Never> for Yes {
+        type Output = Yes;
+    }
+
+    impl MixedAggregates<No> for No {
+        type Output = No;
+    }
+
+    impl MixedAggregates<Never> for No {
+        type Output = No;
+    }
+
+    impl<T> MixedAggregates<T> for Never {
+        type Output = T;
+    }
+}
+
+// Note that these docs are similar to but slightly different than the stable
+// docs below. Make sure if you change these that you also change the docs
+// below.
+/// Trait alias to represent an expression that isn't aggregate by default.
+///
+/// This alias represents a type which is not aggregate if there is no group by
+/// clause. More specifically, it is satisfied by types which implement
+/// [`ValidGrouping<()>`] where `IsAggregate` is [`is_aggregate::No`] or
+/// [`is_aggregate::Never`].
+///
+/// While this trait is a useful stand-in for common cases, `T: NonAggregate`
+/// cannot always be used when `T: ValidGrouping<(), IsAggregate = No>` or
+/// `T: ValidGrouping<(), IsAggregate = Never>` could be. For that reason,
+/// unless you need to abstract over both columns and literals, you should
+/// prefer to use [`ValidGrouping<()>`] in your bounds instead.
+///
+/// [`ValidGrouping<()>`]: trait.ValidGrouping.html
+/// [`is_aggregate::Never`]: is_aggregate/struct.Never.html
+/// [`is_aggregate::No`]: is_aggregate/struct.No.html
+#[cfg(feature = "unstable")]
+pub trait NonAggregate = ValidGrouping<()>
+where
+    <Self as ValidGrouping<()>>::IsAggregate:
+        MixedAggregates<is_aggregate::No, Output = is_aggregate::No>;
+
+// Note that these docs are similar to, but slightly different from, the
+// unstable docs above. If you change these, make sure to also change the docs
+// above.
+/// Trait alias to represent an expression that isn't aggregate by default.
+///
+/// This trait should never be implemented directly. It is replaced with a
+/// trait alias when the `unstable` feature is enabled.
+///
+/// This alias represents a type which is not aggregate if there is no group by
+/// clause. More specifically, it represents types which implement
+/// [`ValidGrouping<()>`] where `IsAggregate` is [`is_aggregate::No`] or
+/// [`is_aggregate::Never`].
+///
+/// While this trait is a useful stand-in for common cases, `T: NonAggregate`
+/// cannot always be used when `T: ValidGrouping<(), IsAggregate = No>` or
+/// `T: ValidGrouping<(), IsAggregate = Never>` could be. For that reason,
+/// unless you need to abstract over both columns and literals, you should
+/// prefer to use [`ValidGrouping<()>`] in your bounds instead.
+///
+/// [`ValidGrouping<()>`]: trait.ValidGrouping.html
+/// [`is_aggregate::Never`]: is_aggregate/struct.Never.html
+/// [`is_aggregate::No`]: is_aggregate/struct.No.html
+#[cfg(not(feature = "unstable"))]
+pub trait NonAggregate: ValidGrouping<()> {}
+
+#[cfg(not(feature = "unstable"))]
+impl<T> NonAggregate for T
+where
+    T: ValidGrouping<()>,
+    T::IsAggregate: MixedAggregates<is_aggregate::No, Output = is_aggregate::No>,
+{
+}
 
 use crate::query_builder::{QueryFragment, QueryId};
 
@@ -308,7 +413,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::users;
 /// use diesel::sql_types::Bool;
@@ -352,7 +456,7 @@
     DB: Backend,
     Self: Expression,
     Self: SelectableExpression<QS>,
-    Self: NonAggregate,
+    Self: ValidGrouping<(), IsAggregate = is_aggregate::No>,
     Self: QueryFragment<DB>,
 {
 }
@@ -362,7 +466,7 @@
     DB: Backend,
     T: Expression,
     T: SelectableExpression<QS>,
-    T: NonAggregate,
+    T: ValidGrouping<(), IsAggregate = is_aggregate::No>,
     T: QueryFragment<DB>,
 {
 }
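
For orientation, here is a minimal sketch of what the new `ValidGrouping` derive expands to for a two-argument expression, mirroring the `Plus` example that the removed `NonAggregate` docs used. `MyPlus` is a hypothetical type, not part of this change; the `MixedAggregates` bound is what rejects mixing an aggregate operand with a non-aggregate one at compile time.

```rust
use diesel::expression::{MixedAggregates, ValidGrouping};

// Hypothetical two-argument expression, standing in for the old `Plus` example.
struct MyPlus<Lhs, Rhs>(Lhs, Rhs);

// Roughly what `#[derive(ValidGrouping)]` generates for such a struct.
impl<Lhs, Rhs, GroupByClause> ValidGrouping<GroupByClause> for MyPlus<Lhs, Rhs>
where
    Lhs: ValidGrouping<GroupByClause>,
    Rhs: ValidGrouping<GroupByClause>,
    Lhs::IsAggregate: MixedAggregates<Rhs::IsAggregate>,
{
    // `Yes` with `Yes` stays `Yes`, `No` with `No` stays `No`, `Never` adopts
    // the other operand's state, and `Yes` with `No` has no impl, so mixing
    // aggregate and non-aggregate operands fails to compile.
    type IsAggregate =
        <Lhs::IsAggregate as MixedAggregates<Rhs::IsAggregate>>::Output;
}
```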
diff --git a/diesel/src/expression/not.rs b/diesel/src/expression/not.rs
index 1e6d905..cd46aae 100644
--- a/diesel/src/expression/not.rs
+++ b/diesel/src/expression/not.rs
@@ -8,7 +8,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
diff --git a/diesel/src/expression/nullable.rs b/diesel/src/expression/nullable.rs
index a89fb19..70c400c 100644
--- a/diesel/src/expression/nullable.rs
+++ b/diesel/src/expression/nullable.rs
@@ -3,9 +3,9 @@
 use crate::query_builder::*;
 use crate::query_source::joins::ToInnerJoin;
 use crate::result::QueryResult;
-use crate::sql_types::IntoNullable;
+use crate::sql_types::{DieselNumericOps, IntoNullable};
 
-#[derive(Debug, Copy, Clone, DieselNumericOps, NonAggregate)]
+#[derive(Debug, Copy, Clone, DieselNumericOps, ValidGrouping)]
 pub struct Nullable<T>(T);
 
 impl<T> Nullable<T> {
diff --git a/diesel/src/expression/operators.rs b/diesel/src/expression/operators.rs
index dd9cb5b..b5d9116 100644
--- a/diesel/src/expression/operators.rs
+++ b/diesel/src/expression/operators.rs
@@ -1,4 +1,4 @@
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! __diesel_operator_body {
     (
@@ -61,7 +61,14 @@
         expression_ty_params = ($($expression_ty_params:ident,)*),
         expression_bounds = ($($expression_bounds:tt)*),
     ) => {
-        #[derive(Debug, Clone, Copy, QueryId, DieselNumericOps, NonAggregate)]
+        #[derive(
+            Debug,
+            Clone,
+            Copy,
+            $crate::query_builder::QueryId,
+            $crate::sql_types::DieselNumericOps,
+            $crate::expression::ValidGrouping
+        )]
         #[doc(hidden)]
         pub struct $name<$($ty_param,)+> {
             $(pub(crate) $field_name: $ty_param,)+
@@ -179,13 +186,12 @@
 /// ## Example usage
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
 /// #     use schema::users::dsl::*;
 /// #     let connection = establish_connection();
-/// infix_operator!(MyEq, " = ");
+/// diesel::infix_operator!(MyEq, " = ");
 ///
 /// use diesel::expression::AsExpression;
 ///
@@ -202,7 +208,7 @@
 /// assert_eq!(Ok(1), users_with_name.first(&connection));
 /// # }
 /// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! infix_operator {
     ($name:ident, $operator:expr) => {
         infix_operator!($name, $operator, $crate::sql_types::Bool);
@@ -239,7 +245,7 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[deprecated(since = "2.0.0", note = "use `diesel::infix_operator!` instead")]
 #[cfg(feature = "with-deprecated")]
 #[doc(hidden)]
@@ -257,7 +263,7 @@
 /// the single argument. See [`infix_operator!`] for example usage.
 ///
 /// [`infix_operator!`]: macro.infix_operator.html
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! postfix_operator {
     ($name:ident, $operator:expr) => {
         postfix_operator!($name, $operator, $crate::sql_types::Bool);
@@ -294,7 +300,7 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[deprecated(since = "2.0.0", note = "use `diesel::postfix_operator!` instead")]
 #[cfg(feature = "with-deprecated")]
 #[doc(hidden)]
@@ -312,7 +318,7 @@
 /// the single argument. See [`infix_operator!`] for example usage.
 ///
 /// [`infix_operator!`]: macro.infix_operator.html
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! prefix_operator {
     ($name:ident, $operator:expr) => {
         prefix_operator!($name, $operator, $crate::sql_types::Bool);
@@ -349,7 +355,7 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[deprecated(since = "2.0.0", note = "use `diesel::prefix_operator!` instead")]
 #[cfg(feature = "with-deprecated")]
 #[doc(hidden)]
@@ -380,9 +386,11 @@
 
 prefix_operator!(Not, "NOT ");
 
+use crate::expression::ValidGrouping;
 use crate::insertable::{ColumnInsertValue, Insertable};
-use crate::query_builder::ValuesClause;
+use crate::query_builder::{QueryId, ValuesClause};
 use crate::query_source::Column;
+use crate::sql_types::DieselNumericOps;
 
 impl<T, U> Insertable<T::Table> for Eq<T, U>
 where
@@ -407,7 +415,7 @@
     }
 }
 
-#[derive(Debug, Clone, Copy, QueryId, DieselNumericOps, NonAggregate)]
+#[derive(Debug, Clone, Copy, QueryId, DieselNumericOps, ValidGrouping)]
 #[doc(hidden)]
 pub struct Concat<L, R> {
     pub(crate) left: L,
diff --git a/diesel/src/expression/ops/numeric.rs b/diesel/src/expression/ops/numeric.rs
index b13d920..8a7b797 100644
--- a/diesel/src/expression/ops/numeric.rs
+++ b/diesel/src/expression/ops/numeric.rs
@@ -1,12 +1,12 @@
 use crate::backend::Backend;
-use crate::expression::{Expression, NonAggregate};
+use crate::expression::{Expression, ValidGrouping};
 use crate::query_builder::*;
 use crate::result::QueryResult;
 use crate::sql_types;
 
 macro_rules! numeric_operation {
     ($name:ident, $op:expr) => {
-        #[derive(Debug, Copy, Clone, QueryId)]
+        #[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
         pub struct $name<Lhs, Rhs> {
             lhs: Lhs,
             rhs: Rhs,
@@ -47,15 +47,6 @@
         }
 
         impl_selectable_expression!($name<Lhs, Rhs>);
-
-        impl<Lhs, Rhs> NonAggregate for $name<Lhs, Rhs>
-        where
-            Lhs: NonAggregate,
-            Rhs: NonAggregate,
-            $name<Lhs, Rhs>: Expression,
-        {
-        }
-
         generic_numeric_expr!($name, A, B);
     };
 }
diff --git a/diesel/src/expression/sql_literal.rs b/diesel/src/expression/sql_literal.rs
index 9e2446a..bbc3dfe 100644
--- a/diesel/src/expression/sql_literal.rs
+++ b/diesel/src/expression/sql_literal.rs
@@ -5,6 +5,7 @@
 use crate::query_builder::*;
 use crate::query_dsl::RunQueryDsl;
 use crate::result::QueryResult;
+use crate::sql_types::DieselNumericOps;
 
 #[derive(Debug, Clone, DieselNumericOps)]
 #[must_use = "Queries are only executed when calling `load`, `get_result`, or similar."]
@@ -38,7 +39,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # table! {
@@ -69,7 +69,6 @@
     /// ### Multiple Bind Params
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     ///
     /// # table! {
@@ -121,7 +120,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     ///
     /// # table! {
@@ -187,7 +185,9 @@
 
 impl<QS, ST, T> AppearsOnTable<QS> for SqlLiteral<ST, T> {}
 
-impl<ST, T> NonAggregate for SqlLiteral<ST, T> {}
+impl<ST, T, GB> ValidGrouping<GB> for SqlLiteral<ST, T> {
+    type IsAggregate = is_aggregate::Never;
+}
 
 /// Use literal SQL in the query builder
 ///
@@ -208,7 +208,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # fn main() {
 /// #     run_test().unwrap();
@@ -261,7 +260,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     ///
     /// # table! {
@@ -322,7 +320,9 @@
     type SqlType = Q::SqlType;
 }
 
-impl<Query, Value> NonAggregate for UncheckedBind<Query, Value> {}
+impl<Query, Value, GB> ValidGrouping<GB> for UncheckedBind<Query, Value> {
+    type IsAggregate = is_aggregate::Never;
+}
 
 impl<QS, Query, Value> SelectableExpression<QS> for UncheckedBind<Query, Value> where
     Self: AppearsOnTable<QS>
diff --git a/diesel/src/expression/subselect.rs b/diesel/src/expression/subselect.rs
index f08b087..754d80d 100644
--- a/diesel/src/expression/subselect.rs
+++ b/diesel/src/expression/subselect.rs
@@ -47,7 +47,9 @@
 // FIXME: This probably isn't sound. The subselect can reference columns from
 // the outer query, and is affected by the `GROUP BY` clause of the outer query
 // identically to using it outside of a subselect
-impl<T, ST> NonAggregate for Subselect<T, ST> {}
+impl<T, ST, GB> ValidGrouping<GB> for Subselect<T, ST> {
+    type IsAggregate = is_aggregate::Never;
+}
 
 impl<T, ST, DB> QueryFragment<DB> for Subselect<T, ST>
 where
diff --git a/diesel/src/expression_methods/bool_expression_methods.rs b/diesel/src/expression_methods/bool_expression_methods.rs
index 5a4e6ed..663da37 100644
--- a/diesel/src/expression_methods/bool_expression_methods.rs
+++ b/diesel/src/expression_methods/bool_expression_methods.rs
@@ -10,7 +10,6 @@
     /// # Example
     ///
     /// ```
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -50,7 +49,6 @@
     /// # Example
     ///
     /// ```
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
diff --git a/diesel/src/expression_methods/escape_expression_methods.rs b/diesel/src/expression_methods/escape_expression_methods.rs
index a674530..d7914de 100644
--- a/diesel/src/expression_methods/escape_expression_methods.rs
+++ b/diesel/src/expression_methods/escape_expression_methods.rs
@@ -12,7 +12,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
diff --git a/diesel/src/expression_methods/global_expression_methods.rs b/diesel/src/expression_methods/global_expression_methods.rs
index fd1582a..88f3070 100644
--- a/diesel/src/expression_methods/global_expression_methods.rs
+++ b/diesel/src/expression_methods/global_expression_methods.rs
@@ -10,7 +10,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -29,7 +28,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -53,7 +51,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -86,7 +83,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -117,7 +113,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -144,7 +139,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -171,7 +165,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -198,7 +191,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -225,7 +217,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -252,7 +243,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -279,7 +269,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -308,7 +297,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -344,7 +332,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -377,7 +364,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -412,7 +398,6 @@
     /// # Example
     /// ```no_run
     /// # #![allow(dead_code)]
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use diesel::sql_types::*;
     /// # use schema::users;
diff --git a/diesel/src/expression_methods/text_expression_methods.rs b/diesel/src/expression_methods/text_expression_methods.rs
index e6bf2d0..913690a 100644
--- a/diesel/src/expression_methods/text_expression_methods.rs
+++ b/diesel/src/expression_methods/text_expression_methods.rs
@@ -9,7 +9,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # table! {
@@ -69,7 +68,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -102,7 +100,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
diff --git a/diesel/src/insertable.rs b/diesel/src/insertable.rs
index 079a29b..3056234 100644
--- a/diesel/src/insertable.rs
+++ b/diesel/src/insertable.rs
@@ -14,18 +14,7 @@
 /// database. This is automatically implemented for `&[T]` and `&Vec<T>` for
 /// inserting more than one record.
 ///
-/// ### Deriving
-///
-/// This trait can be automatically derived by adding  `#[derive(Insertable)]`
-/// to your struct. Structs which derive this trait must also be annotated
-/// with `#[table_name = "some_table_name"]`. If the field name of your
-/// struct differs from the name of the column, you can annotate the field
-/// with `#[column_name = "some_column_name"]`.
-///
-/// Your struct can also contain fields which implement `Insertable`. This is
-/// useful when you want to have one field map to more than one column (for
-/// example, an enum that maps to a label and a value column). Add
-/// `#[diesel(embed)]` to any such fields.
+/// This trait can be [derived](derive.Insertable.html).
 pub trait Insertable<T> {
     /// The `VALUES` clause to insert these records
     ///
@@ -50,7 +39,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -86,6 +74,9 @@
     }
 }
 
+#[doc(inline)]
+pub use diesel_derives::Insertable;
+
 pub trait CanInsertInSingleQuery<DB: Backend> {
     /// How many rows will this query insert?
     ///
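
Since the inline deriving notes were replaced by a link, here is a minimal sketch of the derive they described (table, struct, and field names are hypothetical):

```rust
use diesel::prelude::*;

diesel::table! {
    users {
        id -> Integer,
        name -> Text,
    }
}

// `#[table_name]` (and `#[column_name]` on fields) work as described in the
// linked derive documentation.
#[derive(Insertable)]
#[table_name = "users"]
struct NewUser<'a> {
    name: &'a str,
}
```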
diff --git a/diesel/src/lib.rs b/diesel/src/lib.rs
index 9e97779..dbd9340 100644
--- a/diesel/src/lib.rs
+++ b/diesel/src/lib.rs
@@ -93,7 +93,7 @@
 //! You can come ask for help at
 //! [gitter.im/diesel-rs/diesel](https://gitter.im/diesel-rs/diesel)
 
-#![cfg_attr(feature = "unstable", feature(specialization))]
+#![cfg_attr(feature = "unstable", feature(specialization, trait_alias))]
 // Built-in Lints
 #![deny(warnings)]
 #![warn(
@@ -103,11 +103,12 @@
 )]
 // Clippy lints
 #![allow(
+    clippy::match_same_arms,
+    clippy::needless_doctest_main,
     clippy::option_map_unwrap_or_else,
     clippy::option_map_unwrap_or,
-    clippy::match_same_arms,
-    clippy::type_complexity,
-    clippy::redundant_field_names
+    clippy::redundant_field_names,
+    clippy::type_complexity
 )]
 #![cfg_attr(test, allow(clippy::option_map_unwrap_or, clippy::result_unwrap_used))]
 #![warn(
@@ -129,13 +130,11 @@
 #[macro_use]
 extern crate bitflags;
 extern crate byteorder;
-#[macro_use]
 extern crate diesel_derives;
-#[doc(hidden)]
-pub use diesel_derives::*;
 
 #[macro_use]
-mod macros;
+#[doc(hidden)]
+pub mod macros;
 
 #[cfg(test)]
 #[macro_use]
@@ -161,6 +160,7 @@
 pub mod r2d2;
 pub mod result;
 pub mod serialize;
+pub mod upsert;
 #[macro_use]
 pub mod sql_types;
 pub mod migration;
@@ -176,6 +176,11 @@
 mod type_impls;
 mod util;
 
+#[doc(hidden)]
+#[cfg(feature = "with-deprecated")]
+#[deprecated(since = "2.0.0", note = "Use explicit macro imports instead")]
+pub use diesel_derives::*;
+
 pub mod dsl {
     //! Includes various helper types and bare functions which are named too
     //! generically to be included in prelude, but are often used when using Diesel.
@@ -260,10 +265,18 @@
     pub type InnerJoin<Source, Rhs> =
         <Source as JoinWithImplicitOnClause<Rhs, joins::Inner>>::Output;
 
+    /// Represents the return type of `.inner_join(rhs.on(on))`
+    pub type InnerJoinOn<Source, Rhs, On> =
+        <Source as InternalJoinDsl<Rhs, joins::Inner, On>>::Output;
+
     /// Represents the return type of `.left_join(rhs)`
     pub type LeftJoin<Source, Rhs> =
         <Source as JoinWithImplicitOnClause<Rhs, joins::LeftOuter>>::Output;
 
+    /// Represents the return type of `.left_join(rhs.on(on))`
+    pub type LeftJoinOn<Source, Rhs, On> =
+        <Source as InternalJoinDsl<Rhs, joins::LeftOuter, On>>::Output;
+
     use super::associations::HasTable;
     use super::query_builder::{AsChangeset, IntoUpdateTarget, UpdateStatement};
     /// Represents the return type of `update(lhs).set(rhs)`
@@ -292,31 +305,46 @@
 
 pub mod prelude {
     //! Re-exports important traits and types. Meant to be glob imported when using Diesel.
-    pub use crate::associations::{GroupedBy, Identifiable};
+
+    #[doc(inline)]
+    pub use crate::associations::{Associations, GroupedBy, Identifiable};
+    #[doc(inline)]
     pub use crate::connection::Connection;
-    #[deprecated(
-        since = "1.1.0",
-        note = "Explicitly `use diesel::deserialize::Queryable"
-    )]
-    pub use crate::deserialize::Queryable;
+    #[doc(inline)]
+    pub use crate::deserialize::{Queryable, QueryableByName};
+    #[doc(inline)]
     pub use crate::expression::{
         AppearsOnTable, BoxableExpression, Expression, IntoSql, SelectableExpression,
     };
+
+    #[doc(inline)]
+    pub use crate::expression::functions::sql_function;
+
+    #[doc(inline)]
     pub use crate::expression_methods::*;
     #[doc(inline)]
     pub use crate::insertable::Insertable;
+    #[doc(inline)]
+    pub use crate::macros::prelude::*;
+    #[doc(inline)]
+    pub use crate::query_builder::AsChangeset;
     #[doc(hidden)]
     pub use crate::query_dsl::GroupByDsl;
+    #[doc(inline)]
     pub use crate::query_dsl::{BelongingToDsl, JoinOnDsl, QueryDsl, RunQueryDsl, SaveChangesDsl};
-
+    #[doc(inline)]
     pub use crate::query_source::{Column, JoinTo, QuerySource, Table};
+    #[doc(inline)]
     pub use crate::result::{ConnectionError, ConnectionResult, OptionalExtension, QueryResult};
 
     #[cfg(feature = "mysql")]
+    #[doc(inline)]
     pub use crate::mysql::MysqlConnection;
     #[cfg(feature = "postgres")]
+    #[doc(inline)]
     pub use crate::pg::PgConnection;
     #[cfg(feature = "sqlite")]
+    #[doc(inline)]
     pub use crate::sqlite::SqliteConnection;
 }
 
diff --git a/diesel/src/macros/internal.rs b/diesel/src/macros/internal.rs
index 80c5e22..f00eb43 100644
--- a/diesel/src/macros/internal.rs
+++ b/diesel/src/macros/internal.rs
@@ -4,7 +4,7 @@
 ///
 /// This macro is exported because we want to be able to call it from other
 /// macros that are exported, but it is not part of our public API.
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! impl_selectable_expression {
     ($struct_name:ident) => {
diff --git a/diesel/src/macros/mod.rs b/diesel/src/macros/mod.rs
index 375b81c1..652376f 100644
--- a/diesel/src/macros/mod.rs
+++ b/diesel/src/macros/mod.rs
@@ -1,6 +1,15 @@
 #![allow(unused_parens)] // FIXME: Remove this attribute once false positive is resolved.
 #![cfg_attr(rustfmt, rustfmt_skip)] // https://github.com/rust-lang-nursery/rustfmt/issues/2755
 
+pub(crate) mod prelude {
+    #[doc(inline)]
+    pub use crate::{
+        allow_tables_to_appear_in_same_query,
+        joinable,
+        table,
+    };
+}
+
 #[macro_export]
 #[doc(hidden)]
 macro_rules! __diesel_column {
@@ -13,7 +22,7 @@
     ) => {
         $($meta)*
         #[allow(non_camel_case_types, dead_code)]
-        #[derive(Debug, Clone, Copy, QueryId, Default)]
+        #[derive(Debug, Clone, Copy, $crate::query_builder::QueryId, Default)]
         pub struct $column_name;
 
         impl $crate::expression::Expression for $column_name {
@@ -76,7 +85,13 @@
         {
         }
 
-        impl $crate::expression::NonAggregate for $column_name {}
+        impl $crate::expression::ValidGrouping<()> for $column_name {
+            type IsAggregate = $crate::expression::is_aggregate::No;
+        }
+
+        impl $crate::expression::ValidGrouping<$column_name> for $column_name {
+            type IsAggregate = $crate::expression::is_aggregate::Yes;
+        }
 
         impl $crate::query_source::Column for $column_name {
             type Table = $table;
@@ -95,8 +110,8 @@
             }
         }
 
-        __diesel_generate_ops_impls_if_numeric!($column_name, $($Type)*);
-        __diesel_generate_ops_impls_if_date_time!($column_name, $($Type)*);
+        $crate::__diesel_generate_ops_impls_if_numeric!($column_name, $($Type)*);
+        $crate::__diesel_generate_ops_impls_if_date_time!($column_name, $($Type)*);
     }
 }
 
@@ -118,38 +133,33 @@
 /// -------------
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
-/// table! {
+/// diesel::table! {
 ///     users {
 ///         id -> Integer,
 ///         name -> VarChar,
 ///         favorite_color -> Nullable<VarChar>,
 ///     }
 /// }
-/// # fn main() {}
 /// ```
 ///
 /// You may also specify a primary key if it's called something other than `id`.
 /// Tables with no primary key are not supported.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
-/// table! {
+/// diesel::table! {
 ///     users (non_standard_primary_key) {
 ///         non_standard_primary_key -> Integer,
 ///         name -> VarChar,
 ///         favorite_color -> Nullable<VarChar>,
 ///     }
 /// }
-/// # fn main() {}
 /// ```
 ///
 /// For tables with composite primary keys, list all of the columns in the
 /// primary key.
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
-/// table! {
+/// diesel::table! {
 ///     followings (user_id, post_id) {
 ///         user_id -> Integer,
 ///         post_id -> Integer,
@@ -157,7 +167,7 @@
 ///     }
 /// }
 /// # fn main() {
-/// #     use diesel::prelude::*;
+/// #     use diesel::prelude::Table;
 /// #     use self::followings::dsl::*;
 /// #     // Poor man's assert_eq! -- since this is type level this would fail
 /// #     // to compile if the wrong primary key were generated
@@ -169,12 +179,11 @@
 /// which types to import.
 ///
 /// ```
-/// # #[macro_use] extern crate diesel;
 /// # mod diesel_full_text_search {
 /// #     pub struct TsVector;
 /// # }
 ///
-/// table! {
+/// diesel::table! {
 ///     use diesel::sql_types::*;
 /// #    use crate::diesel_full_text_search::*;
 /// # /*
@@ -194,10 +203,7 @@
 /// following syntax:
 ///
 /// ```
-/// #[macro_use] extern crate diesel;
-///
-/// table! {
-///
+/// diesel::table! {
 ///     /// The table containing all blog posts
 ///     posts {
 ///         /// The post's unique id
@@ -206,16 +212,13 @@
 ///         title -> Text,
 ///     }
 /// }
-/// # fn main() {}
 /// ```
 ///
 /// If you have a column with the same name as a Rust reserved keyword, you can use
 /// the `sql_name` attribute like this:
 ///
 /// ```
-/// #[macro_use] extern crate diesel;
-///
-/// table! {
+/// diesel::table! {
 ///     posts {
 ///         id -> Integer,
 ///         /// This column is named `mytype` but references the table `type` column.
@@ -223,7 +226,6 @@
 ///         mytype -> Text,
 ///     }
 /// }
-/// # fn main() {}
 /// ```
 ///
 /// This module will also contain several helper types:
@@ -269,7 +271,7 @@
 /// ```ignore
 /// pub type BoxedQuery<'a, DB, ST = SqlType> = BoxedSelectStatement<'a, ST, table, DB>;
 /// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! table {
     ($($tokens:tt)*) => {
         __diesel_parse_table! {
@@ -279,7 +281,7 @@
             sql_name = unknown,
             name = unknown,
             schema = public,
-            primary_key = (id),
+            primary_key = id,
         }
     }
 }
@@ -304,7 +306,7 @@
         imports = [$($imports:tt)*],
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = [$($imports)* use $($import)::+;],
             $($args)*
@@ -319,7 +321,7 @@
         sql_name = $ignore:tt,
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = $imports,
             meta = $meta,
@@ -335,7 +337,7 @@
         meta = [$($meta:tt)*],
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = $imports,
             meta = [$($meta)* #$new_meta],
@@ -353,7 +355,7 @@
         schema = $ignore:tt,
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = $imports,
             meta = $meta,
@@ -373,7 +375,7 @@
         name = $ignore:tt,
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = $imports,
             meta = $meta,
@@ -394,7 +396,7 @@
         primary_key = $ignore:tt,
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [$($rest)*],
             imports = $imports,
             meta = $meta,
@@ -412,7 +414,7 @@
         imports = [],
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [{$($columns)*}],
             imports = [use $crate::sql_types::*;],
             $($args)*
@@ -428,7 +430,7 @@
         name = $name:tt,
         $($args:tt)*
     ) => {
-        __diesel_parse_table! {
+        $crate::__diesel_parse_table! {
             tokens = [{$($columns)*}],
             imports = $imports,
             meta = $meta,
@@ -443,7 +445,7 @@
         tokens = [{$($columns:tt)*}],
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             tokens = [$($columns)*],
             table = { $($args)* },
             columns = [],
@@ -452,7 +454,7 @@
 
     // Invalid syntax
     ($($tokens:tt)*) => {
-        __diesel_invalid_table_syntax!();
+        $crate::__diesel_invalid_table_syntax!();
     }
 }
 
@@ -469,7 +471,7 @@
         ],
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             current_column = {
                 unchecked_meta = [$(#$meta)*],
                 name = $name,
@@ -491,7 +493,7 @@
         ],
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             current_column = {
                 unchecked_meta = [$(#$meta)*],
                 name = $name,
@@ -515,7 +517,7 @@
         },
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             current_column = {
                 unchecked_meta = [$($meta)*],
                 name = $name,
@@ -538,7 +540,7 @@
         },
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             current_column = {
                 unchecked_meta = [$($unchecked_meta)*],
                 name = $name,
@@ -562,7 +564,7 @@
         columns = [$($columns:tt,)*],
         $($args:tt)*
     ) => {
-        __diesel_parse_columns! {
+        $crate::__diesel_parse_columns! {
             tokens = $tokens,
             table = $table,
             columns = [$($columns,)* { $($current_column)* },],
@@ -575,12 +577,12 @@
         tokens = [],
         $($args:tt)*
     ) => {
-        __diesel_table_impl!($($args)*);
+        $crate::__diesel_table_impl!($($args)*);
     };
 
     // Invalid syntax
     ($($tokens:tt)*) => {
-        __diesel_invalid_table_syntax!();
+        $crate::__diesel_invalid_table_syntax!();
     }
 }
 
@@ -624,7 +626,7 @@
             /// table struct renamed to the module name. This is meant to be
             /// glob imported for functions which only deal with one table.
             pub mod dsl {
-                $(static_cond! {
+                $($crate::static_cond! {
                     if $table_name == $column_name {
                         compile_error!(concat!(
                             "Column `",
@@ -649,7 +651,7 @@
             pub const all_columns: ($($column_name,)+) = ($($column_name,)+);
 
             #[allow(non_camel_case_types)]
-            #[derive(Debug, Clone, Copy, QueryId)]
+            #[derive(Debug, Clone, Copy, $crate::query_builder::QueryId)]
             /// The actual table struct
             ///
             /// This is the type which provides the base methods of the query
@@ -672,7 +674,7 @@
             /// Helper type for representing a boxed query from this table
             pub type BoxedQuery<'a, DB, ST = SqlType> = BoxedSelectStatement<'a, ST, table, DB>;
 
-            __diesel_table_query_source_impl!(table, $schema, $sql_name);
+            $crate::__diesel_table_query_source_impl!(table, $schema, $sql_name);
 
             impl AsQuery for table {
                 type SqlType = SqlType;
@@ -803,13 +805,20 @@
                 $($imports)*
 
                 #[allow(non_camel_case_types, dead_code)]
-                #[derive(Debug, Clone, Copy)]
+                #[derive(Debug, Clone, Copy, $crate::query_builder::QueryId)]
                 /// Represents `table_name.*`, which is sometimes needed for
                 /// efficient count queries. It cannot be used in place of
                 /// `all_columns`, and has a `SqlType` of `()` to prevent it
                 /// being used that way
                 pub struct star;
 
+                impl<__GB> $crate::expression::ValidGrouping<__GB> for star
+                where
+                    ($($column_name,)+): $crate::expression::ValidGrouping<__GB>,
+                {
+                    type IsAggregate = <($($column_name,)+) as $crate::expression::ValidGrouping<__GB>>::IsAggregate;
+                }
+
                 impl Expression for star {
                     type SqlType = ();
                 }
@@ -831,7 +840,7 @@
                 impl AppearsOnTable<table> for star {
                 }
 
-                $(__diesel_column! {
+                $($crate::__diesel_column! {
                     table = table,
                     name = $column_name,
                     sql_name = $column_sql_name,
@@ -906,7 +915,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// use schema::*;
 ///
@@ -949,7 +957,7 @@
 /// ```sql
 /// post JOIN users ON posts.user_id = users.id
 /// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! joinable {
     ($($child:ident)::* -> $($parent:ident)::* ($source:ident)) => {
         joinable_inner!($($child)::* ::table => $($parent)::* ::table : ($($child)::* ::$source = $($parent)::* ::table));
@@ -957,7 +965,7 @@
     }
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! joinable_inner {
     ($left_table:path => $right_table:path : ($foreign_key:path = $parent_table:path)) => {
@@ -1026,7 +1034,7 @@
 /// allow_tables_to_appear_in_same_query!(comments, users);
 /// allow_tables_to_appear_in_same_query!(posts, users);
 /// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
 macro_rules! allow_tables_to_appear_in_same_query {
     ($left_mod:ident, $($right_mod:ident),+ $(,)*) => {
         $(
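
A quick sketch of what the `local_inner_macros` / `$crate::` changes in this file enable for downstream crates on the 2018 edition: the schema macros can be invoked by path (or imported through the new macro prelude) instead of relying on `#[macro_use] extern crate diesel;`. The schema below is hypothetical.

```rust
// No `#[macro_use] extern crate diesel;` required any more; alternatively,
// `use diesel::prelude::*;` now pulls in `table!`, `joinable!`, and
// `allow_tables_to_appear_in_same_query!`.
diesel::table! {
    users {
        id -> Integer,
        name -> Text,
    }
}

diesel::table! {
    posts {
        id -> Integer,
        user_id -> Integer,
        title -> Text,
    }
}

diesel::joinable!(posts -> users (user_id));
diesel::allow_tables_to_appear_in_same_query!(posts, users);
```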
diff --git a/diesel/src/macros/ops.rs b/diesel/src/macros/ops.rs
index e150898..63292c3 100644
--- a/diesel/src/macros/ops.rs
+++ b/diesel/src/macros/ops.rs
@@ -21,7 +21,7 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 /// Indicates that an expression allows all numeric operators. If you create new
 /// SQL functions that return a numeric type, you should invoke this macro for
 /// that type. Unfortunately, Rust disallows us from automatically implementing `Add`
@@ -35,10 +35,12 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! __diesel_generate_ops_impls_if_numeric {
     ($column_name:ident, Nullable<$($inner:tt)::*>) => { __diesel_generate_ops_impls_if_numeric!($column_name, $($inner)::*); };
+
+    ($column_name:ident, Unsigned<$($inner:tt)::*>) => { __diesel_generate_ops_impls_if_numeric!($column_name, $($inner)::*); };
 
     ($column_name:ident, SmallInt) => { numeric_expr!($column_name); };
     ($column_name:ident, Int2) => { numeric_expr!($column_name); };
@@ -65,7 +67,7 @@
     ($column_name:ident, $non_numeric_type:ty) => {};
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! date_time_expr {
     ($tpe:ty) => {
@@ -74,7 +76,7 @@
     };
 }
 
-#[macro_export]
+#[macro_export(local_inner_macros)]
 #[doc(hidden)]
 macro_rules! __diesel_generate_ops_impls_if_date_time {
     ($column_name:ident, Nullable<$($inner:tt)::*>) => { __diesel_generate_ops_impls_if_date_time!($column_name, $($inner)::*); };
diff --git a/diesel/src/macros/static_cond.rs b/diesel/src/macros/static_cond.rs
index c6279f6..d59c78e 100644
--- a/diesel/src/macros/static_cond.rs
+++ b/diesel/src/macros/static_cond.rs
@@ -21,18 +21,18 @@
 
     // no else condition provided: fall through with empty else
     (if $lhs:tt == $rhs:tt $then:tt) => {
-        static_cond!(if $lhs == $rhs $then else { });
+        $crate::static_cond!(if $lhs == $rhs $then else { });
     };
     (if $lhs:tt != $rhs:tt $then:tt) => {
-        static_cond!(if $lhs != $rhs $then else { });
+        $crate::static_cond!(if $lhs != $rhs $then else { });
     };
 
     // we evaluate a conditional by generating a new macro (in an inner scope, so name shadowing is
     // not a big concern) and calling it
     (if $lhs:tt == $rhs:tt $then:tt else $els:tt) => {
-        static_cond!(@go $lhs $rhs $then $els);
+        $crate::static_cond!(@go $lhs $rhs $then $els);
     };
     (if $lhs:tt != $rhs:tt $then:tt else $els:tt) => {
-        static_cond!(@go $lhs $rhs $els $then);
+        $crate::static_cond!(@go $lhs $rhs $els $then);
     };
 }
diff --git a/diesel/src/migration/errors.rs b/diesel/src/migration/errors.rs
index 262185d..c0983a7 100644
--- a/diesel/src/migration/errors.rs
+++ b/diesel/src/migration/errors.rs
@@ -11,9 +11,10 @@
 
 /// Errors that occur while preparing to run migrations
 #[derive(Debug)]
+#[non_exhaustive]
 pub enum MigrationError {
     /// The migration directory wasn't found
-    MigrationDirectoryNotFound,
+    MigrationDirectoryNotFound(PathBuf),
     /// Provided migration was in an unknown format
     UnknownMigrationFormat(PathBuf),
     /// General system IO error
@@ -22,9 +23,6 @@
     UnknownMigrationVersion(String),
     /// No migrations had to be/ could be run
     NoMigrationRun,
-    ///
-    #[doc(hidden)]
-    __NonExhaustive,
 }
 
 impl Error for MigrationError {}
@@ -32,14 +30,16 @@
 impl fmt::Display for MigrationError {
     fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            MigrationError::MigrationDirectoryNotFound => write!(
+            MigrationError::MigrationDirectoryNotFound(ref p) => write!(
                 f,
-                "Unable to find migrations directory in this directory or any parent directories."
+                "Unable to find migrations directory in {:?} or any parent directories.",
+                p
             ),
-            MigrationError::UnknownMigrationFormat(_) => {
-                write!(f,"Invalid migration directory, the directory's name should be \
-                 <timestamp>_<name_of_migration>, and it should only contain up.sql and down.sql.")
-            }
+            MigrationError::UnknownMigrationFormat(_) => write!(
+                f,
+                "Invalid migration directory, the directory's name should be \
+                 <timestamp>_<name_of_migration>, and it should only contain up.sql and down.sql."
+            ),
             MigrationError::IoError(ref error) => write!(f, "{}", error),
             MigrationError::UnknownMigrationVersion(_) => write!(
                 f,
@@ -49,7 +49,6 @@
                 f,
                 "No migrations have been run. Did you forget `diesel migration run`?"
             ),
-            MigrationError::__NonExhaustive => unreachable!(),
         }
     }
 }
@@ -58,8 +57,8 @@
     fn eq(&self, other: &Self) -> bool {
         match (self, other) {
             (
-                &MigrationError::MigrationDirectoryNotFound,
-                &MigrationError::MigrationDirectoryNotFound,
+                &MigrationError::MigrationDirectoryNotFound(_),
+                &MigrationError::MigrationDirectoryNotFound(_),
             ) => true,
             (
                 &MigrationError::UnknownMigrationFormat(ref p1),
@@ -79,6 +78,7 @@
 /// Errors that occur while running migrations
 #[derive(Debug, PartialEq)]
 #[allow(clippy::enum_variant_names)]
+#[non_exhaustive]
 pub enum RunMigrationsError {
     /// A general migration error occured
     MigrationError(MigrationError),
@@ -86,9 +86,6 @@
     QueryError(result::Error),
     /// The provided migration was empty
     EmptyMigration,
-    ///
-    #[doc(hidden)]
-    __NonExhaustive,
 }
 
 impl Error for RunMigrationsError {}
@@ -101,7 +98,6 @@
             RunMigrationsError::EmptyMigration => {
                 write!(f, "Failed with: Attempted to run an empty migration.")
             }
-            RunMigrationsError::__NonExhaustive => unreachable!(),
         }
     }
 }
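
Because both error enums are now `#[non_exhaustive]` and `MigrationDirectoryNotFound` carries the path that was searched, downstream matches need a wildcard arm and can report the path. A rough sketch of such a caller (the helper function is hypothetical):

```rust
use diesel::migration::{MigrationError, RunMigrationsError};

// Hypothetical reporting helper; the `_` arm is required by `#[non_exhaustive]`.
fn describe_migration_error(err: &RunMigrationsError) -> String {
    match err {
        RunMigrationsError::MigrationError(MigrationError::MigrationDirectoryNotFound(path)) => {
            format!("no migrations directory found at or above {}", path.display())
        }
        RunMigrationsError::QueryError(e) => format!("migration query failed: {}", e),
        RunMigrationsError::EmptyMigration => "attempted to run an empty migration".into(),
        _ => format!("migration failed: {}", err),
    }
}
```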
diff --git a/diesel/src/migration/mod.rs b/diesel/src/migration/mod.rs
index e679d03..f567055 100644
--- a/diesel/src/migration/mod.rs
+++ b/diesel/src/migration/mod.rs
@@ -66,15 +66,13 @@
     /// Setup the following table:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
-    /// table! {
+    /// diesel::table! {
     ///      __diesel_schema_migrations(version) {
     ///          version -> Text,
     ///          /// defaults to `CURRENT_TIMESTAMP`
     ///          run_on -> Timestamp,
     ///      }
     /// }
-    /// # fn main() {}
     /// ```
     fn setup(&self) -> QueryResult<usize>;
 }
diff --git a/diesel/src/mysql/backend.rs b/diesel/src/mysql/backend.rs
index c7e66f4..a8ceb33 100644
--- a/diesel/src/mysql/backend.rs
+++ b/diesel/src/mysql/backend.rs
@@ -36,6 +36,7 @@
 /// The null variant is omitted, as we will never prepare a statement in which
 /// one of the bind parameters can always be NULL
 #[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
+#[non_exhaustive]
 pub enum MysqlType {
     /// Sets `buffer_type` to `MYSQL_TYPE_TINY`
     Tiny,
diff --git a/diesel/src/mysql/connection/raw.rs b/diesel/src/mysql/connection/raw.rs
index 27efc5d..be771ce 100644
--- a/diesel/src/mysql/connection/raw.rs
+++ b/diesel/src/mysql/connection/raw.rs
@@ -45,6 +45,7 @@
         let password = connection_options.password();
         let database = connection_options.database();
         let port = connection_options.port();
+        let unix_socket = connection_options.unix_socket();
 
         unsafe {
             // Make sure you don't use the fake one!
@@ -59,7 +60,9 @@
                     .map(CStr::as_ptr)
                     .unwrap_or_else(|| ptr::null_mut()),
                 u32::from(port.unwrap_or(0)),
-                ptr::null_mut(),
+                unix_socket
+                    .map(CStr::as_ptr)
+                    .unwrap_or_else(|| ptr::null_mut()),
                 0,
             )
         };
diff --git a/diesel/src/mysql/connection/stmt/mod.rs b/diesel/src/mysql/connection/stmt/mod.rs
index d1a5e40..6524a43 100644
--- a/diesel/src/mysql/connection/stmt/mod.rs
+++ b/diesel/src/mysql/connection/stmt/mod.rs
@@ -141,12 +141,14 @@
     fn last_error_type(&self) -> DatabaseErrorKind {
         let last_error_number = unsafe { ffi::mysql_stmt_errno(self.stmt.as_ptr()) };
         // These values are not exposed by the C API, but are documented
-        // at https://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
+        // at https://dev.mysql.com/doc/refman/8.0/en/server-error-reference.html
         // and are from the ANSI SQLSTATE standard
         match last_error_number {
             1062 | 1586 | 1859 => DatabaseErrorKind::UniqueViolation,
             1216 | 1217 | 1451 | 1452 | 1830 | 1834 => DatabaseErrorKind::ForeignKeyViolation,
             1792 => DatabaseErrorKind::ReadOnlyTransaction,
+            1048 | 1364 => DatabaseErrorKind::NotNullViolation,
+            3819 => DatabaseErrorKind::CheckViolation,
             _ => DatabaseErrorKind::__Unknown,
         }
     }
diff --git a/diesel/src/mysql/connection/url.rs b/diesel/src/mysql/connection/url.rs
index 99d1929..5fc8462 100644
--- a/diesel/src/mysql/connection/url.rs
+++ b/diesel/src/mysql/connection/url.rs
@@ -3,6 +3,7 @@
 
 use self::percent_encoding::percent_decode;
 use self::url::{Host, Url};
+use std::collections::HashMap;
 use std::ffi::{CStr, CString};
 
 use crate::result::{ConnectionError, ConnectionResult};
@@ -13,6 +14,7 @@
     password: Option<CString>,
     database: Option<CString>,
     port: Option<u16>,
+    unix_socket: Option<CString>,
 }
 
 impl ConnectionOptions {
@@ -30,8 +32,19 @@
             return Err(connection_url_error());
         }
 
+        let query_pairs = url.query_pairs().into_owned().collect::<HashMap<_, _>>();
+        if query_pairs.get("database").is_some() {
+            return Err(connection_url_error());
+        }
+
+        let unix_socket = match query_pairs.get("unix_socket") {
+            Some(v) => Some(CString::new(v.as_bytes())?),
+            _ => None,
+        };
+
         let host = match url.host() {
             Some(Host::Ipv6(host)) => Some(CString::new(host.to_string())?),
+            Some(host) if host.to_string() == "localhost" && unix_socket != None => None,
             Some(host) => Some(CString::new(host.to_string())?),
             None => None,
         };
@@ -40,7 +53,8 @@
             Some(password) => Some(decode_into_cstring(password)?),
             None => None,
         };
-        let database = match url.path_segments().and_then(|mut iter| iter.nth(0)) {
+
+        let database = match url.path_segments().and_then(|mut iter| iter.next()) {
             Some("") | None => None,
             Some(segment) => Some(CString::new(segment.as_bytes())?),
         };
@@ -51,6 +65,7 @@
             password: password,
             database: database,
             port: url.port(),
+            unix_socket: unix_socket,
         })
     }
 
@@ -73,6 +88,10 @@
     pub fn port(&self) -> Option<u16> {
         self.port
     }
+
+    pub fn unix_socket(&self) -> Option<&CStr> {
+        self.unix_socket.as_ref().map(|x| &**x)
+    }
 }
 
 fn decode_into_cstring(s: &str) -> ConnectionResult<CString> {
@@ -84,7 +103,7 @@
 
 fn connection_url_error() -> ConnectionError {
     let msg = "MySQL connection URLs must be in the form \
-               `mysql://[[user]:[password]@]host[:port][/database]`";
+               `mysql://[[user]:[password]@]host[:port][/database][?unix_socket=socket-path]`";
     ConnectionError::InvalidConnectionUrl(msg.into())
 }
 
@@ -94,6 +113,7 @@
     assert!(ConnectionOptions::parse("http://localhost").is_err());
     assert!(ConnectionOptions::parse("file:///tmp/mysql.sock").is_err());
     assert!(ConnectionOptions::parse("socket:///tmp/mysql.sock").is_err());
+    assert!(ConnectionOptions::parse("mysql://localhost?database=somedb").is_err());
     assert!(ConnectionOptions::parse("mysql://localhost").is_ok());
 }
 
@@ -186,3 +206,24 @@
             .host()
     );
 }
+
+#[test]
+fn unix_socket_tests() {
+    let unix_socket = "/var/run/mysqld.sock";
+    let username = "foo";
+    let password = "bar";
+    let db_url = format!(
+        "mysql://{}:{}@localhost?unix_socket={}",
+        username, password, unix_socket
+    );
+    let conn_opts = ConnectionOptions::parse(db_url.as_str()).unwrap();
+    let cstring = |s| CString::new(s).unwrap();
+    assert_eq!(None, conn_opts.host);
+    assert_eq!(None, conn_opts.port);
+    assert_eq!(cstring(username), conn_opts.user);
+    assert_eq!(cstring(password), conn_opts.password.unwrap());
+    assert_eq!(
+        CString::new(unix_socket).unwrap(),
+        conn_opts.unix_socket.unwrap()
+    );
+}
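
A usage sketch for the new `unix_socket` URL parameter (credentials, database, and socket path are hypothetical, and the `mysql` feature is assumed): when the parameter is present and the host is `localhost`, the host is dropped and libmysqlclient connects through the socket instead of TCP.

```rust
use diesel::prelude::*;

fn connect() -> ConnectionResult<MysqlConnection> {
    // Connects via the Unix domain socket rather than TCP to localhost.
    MysqlConnection::establish(
        "mysql://app_user:secret@localhost/my_db?unix_socket=/var/run/mysqld/mysqld.sock",
    )
}
```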
diff --git a/diesel/src/mysql/query_builder/limit_offset.rs b/diesel/src/mysql/query_builder/limit_offset.rs
new file mode 100644
index 0000000..2ddb0ae
--- /dev/null
+++ b/diesel/src/mysql/query_builder/limit_offset.rs
@@ -0,0 +1,104 @@
+use crate::mysql::Mysql;
+use crate::query_builder::limit_clause::{LimitClause, NoLimitClause};
+use crate::query_builder::limit_offset_clause::{BoxedLimitOffsetClause, LimitOffsetClause};
+use crate::query_builder::offset_clause::{NoOffsetClause, OffsetClause};
+use crate::query_builder::{AstPass, IntoBoxedClause, QueryFragment};
+use crate::result::QueryResult;
+
+impl QueryFragment<Mysql> for LimitOffsetClause<NoLimitClause, NoOffsetClause> {
+    fn walk_ast(&self, _out: AstPass<Mysql>) -> QueryResult<()> {
+        Ok(())
+    }
+}
+
+impl<L> QueryFragment<Mysql> for LimitOffsetClause<LimitClause<L>, NoOffsetClause>
+where
+    LimitClause<L>: QueryFragment<Mysql>,
+{
+    fn walk_ast(&self, out: AstPass<Mysql>) -> QueryResult<()> {
+        self.limit_clause.walk_ast(out)?;
+        Ok(())
+    }
+}
+
+impl<L, O> QueryFragment<Mysql> for LimitOffsetClause<LimitClause<L>, OffsetClause<O>>
+where
+    LimitClause<L>: QueryFragment<Mysql>,
+    OffsetClause<O>: QueryFragment<Mysql>,
+{
+    fn walk_ast(&self, mut out: AstPass<Mysql>) -> QueryResult<()> {
+        self.limit_clause.walk_ast(out.reborrow())?;
+        self.offset_clause.walk_ast(out.reborrow())?;
+        Ok(())
+    }
+}
+
+impl<'a> QueryFragment<Mysql> for BoxedLimitOffsetClause<'a, Mysql> {
+    fn walk_ast(&self, mut out: AstPass<Mysql>) -> QueryResult<()> {
+        match (self.limit.as_ref(), self.offset.as_ref()) {
+            (Some(limit), Some(offset)) => {
+                limit.walk_ast(out.reborrow())?;
+                offset.walk_ast(out.reborrow())?;
+            }
+            (Some(limit), None) => {
+                limit.walk_ast(out.reborrow())?;
+            }
+            (None, Some(offset)) => {
+                // MySQL requires a limit clause in front of any offset clause.
+                // The documentation proposes the following:
+                // > To retrieve all rows from a certain offset up to the end of the
+                // > result set, you can use some large number for the second parameter.
+                // https://dev.mysql.com/doc/refman/8.0/en/select.html
+                // Therefore we just use u64::MAX as the limit here.
+                // That does not impose any practical limitation, because MySQL only
+                // supports up to 64 TB of data per table. Assuming 1 bit per row this
+                // means at most 64 * 1024^4 * 8 = 562,949,953,421,312 rows, which is
+                // smaller than u64::MAX = 18,446,744,073,709,551,615.
+                out.push_sql(" LIMIT 18446744073709551615 ");
+                offset.walk_ast(out.reborrow())?;
+            }
+            (None, None) => {}
+        }
+        Ok(())
+    }
+}
+
+impl<'a> IntoBoxedClause<'a, Mysql> for LimitOffsetClause<NoLimitClause, NoOffsetClause> {
+    type BoxedClause = BoxedLimitOffsetClause<'a, Mysql>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: None,
+            offset: None,
+        }
+    }
+}
+
+impl<'a, L> IntoBoxedClause<'a, Mysql> for LimitOffsetClause<LimitClause<L>, NoOffsetClause>
+where
+    L: QueryFragment<Mysql> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Mysql>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: Some(Box::new(self.limit_clause)),
+            offset: None,
+        }
+    }
+}
+
+impl<'a, L, O> IntoBoxedClause<'a, Mysql> for LimitOffsetClause<LimitClause<L>, OffsetClause<O>>
+where
+    L: QueryFragment<Mysql> + Send + 'a,
+    O: QueryFragment<Mysql> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Mysql>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: Some(Box::new(self.limit_clause)),
+            offset: Some(Box::new(self.offset_clause)),
+        }
+    }
+}
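
For context, here is a sketch of the case the offset-only branch above exists for: a boxed MySQL query that sets an offset but no limit (schema hypothetical, `mysql` feature assumed).

```rust
use diesel::mysql::Mysql;
use diesel::prelude::*;

diesel::table! {
    users {
        id -> Integer,
        name -> Text,
    }
}

fn page(conn: &MysqlConnection) -> QueryResult<Vec<(i32, String)>> {
    // Boxing erases the concrete limit/offset clause types; with no limit set,
    // the `walk_ast` above injects `LIMIT 18446744073709551615` before OFFSET.
    users::table.into_boxed::<Mysql>().offset(10).load(conn)
}
```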
diff --git a/diesel/src/mysql/query_builder/mod.rs b/diesel/src/mysql/query_builder/mod.rs
index cd20c86..37d73cc 100644
--- a/diesel/src/mysql/query_builder/mod.rs
+++ b/diesel/src/mysql/query_builder/mod.rs
@@ -2,6 +2,7 @@
 use crate::query_builder::QueryBuilder;
 use crate::result::QueryResult;
 
+mod limit_offset;
 mod query_fragment_impls;
 
 /// The MySQL query builder
diff --git a/diesel/src/mysql/types/mod.rs b/diesel/src/mysql/types/mod.rs
index 48e1f70..de593a1 100644
--- a/diesel/src/mysql/types/mod.rs
+++ b/diesel/src/mysql/types/mod.rs
@@ -9,7 +9,9 @@
 
 use crate::deserialize::{self, FromSql};
 use crate::mysql::{Mysql, MysqlTypeMetadata, MysqlValue};
+use crate::query_builder::QueryId;
 use crate::serialize::{self, IsNull, Output, ToSql};
+use crate::sql_types::ops::*;
 use crate::sql_types::*;
 
 impl ToSql<TinyInt, Mysql> for i8 {
@@ -30,6 +32,38 @@
 #[derive(Debug, Clone, Copy, Default, SqlType, QueryId)]
 pub struct Unsigned<ST>(ST);
 
+impl<T> Add for Unsigned<T>
+where
+    T: Add,
+{
+    type Rhs = Unsigned<T::Rhs>;
+    type Output = Unsigned<T::Output>;
+}
+
+impl<T> Sub for Unsigned<T>
+where
+    T: Sub,
+{
+    type Rhs = Unsigned<T::Rhs>;
+    type Output = Unsigned<T::Output>;
+}
+
+impl<T> Mul for Unsigned<T>
+where
+    T: Mul,
+{
+    type Rhs = Unsigned<T::Rhs>;
+    type Output = Unsigned<T::Output>;
+}
+
+impl<T> Div for Unsigned<T>
+where
+    T: Div,
+{
+    type Rhs = Unsigned<T::Rhs>;
+    type Output = Unsigned<T::Output>;
+}
+
 impl ToSql<Unsigned<TinyInt>, Mysql> for u8 {
     fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
         ToSql::<TinyInt, Mysql>::to_sql(&(*self as i8), out)
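
A sketch of what the new `Add`/`Sub`/`Mul`/`Div` impls (together with the `Unsigned<...>` arm added to `__diesel_generate_ops_impls_if_numeric!`) are meant to enable: arithmetic on unsigned MySQL columns. The table is hypothetical, and this assumes `Unsigned` remains re-exported from `diesel::sql_types` and that `u32: AsExpression<Unsigned<Integer>>` holds as for the signed integer types.

```rust
use diesel::prelude::*;

diesel::table! {
    use diesel::sql_types::*;

    counters {
        id -> Integer,
        hits -> Unsigned<Integer>,
    }
}

fn bump(conn: &MysqlConnection) -> QueryResult<usize> {
    // `hits + 1_u32` now type-checks: `Unsigned<Integer>` implements the
    // numeric op traits by forwarding to the wrapped type.
    diesel::update(counters::table)
        .set(counters::hits.eq(counters::hits + 1_u32))
        .execute(conn)
}
```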
diff --git a/diesel/src/pg/backend.rs b/diesel/src/pg/backend.rs
index 5b852e7..5d78758 100644
--- a/diesel/src/pg/backend.rs
+++ b/diesel/src/pg/backend.rs
@@ -52,5 +52,6 @@
 }
 
 impl SupportsReturningClause for Pg {}
+impl SupportsOnConflictClause for Pg {}
 impl SupportsDefaultKeyword for Pg {}
 impl UsesAnsiSavepointSyntax for Pg {}
diff --git a/diesel/src/pg/connection/mod.rs b/diesel/src/pg/connection/mod.rs
index ac4512a..b5aabeb 100644
--- a/diesel/src/pg/connection/mod.rs
+++ b/diesel/src/pg/connection/mod.rs
@@ -115,7 +115,6 @@
     /// [`TransactionBuilder`]: ../pg/struct.TransactionBuilder.html
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
diff --git a/diesel/src/pg/connection/raw.rs b/diesel/src/pg/connection/raw.rs
index 98eb4f9..c7edf24 100644
--- a/diesel/src/pg/connection/raw.rs
+++ b/diesel/src/pg/connection/raw.rs
@@ -32,6 +32,14 @@
             }
             _ => {
                 let message = last_error_message(connection_ptr);
+
+                if !connection_ptr.is_null() {
+                    // Note that even if the server connection attempt fails (as indicated by PQstatus),
+                    // the application should call PQfinish to free the memory used by the PGconn object.
+                    // https://www.postgresql.org/docs/current/libpq-connect.html
+                    unsafe { PQfinish(connection_ptr) }
+                }
+
                 Err(ConnectionError::BadConnection(message))
             }
         }
diff --git a/diesel/src/pg/connection/result.rs b/diesel/src/pg/connection/result.rs
index b1e3bb1..9a4eda3 100644
--- a/diesel/src/pg/connection/result.rs
+++ b/diesel/src/pg/connection/result.rs
@@ -42,6 +42,10 @@
                         Some(error_codes::READ_ONLY_TRANSACTION) => {
                             DatabaseErrorKind::ReadOnlyTransaction
                         }
+                        Some(error_codes::NOT_NULL_VIOLATION) => {
+                            DatabaseErrorKind::NotNullViolation
+                        }
+                        Some(error_codes::CHECK_VIOLATION) => DatabaseErrorKind::CheckViolation,
                         _ => DatabaseErrorKind::__Unknown,
                     };
                 let error_information = Box::new(PgErrorInformation(internal_result));
@@ -182,4 +186,6 @@
     pub const FOREIGN_KEY_VIOLATION: &str = "23503";
     pub const SERIALIZATION_FAILURE: &str = "40001";
     pub const READ_ONLY_TRANSACTION: &str = "25006";
+    pub const NOT_NULL_VIOLATION: &str = "23502";
+    pub const CHECK_VIOLATION: &str = "23514";
 }
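
The two new mappings surface to callers as `DatabaseErrorKind::NotNullViolation` and `DatabaseErrorKind::CheckViolation`. A rough sketch of branching on them (the helper is hypothetical):

```rust
use diesel::result::{DatabaseErrorInformation, DatabaseErrorKind, Error, QueryResult};

// Hypothetical helper turning a statement result into a user-facing message.
fn describe_failure(res: QueryResult<usize>) -> Option<String> {
    match res {
        Ok(_) => None,
        Err(Error::DatabaseError(DatabaseErrorKind::NotNullViolation, info)) => {
            Some(format!("a required column was NULL: {}", info.message()))
        }
        Err(Error::DatabaseError(DatabaseErrorKind::CheckViolation, info)) => {
            Some(format!("a CHECK constraint failed: {}", info.message()))
        }
        Err(other) => Some(other.to_string()),
    }
}
```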
diff --git a/diesel/src/pg/expression/array.rs b/diesel/src/pg/expression/array.rs
index 42c6339..74a6b5f 100644
--- a/diesel/src/pg/expression/array.rs
+++ b/diesel/src/pg/expression/array.rs
@@ -1,8 +1,8 @@
 use crate::backend::Backend;
 use crate::expression::{
-    AppearsOnTable, AsExpressionList, Expression, NonAggregate, SelectableExpression,
+    AppearsOnTable, AsExpressionList, Expression, SelectableExpression, ValidGrouping,
 };
-use crate::query_builder::{AstPass, QueryFragment};
+use crate::query_builder::{AstPass, QueryFragment, QueryId};
 use crate::sql_types;
 use std::marker::PhantomData;
 
@@ -21,7 +21,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -91,9 +90,9 @@
 {
 }
 
-impl<T, ST> NonAggregate for ArrayLiteral<T, ST>
+impl<T, ST, GB> ValidGrouping<GB> for ArrayLiteral<T, ST>
 where
-    T: NonAggregate,
-    ArrayLiteral<T, ST>: Expression,
+    T: ValidGrouping<GB>,
 {
+    type IsAggregate = T::IsAggregate;
 }
diff --git a/diesel/src/pg/expression/array_comparison.rs b/diesel/src/pg/expression/array_comparison.rs
index 899aa75..0f4ef4d 100644
--- a/diesel/src/pg/expression/array_comparison.rs
+++ b/diesel/src/pg/expression/array_comparison.rs
@@ -1,5 +1,5 @@
 use crate::expression::subselect::Subselect;
-use crate::expression::{AsExpression, Expression};
+use crate::expression::{AsExpression, Expression, ValidGrouping};
 use crate::pg::Pg;
 use crate::query_builder::*;
 use crate::result::QueryResult;
@@ -14,7 +14,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../../doctest_setup.rs");
 /// # use diesel::dsl::*;
 /// #
@@ -43,7 +42,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../../doctest_setup.rs");
 /// # use diesel::dsl::*;
 /// #
@@ -64,7 +62,7 @@
 }
 
 #[doc(hidden)]
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct Any<Expr> {
     expr: Expr,
 }
@@ -97,7 +95,7 @@
 impl_selectable_expression!(Any<Expr>);
 
 #[doc(hidden)]
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct All<Expr> {
     expr: Expr,
 }
diff --git a/diesel/src/pg/expression/date_and_time.rs b/diesel/src/pg/expression/date_and_time.rs
index 2aaf79c..f0cfff2 100644
--- a/diesel/src/pg/expression/date_and_time.rs
+++ b/diesel/src/pg/expression/date_and_time.rs
@@ -1,4 +1,4 @@
-use crate::expression::Expression;
+use crate::expression::{Expression, ValidGrouping};
 use crate::pg::Pg;
 use crate::query_builder::*;
 use crate::result::QueryResult;
@@ -11,7 +11,7 @@
 impl DateTimeLike for Timestamptz {}
 impl<T: NotNull + DateTimeLike> DateTimeLike for Nullable<T> {}
 
-#[derive(Debug, Copy, Clone, QueryId, NonAggregate)]
+#[derive(Debug, Copy, Clone, QueryId, ValidGrouping)]
 pub struct AtTimeZone<Ts, Tz> {
     timestamp: Ts,
     timezone: Tz,
diff --git a/diesel/src/pg/expression/expression_methods.rs b/diesel/src/pg/expression/expression_methods.rs
index 669bfb8..8b89318 100644
--- a/diesel/src/pg/expression/expression_methods.rs
+++ b/diesel/src/pg/expression/expression_methods.rs
@@ -14,7 +14,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -41,7 +40,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -81,7 +79,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -144,7 +141,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -203,7 +199,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -254,7 +249,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -330,7 +324,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -379,7 +372,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -431,7 +423,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -458,7 +449,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
diff --git a/diesel/src/pg/expression/extensions/interval_dsl.rs b/diesel/src/pg/expression/extensions/interval_dsl.rs
index 3c62837..cbae7cc 100644
--- a/diesel/src/pg/expression/extensions/interval_dsl.rs
+++ b/diesel/src/pg/expression/extensions/interval_dsl.rs
@@ -10,7 +10,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../../../doctest_setup.rs");
 /// # use diesel::dsl::*;
 /// #
@@ -42,7 +41,6 @@
 /// ```
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../../../doctest_setup.rs");
 /// # use diesel::dsl::*;
 /// #
diff --git a/diesel/src/pg/mod.rs b/diesel/src/pg/mod.rs
index 8a9b7b9..e284495 100644
--- a/diesel/src/pg/mod.rs
+++ b/diesel/src/pg/mod.rs
@@ -6,12 +6,11 @@
 
 pub mod expression;
 pub mod types;
-pub mod upsert;
 
 mod backend;
 mod connection;
 mod metadata_lookup;
-mod query_builder;
+pub(crate) mod query_builder;
 pub(crate) mod serialize;
 mod transaction;
 mod value;
@@ -23,6 +22,10 @@
 pub use self::query_builder::PgQueryBuilder;
 pub use self::transaction::TransactionBuilder;
 pub use self::value::PgValue;
+#[doc(hidden)]
+#[cfg(feature = "with-deprecated")]
+#[deprecated(since = "2.0.0", note = "Use `diesel::upsert` instead")]
+pub use crate::upsert;
 
 /// Data structures for PG types which have no corresponding Rust type
 ///
diff --git a/diesel/src/pg/query_builder/distinct_on.rs b/diesel/src/pg/query_builder/distinct_on.rs
index 37d464b..2634371 100644
--- a/diesel/src/pg/query_builder/distinct_on.rs
+++ b/diesel/src/pg/query_builder/distinct_on.rs
@@ -1,6 +1,6 @@
 use crate::expression::SelectableExpression;
 use crate::pg::Pg;
-use crate::query_builder::{AstPass, QueryFragment, SelectQuery, SelectStatement};
+use crate::query_builder::{AstPass, QueryFragment, QueryId, SelectQuery, SelectStatement};
 use crate::query_dsl::methods::DistinctOnDsl;
 use crate::result::QueryResult;
 
@@ -20,14 +20,14 @@
     }
 }
 
-impl<ST, F, S, D, W, O, L, Of, G, Selection> DistinctOnDsl<Selection>
-    for SelectStatement<F, S, D, W, O, L, Of, G>
+impl<ST, F, S, D, W, O, LOf, G, Selection> DistinctOnDsl<Selection>
+    for SelectStatement<F, S, D, W, O, LOf, G>
 where
     Selection: SelectableExpression<F>,
     Self: SelectQuery<SqlType = ST>,
-    SelectStatement<F, S, DistinctOnClause<Selection>, W, O, L, Of, G>: SelectQuery<SqlType = ST>,
+    SelectStatement<F, S, DistinctOnClause<Selection>, W, O, LOf, G>: SelectQuery<SqlType = ST>,
 {
-    type Output = SelectStatement<F, S, DistinctOnClause<Selection>, W, O, L, Of, G>;
+    type Output = SelectStatement<F, S, DistinctOnClause<Selection>, W, O, LOf, G>;
 
     fn distinct_on(self, selection: Selection) -> Self::Output {
         SelectStatement::new(
@@ -36,8 +36,7 @@
             DistinctOnClause(selection),
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
diff --git a/diesel/src/pg/query_builder/limit_offset.rs b/diesel/src/pg/query_builder/limit_offset.rs
new file mode 100644
index 0000000..ad4c61e
--- /dev/null
+++ b/diesel/src/pg/query_builder/limit_offset.rs
@@ -0,0 +1,43 @@
+use crate::pg::Pg;
+use crate::query_builder::limit_offset_clause::{BoxedLimitOffsetClause, LimitOffsetClause};
+use crate::query_builder::{AstPass, IntoBoxedClause, QueryFragment};
+use crate::result::QueryResult;
+
+impl<'a, L, O> IntoBoxedClause<'a, Pg> for LimitOffsetClause<L, O>
+where
+    L: QueryFragment<Pg> + Send + 'a,
+    O: QueryFragment<Pg> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Pg>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: Some(Box::new(self.limit_clause)),
+            offset: Some(Box::new(self.offset_clause)),
+        }
+    }
+}
+
+impl<'a> QueryFragment<Pg> for BoxedLimitOffsetClause<'a, Pg> {
+    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+        if let Some(ref limit) = self.limit {
+            limit.walk_ast(out.reborrow())?;
+        }
+        if let Some(ref offset) = self.offset {
+            offset.walk_ast(out.reborrow())?;
+        }
+        Ok(())
+    }
+}
+
+impl<L, O> QueryFragment<Pg> for LimitOffsetClause<L, O>
+where
+    L: QueryFragment<Pg>,
+    O: QueryFragment<Pg>,
+{
+    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+        self.limit_clause.walk_ast(out.reborrow())?;
+        self.offset_clause.walk_ast(out.reborrow())?;
+        Ok(())
+    }
+}
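
A quick illustration, not part of the patch, of what the impls above render on Pg; it assumes a `users` table like the one in Diesel's doctest setup. Limit and offset now travel together in a single `LimitOffsetClause`, and the two `walk_ast` calls emit them in order.

```rust
use diesel::debug_query;
use diesel::pg::Pg;
use diesel::prelude::*;

fn print_sql() {
    // `users` is assumed to be an existing `table!`-generated schema module.
    let query = users::table.limit(10).offset(20);
    // Prints something along the lines of:
    //   SELECT "users"."id", "users"."name" FROM "users" LIMIT $1 OFFSET $2 -- binds: [10, 20]
    println!("{}", debug_query::<Pg, _>(&query));
}
```
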
diff --git a/diesel/src/pg/query_builder/mod.rs b/diesel/src/pg/query_builder/mod.rs
index 91669e2..3e8ad82 100644
--- a/diesel/src/pg/query_builder/mod.rs
+++ b/diesel/src/pg/query_builder/mod.rs
@@ -3,6 +3,8 @@
 use crate::result::QueryResult;
 
 mod distinct_on;
+mod limit_offset;
+pub(crate) mod on_constraint;
 mod query_fragment_impls;
 pub use self::distinct_on::DistinctOnClause;
 
diff --git a/diesel/src/pg/query_builder/on_constraint.rs b/diesel/src/pg/query_builder/on_constraint.rs
new file mode 100644
index 0000000..0b8202e
--- /dev/null
+++ b/diesel/src/pg/query_builder/on_constraint.rs
@@ -0,0 +1,62 @@
+use crate::pg::Pg;
+use crate::query_builder::upsert::on_conflict_target::{ConflictTarget, OnConflictTarget};
+use crate::query_builder::*;
+use crate::result::QueryResult;
+
+/// Used to specify the constraint name for an upsert statement in the form `ON
+/// CONFLICT ON CONSTRAINT`. Note that `constraint_name` must be the name of a
+/// unique constraint, not the name of an index.
+///
+/// # Example
+///
+/// ```rust
+/// # extern crate diesel;
+/// # include!("../../upsert/on_conflict_docs_setup.rs");
+/// #
+/// # fn main() {
+/// #     use self::users::dsl::*;
+/// use diesel::upsert::*;
+///
+/// #     let conn = establish_connection();
+/// #     conn.execute("TRUNCATE TABLE users").unwrap();
+/// conn.execute("ALTER TABLE users ADD CONSTRAINT users_name UNIQUE (name)").unwrap();
+/// let user = User { id: 1, name: "Sean", };
+/// let same_name_different_id = User { id: 2, name: "Sean" };
+/// let same_id_different_name = User { id: 1, name: "Pascal" };
+///
+/// assert_eq!(Ok(1), diesel::insert_into(users).values(&user).execute(&conn));
+///
+/// let inserted_row_count = diesel::insert_into(users)
+///     .values(&same_name_different_id)
+///     .on_conflict(on_constraint("users_name"))
+///     .do_nothing()
+///     .execute(&conn);
+/// assert_eq!(Ok(0), inserted_row_count);
+///
+/// let pk_conflict_result = diesel::insert_into(users)
+///     .values(&same_id_different_name)
+///     .on_conflict(on_constraint("users_name"))
+///     .do_nothing()
+///     .execute(&conn);
+/// assert!(pk_conflict_result.is_err());
+/// # }
+/// ```
+pub fn on_constraint(constraint_name: &str) -> OnConstraint {
+    OnConstraint { constraint_name }
+}
+
+#[doc(hidden)]
+#[derive(Debug, Clone, Copy)]
+pub struct OnConstraint<'a> {
+    constraint_name: &'a str,
+}
+
+impl<'a> QueryFragment<Pg> for ConflictTarget<OnConstraint<'a>> {
+    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+        out.push_sql(" ON CONSTRAINT ");
+        out.push_identifier(self.0.constraint_name)?;
+        Ok(())
+    }
+}
+
+impl<'a, Table> OnConflictTarget<Table> for ConflictTarget<OnConstraint<'a>> {}
diff --git a/diesel/src/pg/serialize/write_tuple.rs b/diesel/src/pg/serialize/write_tuple.rs
index 5a3c904..5c2f55c 100644
--- a/diesel/src/pg/serialize/write_tuple.rs
+++ b/diesel/src/pg/serialize/write_tuple.rs
@@ -14,15 +14,13 @@
 ///
 /// # Example
 ///
-/// ```no_run
-/// # #[macro_use]
-/// # extern crate diesel;
-/// #
+/// ```
 /// # #[cfg(feature = "postgres")]
 /// # mod the_impl {
+/// #     use diesel::prelude::*;
 /// #     use diesel::pg::Pg;
 /// #     use diesel::serialize::{self, ToSql, Output, WriteTuple};
-/// #     use diesel::sql_types::{Integer, Text};
+/// #     use diesel::sql_types::{Integer, Text, SqlType};
 /// #     use std::io::Write;
 /// #
 ///     #[derive(SqlType)]
diff --git a/diesel/src/pg/transaction.rs b/diesel/src/pg/transaction.rs
index a5402ef..fc49a2f 100644
--- a/diesel/src/pg/transaction.rs
+++ b/diesel/src/pg/transaction.rs
@@ -40,7 +40,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use diesel::sql_query;
     /// #
@@ -93,7 +92,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use diesel::result::Error::RollbackTransaction;
     /// # use diesel::sql_query;
@@ -137,7 +135,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -165,7 +162,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -193,7 +189,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -218,7 +213,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -243,7 +237,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -266,7 +259,14 @@
     /// Runs the given function inside of the transaction
     /// with the parameters given to this builder.
     ///
-    /// Returns an error if the connection is already inside a transaction.
+    /// Returns an error if the connection is already inside a transaction,
+    /// or if the transaction fails to commit or roll back.
+    ///
+    /// If the transaction fails to commit due to a `SerializationFailure` or a
+    /// `ReadOnlyTransaction`, a rollback will be attempted. If the rollback succeeds,
+    /// the original error will be returned; otherwise the error generated by the rollback
+    /// will be returned. In the second case the connection should be considered broken,
+    /// as it contains an uncommitted, unabortable open transaction.
     pub fn run<T, E, F>(&self, f: F) -> Result<T, E>
     where
         F: FnOnce() -> Result<T, E>,
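
A hedged sketch of how a caller might act on the behaviour documented above (none of this is in the patch): retry the closure when the commit itself fails with a serialization failure, and treat any other error as final. `conn` and the closure body are placeholders.

```rust
use diesel::pg::PgConnection;
use diesel::result::{DatabaseErrorKind, Error};

fn run_with_retry(conn: &PgConnection) -> Result<(), Error> {
    loop {
        let result = conn.build_transaction().serializable().run(|| {
            // ... conflicting reads and writes would go here ...
            Ok(())
        });
        match result {
            // The commit failed with a serialization failure and the rollback
            // succeeded, so the connection is still usable: try again.
            Err(Error::DatabaseError(DatabaseErrorKind::SerializationFailure, _)) => continue,
            other => return other,
        }
    }
}
```
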
diff --git a/diesel/src/pg/types/date_and_time/deprecated_time.rs b/diesel/src/pg/types/date_and_time/deprecated_time.rs
index 748a813..f6e372d 100644
--- a/diesel/src/pg/types/date_and_time/deprecated_time.rs
+++ b/diesel/src/pg/types/date_and_time/deprecated_time.rs
@@ -4,7 +4,8 @@
 
 use self::time::{Duration, Timespec};
 
-use crate::deserialize::{self, FromSql};
+use crate::deserialize::{self, FromSql, FromSqlRow};
+use crate::expression::AsExpression;
 use crate::pg::{Pg, PgValue};
 use crate::serialize::{self, Output, ToSql};
 use crate::sql_types;
diff --git a/diesel/src/pg/types/date_and_time/mod.rs b/diesel/src/pg/types/date_and_time/mod.rs
index e14da6c..360f913 100644
--- a/diesel/src/pg/types/date_and_time/mod.rs
+++ b/diesel/src/pg/types/date_and_time/mod.rs
@@ -1,7 +1,8 @@
 use std::io::Write;
 use std::ops::Add;
 
-use crate::deserialize::{self, FromSql};
+use crate::deserialize::{self, FromSql, FromSqlRow};
+use crate::expression::AsExpression;
 use crate::pg::{Pg, PgValue};
 use crate::serialize::{self, IsNull, Output, ToSql};
 use crate::sql_types::{self, Date, Interval, Time, Timestamp, Timestamptz};
diff --git a/diesel/src/pg/types/floats/mod.rs b/diesel/src/pg/types/floats/mod.rs
index 68433c6..2191559 100644
--- a/diesel/src/pg/types/floats/mod.rs
+++ b/diesel/src/pg/types/floats/mod.rs
@@ -2,7 +2,8 @@
 use std::error::Error;
 use std::io::prelude::*;
 
-use crate::deserialize::{self, FromSql};
+use crate::deserialize::{self, FromSql, FromSqlRow};
+use crate::expression::AsExpression;
 use crate::pg::{Pg, PgValue};
 use crate::serialize::{self, IsNull, Output, ToSql};
 use crate::sql_types;
diff --git a/diesel/src/pg/types/json.rs b/diesel/src/pg/types/json.rs
index 00f6d60..1717a68 100644
--- a/diesel/src/pg/types/json.rs
+++ b/diesel/src/pg/types/json.rs
@@ -12,6 +12,8 @@
 #[allow(dead_code)]
 mod foreign_derives {
     use super::serde_json;
+    use crate::deserialize::FromSqlRow;
+    use crate::expression::AsExpression;
     use crate::sql_types::{Json, Jsonb};
 
     #[derive(FromSqlRow, AsExpression)]
diff --git a/diesel/src/pg/types/mac_addr.rs b/diesel/src/pg/types/mac_addr.rs
index 69a9d21..a5a9775 100644
--- a/diesel/src/pg/types/mac_addr.rs
+++ b/diesel/src/pg/types/mac_addr.rs
@@ -9,6 +9,8 @@
 #[allow(dead_code)]
 mod foreign_derives {
     use super::*;
+    use crate::deserialize::FromSqlRow;
+    use crate::expression::AsExpression;
 
     #[derive(FromSqlRow, AsExpression)]
     #[diesel(foreign_derive)]
diff --git a/diesel/src/pg/types/mod.rs b/diesel/src/pg/types/mod.rs
index f6b7a24..f946ec3 100644
--- a/diesel/src/pg/types/mod.rs
+++ b/diesel/src/pg/types/mod.rs
@@ -24,6 +24,9 @@
 ///
 /// Note: All types in this module can be accessed through `diesel::sql_types`
 pub mod sql_types {
+    use crate::query_builder::QueryId;
+    use crate::sql_types::SqlType;
+
     /// The `OID` SQL type. This is a PostgreSQL specific type.
     ///
     /// ### [`ToSql`] impls
@@ -181,7 +184,7 @@
     ///
     /// [`ToSql`]: ../../../serialize/trait.ToSql.html
     /// [`FromSql`]: ../../../deserialize/trait.FromSql.html
-    /// [Uuid]: https://doc.rust-lang.org/uuid/uuid/struct.Uuid.html
+    /// [Uuid]: https://docs.rs/uuid/*/uuid/struct.Uuid.html
     #[derive(Debug, Clone, Copy, Default, QueryId, SqlType)]
     #[postgres(oid = "2950", array_oid = "2951")]
     pub struct Uuid;
@@ -251,7 +254,6 @@
     ///
     /// ```rust
     /// # #![allow(dead_code)]
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// table! {
@@ -309,7 +311,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// use diesel::data_types::Cents;
     ///
@@ -357,7 +358,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// table! {
     ///     devices {
@@ -407,7 +407,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// table! {
@@ -462,7 +461,6 @@
     ///
     /// ```rust
     /// # #![allow(dead_code)]
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// table! {
     ///     clients {
diff --git a/diesel/src/pg/types/money.rs b/diesel/src/pg/types/money.rs
index bd6d909..8939ce1 100644
--- a/diesel/src/pg/types/money.rs
+++ b/diesel/src/pg/types/money.rs
@@ -3,7 +3,8 @@
 use std::io::prelude::*;
 use std::ops::{Add, AddAssign, Sub, SubAssign};
 
-use crate::deserialize::{self, FromSql};
+use crate::deserialize::{self, FromSql, FromSqlRow};
+use crate::expression::AsExpression;
 use crate::pg::{Pg, PgValue};
 use crate::serialize::{self, Output, ToSql};
 use crate::sql_types::{BigInt, Money};
diff --git a/diesel/src/pg/types/network_address.rs b/diesel/src/pg/types/network_address.rs
index cf8942b..563f185 100644
--- a/diesel/src/pg/types/network_address.rs
+++ b/diesel/src/pg/types/network_address.rs
@@ -24,6 +24,8 @@
 #[allow(dead_code)]
 mod foreign_derives {
     use super::*;
+    use crate::deserialize::FromSqlRow;
+    use crate::expression::AsExpression;
 
     #[derive(FromSqlRow, AsExpression)]
     #[diesel(foreign_derive)]
diff --git a/diesel/src/pg/types/record.rs b/diesel/src/pg/types/record.rs
index aef8ba7..05fd2fa 100644
--- a/diesel/src/pg/types/record.rs
+++ b/diesel/src/pg/types/record.rs
@@ -3,9 +3,11 @@
 use std::num::NonZeroU32;
 
 use crate::deserialize::{self, FromSql, FromSqlRow, Queryable};
-use crate::expression::{AppearsOnTable, AsExpression, Expression, SelectableExpression};
+use crate::expression::{
+    AppearsOnTable, AsExpression, Expression, SelectableExpression, ValidGrouping,
+};
 use crate::pg::{Pg, PgValue};
-use crate::query_builder::{AstPass, QueryFragment};
+use crate::query_builder::{AstPass, QueryFragment, QueryId};
 use crate::result::QueryResult;
 use crate::row::Row;
 use crate::serialize::{self, IsNull, Output, ToSql, WriteTuple};
@@ -134,7 +136,7 @@
 
 __diesel_for_each_tuple!(tuple_impls);
 
-#[derive(Debug, Clone, Copy, QueryId, NonAggregate)]
+#[derive(Debug, Clone, Copy, QueryId, ValidGrouping)]
 pub struct PgTuple<T>(T);
 
 impl<T> QueryFragment<Pg> for PgTuple<T>
diff --git a/diesel/src/pg/types/uuid.rs b/diesel/src/pg/types/uuid.rs
index b8b625c..5e5e86e 100644
--- a/diesel/src/pg/types/uuid.rs
+++ b/diesel/src/pg/types/uuid.rs
@@ -1,6 +1,8 @@
 use std::io::prelude::*;
 
+use crate::deserialize::FromSqlRow;
 use crate::deserialize::{self, FromSql};
+use crate::expression::AsExpression;
 use crate::pg::{Pg, PgValue};
 use crate::serialize::{self, IsNull, Output, ToSql};
 use crate::sql_types::Uuid;
diff --git a/diesel/src/pg/upsert/mod.rs b/diesel/src/pg/upsert/mod.rs
deleted file mode 100644
index 8381492..0000000
--- a/diesel/src/pg/upsert/mod.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-//! Types and functions related to PG's `ON CONFLICT` clause
-//!
-//! See [the methods on `InsertStatement`](../../query_builder/struct.InsertStatement.html#impl-1)
-//! for usage examples.
-
-mod on_conflict_actions;
-mod on_conflict_clause;
-mod on_conflict_extension;
-mod on_conflict_target;
-
-pub use self::on_conflict_actions::excluded;
-pub use self::on_conflict_extension::*;
-pub use self::on_conflict_target::on_constraint;
diff --git a/diesel/src/pg/upsert/on_conflict_target.rs b/diesel/src/pg/upsert/on_conflict_target.rs
deleted file mode 100644
index 832ffaf..0000000
--- a/diesel/src/pg/upsert/on_conflict_target.rs
+++ /dev/null
@@ -1,143 +0,0 @@
-use crate::expression::SqlLiteral;
-use crate::pg::Pg;
-use crate::query_builder::*;
-use crate::query_source::Column;
-use crate::result::QueryResult;
-
-/// Used to specify the constraint name for an upsert statement in the form `ON
-/// CONFLICT ON CONSTRAINT`. Note that `constraint_name` must be the name of a
-/// unique constraint, not the name of an index.
-///
-/// # Example
-///
-/// ```rust
-/// # #[macro_use] extern crate diesel;
-/// # include!("on_conflict_docs_setup.rs");
-/// #
-/// # fn main() {
-/// #     use self::users::dsl::*;
-/// use diesel::pg::upsert::*;
-///
-/// #     let conn = establish_connection();
-/// #     conn.execute("TRUNCATE TABLE users").unwrap();
-/// conn.execute("ALTER TABLE users ADD CONSTRAINT users_name UNIQUE (name)").unwrap();
-/// let user = User { id: 1, name: "Sean", };
-/// let same_name_different_id = User { id: 2, name: "Sean" };
-/// let same_id_different_name = User { id: 1, name: "Pascal" };
-///
-/// assert_eq!(Ok(1), diesel::insert_into(users).values(&user).execute(&conn));
-///
-/// let inserted_row_count = diesel::insert_into(users)
-///     .values(&same_name_different_id)
-///     .on_conflict(on_constraint("users_name"))
-///     .do_nothing()
-///     .execute(&conn);
-/// assert_eq!(Ok(0), inserted_row_count);
-///
-/// let pk_conflict_result = diesel::insert_into(users)
-///     .values(&same_id_different_name)
-///     .on_conflict(on_constraint("users_name"))
-///     .do_nothing()
-///     .execute(&conn);
-/// assert!(pk_conflict_result.is_err());
-/// # }
-/// ```
-pub fn on_constraint(constraint_name: &str) -> OnConstraint {
-    OnConstraint {
-        constraint_name: constraint_name,
-    }
-}
-
-#[doc(hidden)]
-#[derive(Debug, Clone, Copy)]
-pub struct OnConstraint<'a> {
-    constraint_name: &'a str,
-}
-
-pub trait OnConflictTarget<Table>: QueryFragment<Pg> {}
-
-#[doc(hidden)]
-#[derive(Debug, Clone, Copy)]
-pub struct NoConflictTarget;
-
-impl QueryFragment<Pg> for NoConflictTarget {
-    fn walk_ast(&self, _: AstPass<Pg>) -> QueryResult<()> {
-        Ok(())
-    }
-}
-
-impl<Table> OnConflictTarget<Table> for NoConflictTarget {}
-
-#[doc(hidden)]
-#[derive(Debug, Clone, Copy)]
-pub struct ConflictTarget<T>(pub T);
-
-impl<T: Column> QueryFragment<Pg> for ConflictTarget<T> {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
-        out.push_sql(" (");
-        out.push_identifier(T::NAME)?;
-        out.push_sql(")");
-        Ok(())
-    }
-}
-
-impl<T: Column> OnConflictTarget<T::Table> for ConflictTarget<T> {}
-
-impl<ST> QueryFragment<Pg> for ConflictTarget<SqlLiteral<ST>>
-where
-    SqlLiteral<ST>: QueryFragment<Pg>,
-{
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
-        out.push_sql(" ");
-        self.0.walk_ast(out.reborrow())?;
-        Ok(())
-    }
-}
-
-impl<Tab, ST> OnConflictTarget<Tab> for ConflictTarget<SqlLiteral<ST>> where
-    ConflictTarget<SqlLiteral<ST>>: QueryFragment<Pg>
-{
-}
-
-impl<'a> QueryFragment<Pg> for ConflictTarget<OnConstraint<'a>> {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
-        out.push_sql(" ON CONSTRAINT ");
-        out.push_identifier(self.0.constraint_name)?;
-        Ok(())
-    }
-}
-
-impl<'a, Table> OnConflictTarget<Table> for ConflictTarget<OnConstraint<'a>> {}
-
-macro_rules! on_conflict_tuples {
-    ($($col:ident),+) => {
-        impl<T, $($col),+> QueryFragment<Pg> for ConflictTarget<(T, $($col),+)> where
-            T: Column,
-            $($col: Column<Table=T::Table>,)+
-        {
-            fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
-                out.push_sql(" (");
-                out.push_identifier(T::NAME)?;
-                $(
-                    out.push_sql(", ");
-                    out.push_identifier($col::NAME)?;
-                )+
-                out.push_sql(")");
-                Ok(())
-            }
-        }
-
-        impl<T, $($col),+> OnConflictTarget<T::Table> for ConflictTarget<(T, $($col),+)> where
-            T: Column,
-            $($col: Column<Table=T::Table>,)+
-        {
-        }
-    }
-}
-
-on_conflict_tuples!(U);
-on_conflict_tuples!(U, V);
-on_conflict_tuples!(U, V, W);
-on_conflict_tuples!(U, V, W, X);
-on_conflict_tuples!(U, V, W, X, Y);
-on_conflict_tuples!(U, V, W, X, Y, Z);
diff --git a/diesel/src/query_builder/ast_pass.rs b/diesel/src/query_builder/ast_pass.rs
index 4ecdf8f..e9bdebe 100644
--- a/diesel/src/query_builder/ast_pass.rs
+++ b/diesel/src/query_builder/ast_pass.rs
@@ -144,7 +144,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # extern crate diesel;
     /// # use diesel::query_builder::{QueryFragment, AstPass};
     /// # use diesel::backend::Backend;
     /// # use diesel::QueryResult;
diff --git a/diesel/src/query_builder/clause_macro.rs b/diesel/src/query_builder/clause_macro.rs
index 2217382..6f86753 100644
--- a/diesel/src/query_builder/clause_macro.rs
+++ b/diesel/src/query_builder/clause_macro.rs
@@ -1,13 +1,35 @@
 macro_rules! simple_clause {
-    ($no_clause:ident, $clause:ident, $sql:expr) => {
-        simple_clause!($no_clause, $clause, $sql, backend_bounds = );
+    (
+        $(#[doc = $($no_clause_doc:tt)*])*
+        $no_clause:ident,
+        $(#[doc = $($clause_doc:tt)*])*
+        $clause:ident,
+        $sql:expr
+    ) => {
+        simple_clause!(
+            $(#[doc = $($no_clause_doc)*])*
+            $no_clause,
+            $(#[doc = $($clause_doc)*])*
+            $clause,
+            $sql,
+            backend_bounds =
+        );
     };
 
-    ($no_clause:ident, $clause:ident, $sql:expr, backend_bounds = $($backend_bounds:ident),*) => {
+    (
+        $(#[doc = $($no_clause_doc:tt)*])*
+        $no_clause:ident,
+        $(#[doc = $($clause_doc:tt)*])*
+        $clause:ident,
+        $sql:expr,
+        backend_bounds = $($backend_bounds:ident),*
+    ) => {
         use crate::backend::Backend;
         use crate::result::QueryResult;
+        use crate::query_builder::QueryId;
         use super::{QueryFragment, AstPass};
 
+        $(#[doc = $($no_clause_doc)*])*
         #[derive(Debug, Clone, Copy, QueryId)]
         pub struct $no_clause;
 
@@ -17,6 +39,7 @@
             }
         }
 
+        $(#[doc = $($clause_doc)*])*
         #[derive(Debug, Clone, Copy, QueryId)]
         pub struct $clause<Expr>(pub Expr);
 
diff --git a/diesel/src/query_builder/delete_statement/mod.rs b/diesel/src/query_builder/delete_statement/mod.rs
index 2f5b1ed..8514385 100644
--- a/diesel/src/query_builder/delete_statement/mod.rs
+++ b/diesel/src/query_builder/delete_statement/mod.rs
@@ -50,7 +50,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -88,7 +87,6 @@
     /// ### Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -207,7 +205,6 @@
     /// ### Deleting a record:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # #[cfg(feature = "postgres")]
diff --git a/diesel/src/query_builder/functions.rs b/diesel/src/query_builder/functions.rs
index 5e56187..009b0b1 100644
--- a/diesel/src/query_builder/functions.rs
+++ b/diesel/src/query_builder/functions.rs
@@ -21,7 +21,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # #[cfg(feature = "postgres")]
@@ -46,7 +45,6 @@
 /// [`set`]: query_builder/struct.UpdateStatement.html#method.set
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # table! {
@@ -95,7 +93,6 @@
 /// ### Deleting a single record:
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -116,7 +113,6 @@
 /// ### Deleting a whole table:
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -154,7 +150,6 @@
 /// # Examples
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -182,7 +177,6 @@
 /// ### Using a tuple for values
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -212,7 +206,6 @@
 /// ### Using struct for values
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::users;
 /// #
@@ -259,7 +252,6 @@
 /// [`.into_columns`]: query_builder/struct.InsertStatement.html#method.into_columns
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -292,7 +284,6 @@
 /// ### With return value
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # #[cfg(feature = "postgres")]
@@ -328,7 +319,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # fn main() {
@@ -387,7 +377,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # #[cfg(not(feature = "postgres"))]
@@ -436,7 +425,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// #
 /// # use schema::users;
diff --git a/diesel/src/query_builder/group_by_clause.rs b/diesel/src/query_builder/group_by_clause.rs
index b2cf936..9584a5a 100644
--- a/diesel/src/query_builder/group_by_clause.rs
+++ b/diesel/src/query_builder/group_by_clause.rs
@@ -1 +1,13 @@
 simple_clause!(NoGroupByClause, GroupByClause, " GROUP BY ");
+
+pub trait ValidGroupByClause {
+    type Expressions;
+}
+
+impl ValidGroupByClause for NoGroupByClause {
+    type Expressions = ();
+}
+
+impl<GB> ValidGroupByClause for GroupByClause<GB> {
+    type Expressions = GB;
+}
diff --git a/diesel/src/query_builder/insert_statement/insert_from_select.rs b/diesel/src/query_builder/insert_statement/insert_from_select.rs
index e609dab..f5e1132 100644
--- a/diesel/src/query_builder/insert_statement/insert_from_select.rs
+++ b/diesel/src/query_builder/insert_statement/insert_from_select.rs
@@ -7,8 +7,8 @@
 /// Represents `(Columns) SELECT FROM ...` for use in an `INSERT` statement
 #[derive(Debug, Clone, Copy)]
 pub struct InsertFromSelect<Select, Columns> {
-    query: Select,
-    columns: Columns,
+    pub(in crate::query_builder) query: Select,
+    pub(in crate::query_builder) columns: Columns,
 }
 
 impl<Select, Columns> InsertFromSelect<Select, Columns> {
diff --git a/diesel/src/query_builder/insert_statement/mod.rs b/diesel/src/query_builder/insert_statement/mod.rs
index 5e32a35..c765a1e 100644
--- a/diesel/src/query_builder/insert_statement/mod.rs
+++ b/diesel/src/query_builder/insert_statement/mod.rs
@@ -48,7 +48,6 @@
     /// Inserts `DEFAULT VALUES` into the targeted table.
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # table! {
@@ -142,7 +141,6 @@
         }
     }
 
-    #[cfg(feature = "postgres")]
     pub(crate) fn replace_values<F, V>(self, f: F) -> InsertStatement<T, V, Op, Ret>
     where
         F: FnOnce(U) -> V,
@@ -397,7 +395,6 @@
     /// ### Inserting records:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # #[cfg(feature = "postgres")]
diff --git a/diesel/src/query_builder/limit_clause.rs b/diesel/src/query_builder/limit_clause.rs
index 9c67ecc..fd83de7 100644
--- a/diesel/src/query_builder/limit_clause.rs
+++ b/diesel/src/query_builder/limit_clause.rs
@@ -1 +1,11 @@
-simple_clause!(NoLimitClause, LimitClause, " LIMIT ");
+simple_clause!(
+    /// A query node indicating the absence of a limit clause
+    ///
+    /// This type is only relevant for implementing custom backends
+    NoLimitClause,
+    /// A query node representing a limit clause
+    ///
+    /// This type is only relevant for implementing custom backends
+    LimitClause,
+    " LIMIT "
+);
diff --git a/diesel/src/query_builder/limit_offset_clause.rs b/diesel/src/query_builder/limit_offset_clause.rs
new file mode 100644
index 0000000..e167284
--- /dev/null
+++ b/diesel/src/query_builder/limit_offset_clause.rs
@@ -0,0 +1,24 @@
+use super::QueryFragment;
+use crate::query_builder::QueryId;
+
+/// A helper query node that contains both limit and offset clauses
+///
+/// This type is only relevant for implementing custom backends
+#[derive(Debug, Clone, Copy, QueryId)]
+pub struct LimitOffsetClause<Limit, Offset> {
+    /// The limit clause
+    pub limit_clause: Limit,
+    /// The offset clause
+    pub offset_clause: Offset,
+}
+
+/// A boxed variant of [`LimitOffsetClause`](../struct.LimitOffsetClause.html)
+///
+/// This type is only relevant for implementing custom backends
+#[allow(missing_debug_implementations)]
+pub struct BoxedLimitOffsetClause<'a, DB> {
+    /// The limit clause
+    pub limit: Option<Box<dyn QueryFragment<DB> + Send + 'a>>,
+    /// The offset clause
+    pub offset: Option<Box<dyn QueryFragment<DB> + Send + 'a>>,
+}
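
Since the doc comments above say these types mainly matter for custom backends, here is a rough sketch of the two `QueryFragment` impls such a backend would provide, mirroring the PostgreSQL impls earlier in this diff. `MyBackend` is a hypothetical third-party backend that is assumed to already implement `Backend` and to render `LIMIT`/`OFFSET` like the ANSI backends.

```rust
use diesel::query_builder::{AstPass, BoxedLimitOffsetClause, LimitOffsetClause, QueryFragment};
use diesel::result::QueryResult;

// Statically typed clause: emit the limit clause, then the offset clause.
impl<L, O> QueryFragment<MyBackend> for LimitOffsetClause<L, O>
where
    L: QueryFragment<MyBackend>,
    O: QueryFragment<MyBackend>,
{
    fn walk_ast(&self, mut out: AstPass<MyBackend>) -> QueryResult<()> {
        self.limit_clause.walk_ast(out.reborrow())?;
        self.offset_clause.walk_ast(out.reborrow())?;
        Ok(())
    }
}

// Boxed clause: each part is optional, so only emit what is present.
impl<'a> QueryFragment<MyBackend> for BoxedLimitOffsetClause<'a, MyBackend> {
    fn walk_ast(&self, mut out: AstPass<MyBackend>) -> QueryResult<()> {
        if let Some(ref limit) = self.limit {
            limit.walk_ast(out.reborrow())?;
        }
        if let Some(ref offset) = self.offset {
            offset.walk_ast(out.reborrow())?;
        }
        Ok(())
    }
}
```
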
diff --git a/diesel/src/query_builder/locking_clause.rs b/diesel/src/query_builder/locking_clause.rs
index 4467984..8605ac9 100644
--- a/diesel/src/query_builder/locking_clause.rs
+++ b/diesel/src/query_builder/locking_clause.rs
@@ -1,5 +1,5 @@
 use crate::backend::Backend;
-use crate::query_builder::{AstPass, QueryFragment};
+use crate::query_builder::{AstPass, QueryFragment, QueryId};
 use crate::result::QueryResult;
 
 #[derive(Debug, Clone, Copy, QueryId)]
diff --git a/diesel/src/query_builder/mod.rs b/diesel/src/query_builder/mod.rs
index cc64a56..c4e520a 100644
--- a/diesel/src/query_builder/mod.rs
+++ b/diesel/src/query_builder/mod.rs
@@ -18,17 +18,19 @@
 pub mod functions;
 mod group_by_clause;
 mod insert_statement;
-mod limit_clause;
+pub(crate) mod limit_clause;
+pub(crate) mod limit_offset_clause;
 pub(crate) mod locking_clause;
 #[doc(hidden)]
 pub mod nodes;
-mod offset_clause;
+pub(crate) mod offset_clause;
 mod order_clause;
 mod returning_clause;
 mod select_clause;
 mod select_statement;
 mod sql_query;
 mod update_statement;
+pub(crate) mod upsert;
 mod where_clause;
 
 pub use self::ast_pass::AstPass;
@@ -48,6 +50,10 @@
     AsChangeset, BoxedUpdateStatement, IntoUpdateTarget, UpdateStatement, UpdateTarget,
 };
 
+pub use self::limit_clause::{LimitClause, NoLimitClause};
+pub use self::limit_offset_clause::{BoxedLimitOffsetClause, LimitOffsetClause};
+pub use self::offset_clause::{NoOffsetClause, OffsetClause};
+
 pub(crate) use self::insert_statement::ColumnList;
 
 use std::error::Error;
@@ -230,6 +236,17 @@
     }
 }
 
+/// A trait used to construct a type-erased, boxed variant of the current query node
+///
+/// Mainly useful for implementing third party backends
+pub trait IntoBoxedClause<'a, DB> {
+    /// Resulting type
+    type BoxedClause;
+
+    /// Convert the given query node into its boxed representation
+    fn into_boxed(self) -> Self::BoxedClause;
+}
+
 /// Types that can be converted into a complete, typed SQL query.
 ///
 /// This is used internally to automatically add the right select clause when
@@ -272,7 +289,6 @@
 /// ```rust
 /// # include!("../doctest_setup.rs");
 /// #
-/// # #[macro_use] extern crate diesel;
 /// # use diesel::*;
 /// # use schema::*;
 /// #
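
As a small usage sketch of the new `IntoBoxedClause` trait (nothing here is part of the patch), generic code that needs the type-erased form can bound on the trait and call `into_boxed`; the helper name is made up.

```rust
use diesel::query_builder::IntoBoxedClause;

// Box whatever limit/offset clause we are handed, for some backend `DB`
// that provides the conversion (as Pg does elsewhere in this diff).
fn erase_clause<'a, DB, C>(clause: C) -> C::BoxedClause
where
    C: IntoBoxedClause<'a, DB>,
{
    clause.into_boxed()
}
```
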
diff --git a/diesel/src/query_builder/offset_clause.rs b/diesel/src/query_builder/offset_clause.rs
index cf0dde8..3cb72e0 100644
--- a/diesel/src/query_builder/offset_clause.rs
+++ b/diesel/src/query_builder/offset_clause.rs
@@ -1 +1,11 @@
-simple_clause!(NoOffsetClause, OffsetClause, " OFFSET ");
+simple_clause!(
+    /// A query node indicating the absence of an offset clause
+    ///
+    /// This type is only relevant for implementing custom backends
+    NoOffsetClause,
+    /// A query node representing an offset clause
+    ///
+    /// This type is only relevant for implementing custom backends
+    OffsetClause,
+    " OFFSET "
+);
diff --git a/diesel/src/query_builder/order_clause.rs b/diesel/src/query_builder/order_clause.rs
index a6b2980..deea8dc 100644
--- a/diesel/src/query_builder/order_clause.rs
+++ b/diesel/src/query_builder/order_clause.rs
@@ -1,20 +1,20 @@
 simple_clause!(NoOrderClause, OrderClause, " ORDER BY ");
 
-impl<'a, DB, Expr> Into<Option<Box<dyn QueryFragment<DB> + 'a>>> for OrderClause<Expr>
+impl<'a, DB, Expr> Into<Option<Box<dyn QueryFragment<DB> + Send + 'a>>> for OrderClause<Expr>
 where
     DB: Backend,
-    Expr: QueryFragment<DB> + 'a,
+    Expr: QueryFragment<DB> + Send + 'a,
 {
-    fn into(self) -> Option<Box<dyn QueryFragment<DB> + 'a>> {
+    fn into(self) -> Option<Box<dyn QueryFragment<DB> + Send + 'a>> {
         Some(Box::new(self.0))
     }
 }
 
-impl<'a, DB> Into<Option<Box<dyn QueryFragment<DB> + 'a>>> for NoOrderClause
+impl<'a, DB> Into<Option<Box<dyn QueryFragment<DB> + Send + 'a>>> for NoOrderClause
 where
     DB: Backend,
 {
-    fn into(self) -> Option<Box<dyn QueryFragment<DB> + 'a>> {
+    fn into(self) -> Option<Box<dyn QueryFragment<DB> + Send + 'a>> {
         None
     }
 }
diff --git a/diesel/src/query_builder/query_id.rs b/diesel/src/query_builder/query_id.rs
index b2aebf4..aa2ccb9 100644
--- a/diesel/src/query_builder/query_id.rs
+++ b/diesel/src/query_builder/query_id.rs
@@ -15,33 +15,10 @@
 ///
 /// ### Deriving
 ///
-/// This trait can be automatically derived by Diesel.
+/// This trait can [be automatically derived](derive.QueryId.html)
+/// by Diesel.
 /// For example, given this struct:
 ///
-/// ```rust
-/// # #[macro_use] extern crate diesel;
-/// #[derive(QueryId)]
-/// pub struct And<Left, Right> {
-///     left: Left,
-///     right: Right,
-/// }
-/// # fn main() {}
-/// ```
-///
-/// the following implementation will be generated
-///
-/// ```rust,ignore
-/// impl<Left, Right> QueryId for And<Left, Right>
-/// where
-///     Left: QueryId,
-///     Right: QueryId,
-/// {
-///     type QueryId = And<Left::QueryId, Right::QueryId>;
-///
-///     const HAS_STATIC_QUERY_ID: bool = Left::HAS_STATIC_QUERY_ID && Right::HAS_STATIC_QUERY_ID;
-/// }
-/// ```
-///
 /// If the SQL generated by a struct is not uniquely identifiable by its type,
 /// meaning that `HAS_STATIC_QUERY_ID` should always be false,
 /// you should not derive this trait.
@@ -88,6 +65,9 @@
     }
 }
 
+#[doc(inline)]
+pub use diesel_derives::QueryId;
+
 impl QueryId for () {
     type QueryId = ();
 
diff --git a/diesel/src/query_builder/select_clause.rs b/diesel/src/query_builder/select_clause.rs
index 61f0843..54bff35 100644
--- a/diesel/src/query_builder/select_clause.rs
+++ b/diesel/src/query_builder/select_clause.rs
@@ -9,6 +9,7 @@
 pub struct SelectClause<T>(pub T);
 
 pub trait SelectClauseExpression<QS> {
+    type Selection: SelectableExpression<QS>;
     type SelectClauseSqlType;
 }
 
@@ -16,6 +17,7 @@
 where
     T: SelectableExpression<QS>,
 {
+    type Selection = T;
     type SelectClauseSqlType = T::SqlType;
 }
 
@@ -23,6 +25,7 @@
 where
     QS: QuerySource,
 {
+    type Selection = QS::DefaultSelection;
     type SelectClauseSqlType = <QS::DefaultSelection as Expression>::SqlType;
 }
 
diff --git a/diesel/src/query_builder/select_statement/boxed.rs b/diesel/src/query_builder/select_statement/boxed.rs
index 0c0eb84..5216e61 100644
--- a/diesel/src/query_builder/select_statement/boxed.rs
+++ b/diesel/src/query_builder/select_statement/boxed.rs
@@ -9,6 +9,7 @@
 use crate::query_builder::group_by_clause::GroupByClause;
 use crate::query_builder::insert_statement::InsertFromSelect;
 use crate::query_builder::limit_clause::LimitClause;
+use crate::query_builder::limit_offset_clause::BoxedLimitOffsetClause;
 use crate::query_builder::offset_clause::OffsetClause;
 use crate::query_builder::order_clause::OrderClause;
 use crate::query_builder::where_clause::*;
@@ -22,41 +23,65 @@
 
 #[allow(missing_debug_implementations)]
 pub struct BoxedSelectStatement<'a, ST, QS, DB> {
-    select: Box<dyn QueryFragment<DB> + 'a>,
+    select: Box<dyn QueryFragment<DB> + Send + 'a>,
     from: QS,
-    distinct: Box<dyn QueryFragment<DB> + 'a>,
+    distinct: Box<dyn QueryFragment<DB> + Send + 'a>,
     where_clause: BoxedWhereClause<'a, DB>,
-    order: Option<Box<dyn QueryFragment<DB> + 'a>>,
-    limit: Box<dyn QueryFragment<DB> + 'a>,
-    offset: Box<dyn QueryFragment<DB> + 'a>,
-    group_by: Box<dyn QueryFragment<DB> + 'a>,
+    order: Option<Box<dyn QueryFragment<DB> + Send + 'a>>,
+    limit_offset: BoxedLimitOffsetClause<'a, DB>,
+    group_by: Box<dyn QueryFragment<DB> + Send + 'a>,
     _marker: PhantomData<ST>,
 }
 
 impl<'a, ST, QS, DB> BoxedSelectStatement<'a, ST, QS, DB> {
     #[allow(clippy::too_many_arguments)]
     pub fn new(
-        select: Box<dyn QueryFragment<DB> + 'a>,
+        select: Box<dyn QueryFragment<DB> + Send + 'a>,
         from: QS,
-        distinct: Box<dyn QueryFragment<DB> + 'a>,
+        distinct: Box<dyn QueryFragment<DB> + Send + 'a>,
         where_clause: BoxedWhereClause<'a, DB>,
-        order: Option<Box<dyn QueryFragment<DB> + 'a>>,
-        limit: Box<dyn QueryFragment<DB> + 'a>,
-        offset: Box<dyn QueryFragment<DB> + 'a>,
-        group_by: Box<dyn QueryFragment<DB> + 'a>,
+        order: Option<Box<dyn QueryFragment<DB> + Send + 'a>>,
+        limit_offset: BoxedLimitOffsetClause<'a, DB>,
+        group_by: Box<dyn QueryFragment<DB> + Send + 'a>,
     ) -> Self {
         BoxedSelectStatement {
-            select: select,
-            from: from,
-            distinct: distinct,
-            where_clause: where_clause,
-            order: order,
-            limit: limit,
-            offset: offset,
-            group_by: group_by,
+            select,
+            from,
+            distinct,
+            where_clause,
+            order,
+            limit_offset,
+            group_by,
             _marker: PhantomData,
         }
     }
+
+    pub(crate) fn build_query(
+        &self,
+        mut out: AstPass<DB>,
+        where_clause_handler: impl Fn(&BoxedWhereClause<'a, DB>, AstPass<DB>) -> QueryResult<()>,
+    ) -> QueryResult<()>
+    where
+        DB: Backend,
+        QS: QuerySource,
+        QS::FromClause: QueryFragment<DB>,
+        BoxedLimitOffsetClause<'a, DB>: QueryFragment<DB>,
+    {
+        out.push_sql("SELECT ");
+        self.distinct.walk_ast(out.reborrow())?;
+        self.select.walk_ast(out.reborrow())?;
+        out.push_sql(" FROM ");
+        self.from.from_clause().walk_ast(out.reborrow())?;
+        where_clause_handler(&self.where_clause, out.reborrow())?;
+        self.group_by.walk_ast(out.reborrow())?;
+
+        if let Some(ref order) = self.order {
+            out.push_sql(" ORDER BY ");
+            order.walk_ast(out.reborrow())?;
+        }
+        self.limit_offset.walk_ast(out.reborrow())?;
+        Ok(())
+    }
 }
 
 impl<'a, ST, QS, DB> Query for BoxedSelectStatement<'a, ST, QS, DB>
@@ -83,30 +108,17 @@
     DB: Backend,
     QS: QuerySource,
     QS::FromClause: QueryFragment<DB>,
+    BoxedLimitOffsetClause<'a, DB>: QueryFragment<DB>,
 {
-    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
-        out.push_sql("SELECT ");
-        self.distinct.walk_ast(out.reborrow())?;
-        self.select.walk_ast(out.reborrow())?;
-        out.push_sql(" FROM ");
-        self.from.from_clause().walk_ast(out.reborrow())?;
-        self.where_clause.walk_ast(out.reborrow())?;
-        self.group_by.walk_ast(out.reborrow())?;
-
-        if let Some(ref order) = self.order {
-            out.push_sql(" ORDER BY ");
-            order.walk_ast(out.reborrow())?;
-        }
-
-        self.limit.walk_ast(out.reborrow())?;
-        self.offset.walk_ast(out.reborrow())?;
-        Ok(())
+    fn walk_ast(&self, out: AstPass<DB>) -> QueryResult<()> {
+        self.build_query(out, |where_clause, out| where_clause.walk_ast(out))
     }
 }
 
 impl<'a, ST, DB> QueryFragment<DB> for BoxedSelectStatement<'a, ST, (), DB>
 where
     DB: Backend,
+    BoxedLimitOffsetClause<'a, DB>: QueryFragment<DB>,
 {
     fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
         out.push_sql("SELECT ");
@@ -115,8 +127,7 @@
         self.where_clause.walk_ast(out.reborrow())?;
         self.group_by.walk_ast(out.reborrow())?;
         self.order.walk_ast(out.reborrow())?;
-        self.limit.walk_ast(out.reborrow())?;
-        self.offset.walk_ast(out.reborrow())?;
+        self.limit_offset.walk_ast(out.reborrow())?;
         Ok(())
     }
 }
@@ -141,8 +152,7 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
         )
     }
@@ -164,7 +174,7 @@
 impl<'a, ST, QS, DB, Selection> SelectDsl<Selection> for BoxedSelectStatement<'a, ST, QS, DB>
 where
     DB: Backend,
-    Selection: SelectableExpression<QS> + QueryFragment<DB> + 'a,
+    Selection: SelectableExpression<QS> + QueryFragment<DB> + Send + 'a,
 {
     type Output = BoxedSelectStatement<'a, Selection::SqlType, QS, DB>;
 
@@ -175,8 +185,7 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
         )
     }
@@ -216,7 +225,7 @@
     type Output = Self;
 
     fn limit(mut self, limit: i64) -> Self::Output {
-        self.limit = Box::new(LimitClause(limit.into_sql::<BigInt>()));
+        self.limit_offset.limit = Some(Box::new(LimitClause(limit.into_sql::<BigInt>())));
         self
     }
 }
@@ -229,7 +238,7 @@
     type Output = Self;
 
     fn offset(mut self, offset: i64) -> Self::Output {
-        self.offset = Box::new(OffsetClause(offset.into_sql::<BigInt>()));
+        self.limit_offset.offset = Some(Box::new(OffsetClause(offset.into_sql::<BigInt>())));
         self
     }
 }
@@ -237,7 +246,7 @@
 impl<'a, ST, QS, DB, Order> OrderDsl<Order> for BoxedSelectStatement<'a, ST, QS, DB>
 where
     DB: Backend,
-    Order: QueryFragment<DB> + AppearsOnTable<QS> + 'a,
+    Order: QueryFragment<DB> + AppearsOnTable<QS> + Send + 'a,
 {
     type Output = Self;
 
@@ -250,7 +259,7 @@
 impl<'a, ST, QS, DB, Order> ThenOrderDsl<Order> for BoxedSelectStatement<'a, ST, QS, DB>
 where
     DB: Backend + 'a,
-    Order: QueryFragment<DB> + AppearsOnTable<QS> + 'a,
+    Order: QueryFragment<DB> + AppearsOnTable<QS> + Send + 'a,
 {
     type Output = Self;
 
@@ -266,7 +275,7 @@
 impl<'a, ST, QS, DB, Expr> GroupByDsl<Expr> for BoxedSelectStatement<'a, ST, QS, DB>
 where
     DB: Backend,
-    Expr: QueryFragment<DB> + AppearsOnTable<QS> + 'a,
+    Expr: QueryFragment<DB> + AppearsOnTable<QS> + Send + 'a,
     Self: Query,
 {
     type Output = Self;
@@ -333,10 +342,49 @@
             distinct: self.distinct,
             where_clause: self.where_clause,
             order: self.order,
-            limit: self.limit,
-            offset: self.offset,
+            limit_offset: self.limit_offset,
             group_by: self.group_by,
             _marker: PhantomData,
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::prelude::*;
+
+    table! {
+        users {
+            id -> Integer,
+        }
+    }
+
+    fn assert_send<T>(_: T)
+    where
+        T: Send,
+    {
+    }
+
+    macro_rules! assert_boxed_query_send {
+        ($backend:ty) => {{
+            assert_send(users::table.into_boxed::<$backend>());
+            assert_send(
+                users::table
+                    .filter(users::id.eq(10))
+                    .into_boxed::<$backend>(),
+            );
+        };};
+    }
+
+    #[test]
+    fn boxed_is_send() {
+        #[cfg(feature = "postgres")]
+        assert_boxed_query_send!(crate::pg::Pg);
+
+        #[cfg(feature = "sqlite")]
+        assert_boxed_query_send!(crate::sqlite::Sqlite);
+
+        #[cfg(feature = "mysql")]
+        assert_boxed_query_send!(crate::mysql::Mysql);
+    }
+}
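
A brief sketch of why the `Send` bounds added in this file matter, assuming the `users` table from the test module above and an enabled `postgres` feature: a boxed query can now be built on one thread and handed to another.

```rust
use diesel::pg::Pg;
use diesel::prelude::*;

fn build_on_one_thread_use_on_another() {
    let query = users::table.filter(users::id.eq(42)).into_boxed::<Pg>();
    // Before this change the boxed statement held `Box<dyn QueryFragment + 'a>`
    // without `Send`, so this move would not compile.
    std::thread::spawn(move || {
        let _query = query;
        // ... execute it with a connection owned by this thread ...
    })
    .join()
    .unwrap();
}
```
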
diff --git a/diesel/src/query_builder/select_statement/dsl_impls.rs b/diesel/src/query_builder/select_statement/dsl_impls.rs
index ff2e1b4..61d7a09 100644
--- a/diesel/src/query_builder/select_statement/dsl_impls.rs
+++ b/diesel/src/query_builder/select_statement/dsl_impls.rs
@@ -9,13 +9,16 @@
 use crate::query_builder::group_by_clause::*;
 use crate::query_builder::insert_statement::InsertFromSelect;
 use crate::query_builder::limit_clause::*;
+use crate::query_builder::limit_offset_clause::{BoxedLimitOffsetClause, LimitOffsetClause};
 use crate::query_builder::locking_clause::*;
 use crate::query_builder::offset_clause::*;
 use crate::query_builder::order_clause::*;
 use crate::query_builder::select_clause::*;
 use crate::query_builder::update_statement::*;
 use crate::query_builder::where_clause::*;
-use crate::query_builder::{AsQuery, Query, QueryFragment, SelectQuery, SelectStatement};
+use crate::query_builder::{
+    AsQuery, IntoBoxedClause, Query, QueryFragment, SelectQuery, SelectStatement,
+};
 use crate::query_dsl::boxed_dsl::BoxedDsl;
 use crate::query_dsl::methods::*;
 use crate::query_dsl::*;
@@ -23,12 +26,12 @@
 use crate::query_source::QuerySource;
 use crate::sql_types::{BigInt, Bool};
 
-impl<F, S, D, W, O, L, Of, G, LC, Rhs, Kind, On> InternalJoinDsl<Rhs, Kind, On>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Rhs, Kind, On> InternalJoinDsl<Rhs, Kind, On>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
-    SelectStatement<JoinOn<Join<F, Rhs, Kind>, On>, S, D, W, O, L, Of, G, LC>: AsQuery,
+    SelectStatement<JoinOn<Join<F, Rhs, Kind>, On>, S, D, W, O, LOf, G, LC>: AsQuery,
 {
-    type Output = SelectStatement<JoinOn<Join<F, Rhs, Kind>, On>, S, D, W, O, L, Of, G, LC>;
+    type Output = SelectStatement<JoinOn<Join<F, Rhs, Kind>, On>, S, D, W, O, LOf, G, LC>;
 
     fn join(self, rhs: Rhs, kind: Kind, on: On) -> Self::Output {
         SelectStatement::new(
@@ -37,21 +40,21 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, Selection> SelectDsl<Selection>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Selection> SelectDsl<Selection>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
-    Selection: SelectableExpression<F>,
-    SelectStatement<F, SelectClause<Selection>, D, W, O, L, Of, G, LC>: SelectQuery,
+    G: ValidGroupByClause,
+    Selection: SelectableExpression<F> + ValidGrouping<G::Expressions>,
+    SelectStatement<F, SelectClause<Selection>, D, W, O, LOf, G, LC>: SelectQuery,
 {
-    type Output = SelectStatement<F, SelectClause<Selection>, D, W, O, L, Of, G, LC>;
+    type Output = SelectStatement<F, SelectClause<Selection>, D, W, O, LOf, G, LC>;
 
     fn select(self, selection: Selection) -> Self::Output {
         SelectStatement::new(
@@ -60,20 +63,19 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<ST, F, S, D, W, O, L, Of, G> DistinctDsl for SelectStatement<F, S, D, W, O, L, Of, G>
+impl<ST, F, S, D, W, O, LOf, G> DistinctDsl for SelectStatement<F, S, D, W, O, LOf, G>
 where
     Self: SelectQuery<SqlType = ST>,
-    SelectStatement<F, S, DistinctClause, W, O, L, Of, G>: SelectQuery<SqlType = ST>,
+    SelectStatement<F, S, DistinctClause, W, O, LOf, G>: SelectQuery<SqlType = ST>,
 {
-    type Output = SelectStatement<F, S, DistinctClause, W, O, L, Of, G>;
+    type Output = SelectStatement<F, S, DistinctClause, W, O, LOf, G>;
 
     fn distinct(self) -> Self::Output {
         SelectStatement::new(
@@ -82,21 +84,20 @@
             DistinctClause,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, Predicate> FilterDsl<Predicate>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Predicate> FilterDsl<Predicate>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     Predicate: Expression<SqlType = Bool> + NonAggregate,
     W: WhereAnd<Predicate>,
 {
-    type Output = SelectStatement<F, S, D, W::Output, O, L, Of, G, LC>;
+    type Output = SelectStatement<F, S, D, W::Output, O, LOf, G, LC>;
 
     fn filter(self, predicate: Predicate) -> Self::Output {
         SelectStatement::new(
@@ -105,21 +106,20 @@
             self.distinct,
             self.where_clause.and(predicate),
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, Predicate> OrFilterDsl<Predicate>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Predicate> OrFilterDsl<Predicate>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     Predicate: Expression<SqlType = Bool> + NonAggregate,
     W: WhereOr<Predicate>,
 {
-    type Output = SelectStatement<F, S, D, W::Output, O, L, Of, G, LC>;
+    type Output = SelectStatement<F, S, D, W::Output, O, LOf, G, LC>;
 
     fn or_filter(self, predicate: Predicate) -> Self::Output {
         SelectStatement::new(
@@ -128,8 +128,7 @@
             self.distinct,
             self.where_clause.or(predicate),
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
@@ -140,7 +139,7 @@
 use crate::expression_methods::EqAll;
 use crate::query_source::Table;
 
-impl<F, S, D, W, O, L, Of, G, LC, PK> FindDsl<PK> for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, PK> FindDsl<PK> for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     F: Table,
     F::PrimaryKey: EqAll<PK>,
@@ -154,14 +153,14 @@
     }
 }
 
-impl<ST, F, S, D, W, O, L, Of, G, LC, Expr> OrderDsl<Expr>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<ST, F, S, D, W, O, LOf, G, LC, Expr> OrderDsl<Expr>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     Expr: AppearsOnTable<F>,
     Self: SelectQuery<SqlType = ST>,
-    SelectStatement<F, S, D, W, OrderClause<Expr>, L, Of, G, LC>: SelectQuery<SqlType = ST>,
+    SelectStatement<F, S, D, W, OrderClause<Expr>, LOf, G, LC>: SelectQuery<SqlType = ST>,
 {
-    type Output = SelectStatement<F, S, D, W, OrderClause<Expr>, L, Of, G, LC>;
+    type Output = SelectStatement<F, S, D, W, OrderClause<Expr>, LOf, G, LC>;
 
     fn order(self, expr: Expr) -> Self::Output {
         let order = OrderClause(expr);
@@ -171,20 +170,19 @@
             self.distinct,
             self.where_clause,
             order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, Expr> ThenOrderDsl<Expr>
-    for SelectStatement<F, S, D, W, OrderClause<O>, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Expr> ThenOrderDsl<Expr>
+    for SelectStatement<F, S, D, W, OrderClause<O>, LOf, G, LC>
 where
     Expr: AppearsOnTable<F>,
 {
-    type Output = SelectStatement<F, S, D, W, OrderClause<(O, Expr)>, L, Of, G, LC>;
+    type Output = SelectStatement<F, S, D, W, OrderClause<(O, Expr)>, LOf, G, LC>;
 
     fn then_order_by(self, expr: Expr) -> Self::Output {
         SelectStatement::new(
@@ -193,16 +191,15 @@
             self.distinct,
             self.where_clause,
             OrderClause((self.order.0, expr)),
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, L, Of, G, LC, Expr> ThenOrderDsl<Expr>
-    for SelectStatement<F, S, D, W, NoOrderClause, L, Of, G, LC>
+impl<F, S, D, W, LOf, G, LC, Expr> ThenOrderDsl<Expr>
+    for SelectStatement<F, S, D, W, NoOrderClause, LOf, G, LC>
 where
     Expr: Expression,
     Self: OrderDsl<Expr>,
@@ -217,12 +214,14 @@
 #[doc(hidden)]
 pub type Limit = AsExprOf<i64, BigInt>;
 
-impl<ST, F, S, D, W, O, L, Of, G, LC> LimitDsl for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<ST, F, S, D, W, O, L, Of, G, LC> LimitDsl
+    for SelectStatement<F, S, D, W, O, LimitOffsetClause<L, Of>, G, LC>
 where
     Self: SelectQuery<SqlType = ST>,
-    SelectStatement<F, S, D, W, O, LimitClause<Limit>, Of, G, LC>: SelectQuery<SqlType = ST>,
+    SelectStatement<F, S, D, W, O, LimitOffsetClause<LimitClause<Limit>, Of>, G, LC>:
+        SelectQuery<SqlType = ST>,
 {
-    type Output = SelectStatement<F, S, D, W, O, LimitClause<Limit>, Of, G, LC>;
+    type Output = SelectStatement<F, S, D, W, O, LimitOffsetClause<LimitClause<Limit>, Of>, G, LC>;
 
     fn limit(self, limit: i64) -> Self::Output {
         let limit_clause = LimitClause(limit.into_sql::<BigInt>());
@@ -232,8 +231,10 @@
             self.distinct,
             self.where_clause,
             self.order,
-            limit_clause,
-            self.offset,
+            LimitOffsetClause {
+                limit_clause,
+                offset_clause: self.limit_offset.offset_clause,
+            },
             self.group_by,
             self.locking,
         )
@@ -243,12 +244,14 @@
 #[doc(hidden)]
 pub type Offset = Limit;
 
-impl<ST, F, S, D, W, O, L, Of, G, LC> OffsetDsl for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<ST, F, S, D, W, O, L, Of, G, LC> OffsetDsl
+    for SelectStatement<F, S, D, W, O, LimitOffsetClause<L, Of>, G, LC>
 where
     Self: SelectQuery<SqlType = ST>,
-    SelectStatement<F, S, D, W, O, L, OffsetClause<Offset>, G, LC>: SelectQuery<SqlType = ST>,
+    SelectStatement<F, S, D, W, O, LimitOffsetClause<L, OffsetClause<Offset>>, G, LC>:
+        SelectQuery<SqlType = ST>,
 {
-    type Output = SelectStatement<F, S, D, W, O, L, OffsetClause<Offset>, G, LC>;
+    type Output = SelectStatement<F, S, D, W, O, LimitOffsetClause<L, OffsetClause<Offset>>, G, LC>;
 
     fn offset(self, offset: i64) -> Self::Output {
         let offset_clause = OffsetClause(offset.into_sql::<BigInt>());
@@ -258,20 +261,22 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            offset_clause,
+            LimitOffsetClause {
+                limit_clause: self.limit_offset.limit_clause,
+                offset_clause,
+            },
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, Expr> GroupByDsl<Expr> for SelectStatement<F, S, D, W, O, L, Of, G>
+impl<F, S, D, W, O, LOf, G, Expr> GroupByDsl<Expr> for SelectStatement<F, S, D, W, O, LOf, G>
 where
-    SelectStatement<F, S, D, W, O, L, Of, GroupByClause<Expr>>: Query,
+    SelectStatement<F, S, D, W, O, LOf, GroupByClause<Expr>>: SelectQuery,
     Expr: Expression,
 {
-    type Output = SelectStatement<F, S, D, W, O, L, Of, GroupByClause<Expr>>;
+    type Output = SelectStatement<F, S, D, W, O, LOf, GroupByClause<Expr>>;
 
     fn group_by(self, expr: Expr) -> Self::Output {
         let group_by = GroupByClause(expr);
@@ -281,16 +286,15 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             group_by,
             self.locking,
         )
     }
 }
 
-impl<F, S, W, O, L, Of, Lock> LockingDsl<Lock>
-    for SelectStatement<F, S, NoDistinctClause, W, O, L, Of>
+impl<F, S, W, O, LOf, Lock> LockingDsl<Lock>
+    for SelectStatement<F, S, NoDistinctClause, W, O, LOf>
 {
     type Output = SelectStatement<
         F,
@@ -298,8 +302,7 @@
         NoDistinctClause,
         W,
         O,
-        L,
-        Of,
+        LOf,
         NoGroupByClause,
         LockingClause<Lock, NoModifier>,
     >;
@@ -311,18 +314,17 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             LockingClause::new(lock, NoModifier),
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, LM, Modifier> ModifyLockDsl<Modifier>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LockingClause<LC, LM>>
+impl<F, S, D, W, O, LOf, G, LC, LM, Modifier> ModifyLockDsl<Modifier>
+    for SelectStatement<F, S, D, W, O, LOf, G, LockingClause<LC, LM>>
 {
-    type Output = SelectStatement<F, S, D, W, O, L, Of, G, LockingClause<LC, Modifier>>;
+    type Output = SelectStatement<F, S, D, W, O, LOf, G, LockingClause<LC, Modifier>>;
 
     fn modify_lock(self, modifier: Modifier) -> Self::Output {
         SelectStatement::new(
@@ -331,26 +333,24 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             LockingClause::new(self.locking.lock_mode, modifier),
         )
     }
 }
 
-impl<'a, F, S, D, W, O, L, Of, G, DB> BoxedDsl<'a, DB>
-    for SelectStatement<F, SelectClause<S>, D, W, O, L, Of, G>
+impl<'a, F, S, D, W, O, LOf, G, DB> BoxedDsl<'a, DB>
+    for SelectStatement<F, SelectClause<S>, D, W, O, LOf, G>
 where
     Self: AsQuery,
     DB: Backend,
-    S: QueryFragment<DB> + SelectableExpression<F> + 'a,
-    D: QueryFragment<DB> + 'a,
+    S: QueryFragment<DB> + SelectableExpression<F> + Send + 'a,
+    D: QueryFragment<DB> + Send + 'a,
     W: Into<BoxedWhereClause<'a, DB>>,
-    O: Into<Option<Box<dyn QueryFragment<DB> + 'a>>>,
-    L: QueryFragment<DB> + 'a,
-    Of: QueryFragment<DB> + 'a,
-    G: QueryFragment<DB> + 'a,
+    O: Into<Option<Box<dyn QueryFragment<DB> + Send + 'a>>>,
+    LOf: IntoBoxedClause<'a, DB, BoxedClause = BoxedLimitOffsetClause<'a, DB>>,
+    G: QueryFragment<DB> + Send + 'a,
 {
     type Output = BoxedSelectStatement<'a, S::SqlType, F, DB>;
 
@@ -361,26 +361,24 @@
             Box::new(self.distinct),
             self.where_clause.into(),
             self.order.into(),
-            Box::new(self.limit),
-            Box::new(self.offset),
+            self.limit_offset.into_boxed(),
             Box::new(self.group_by),
         )
     }
 }
 
-impl<'a, F, D, W, O, L, Of, G, DB> BoxedDsl<'a, DB>
-    for SelectStatement<F, DefaultSelectClause, D, W, O, L, Of, G>
+impl<'a, F, D, W, O, LOf, G, DB> BoxedDsl<'a, DB>
+    for SelectStatement<F, DefaultSelectClause, D, W, O, LOf, G>
 where
     Self: AsQuery,
     DB: Backend,
     F: QuerySource,
-    F::DefaultSelection: QueryFragment<DB> + 'a,
-    D: QueryFragment<DB> + 'a,
+    F::DefaultSelection: QueryFragment<DB> + Send + 'a,
+    D: QueryFragment<DB> + Send + 'a,
     W: Into<BoxedWhereClause<'a, DB>>,
-    O: Into<Option<Box<dyn QueryFragment<DB> + 'a>>>,
-    L: QueryFragment<DB> + 'a,
-    Of: QueryFragment<DB> + 'a,
-    G: QueryFragment<DB> + 'a,
+    O: Into<Option<Box<dyn QueryFragment<DB> + Send + 'a>>>,
+    LOf: IntoBoxedClause<'a, DB, BoxedClause = BoxedLimitOffsetClause<'a, DB>>,
+    G: QueryFragment<DB> + Send + 'a,
 {
     type Output = BoxedSelectStatement<'a, <F::DefaultSelection as Expression>::SqlType, F, DB>;
     fn internal_into_boxed(self) -> Self::Output {
@@ -390,14 +388,13 @@
             Box::new(self.distinct),
             self.where_clause.into(),
             self.order.into(),
-            Box::new(self.limit),
-            Box::new(self.offset),
+            self.limit_offset.into_boxed(),
             Box::new(self.group_by),
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC> HasTable for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC> HasTable for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     F: HasTable,
 {
@@ -426,7 +423,7 @@
 // FIXME: Should we disable joining when `.group_by` has been called? Are there
 // any other query methods where a join no longer has the same semantics as
 // joining on just the table?
-impl<F, S, D, W, O, L, Of, G, LC, Rhs> JoinTo<Rhs> for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Rhs> JoinTo<Rhs> for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     F: JoinTo<Rhs>,
 {
@@ -438,15 +435,14 @@
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC> QueryDsl for SelectStatement<F, S, D, W, O, L, Of, G, LC> {}
+impl<F, S, D, W, O, LOf, G, LC> QueryDsl for SelectStatement<F, S, D, W, O, LOf, G, LC> {}
 
-impl<F, S, D, W, O, L, Of, G, LC, Conn> RunQueryDsl<Conn>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Conn> RunQueryDsl<Conn>
+    for SelectStatement<F, S, D, W, O, LOf, G, LC>
 {
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, Tab> Insertable<Tab>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, Tab> Insertable<Tab> for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     Tab: Table,
     Self: Query,
@@ -458,8 +454,8 @@
     }
 }
 
-impl<'a, F, S, D, W, O, L, Of, G, LC, Tab> Insertable<Tab>
-    for &'a SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<'a, F, S, D, W, O, LOf, G, LC, Tab> Insertable<Tab>
+    for &'a SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     Tab: Table,
     Self: Query,
@@ -471,10 +467,10 @@
     }
 }
 
-impl<'a, F, S, D, W, O, L, Of, G> SelectNullableDsl
-    for SelectStatement<F, SelectClause<S>, D, W, O, L, Of, G>
+impl<'a, F, S, D, W, O, LOf, G> SelectNullableDsl
+    for SelectStatement<F, SelectClause<S>, D, W, O, LOf, G>
 {
-    type Output = SelectStatement<F, SelectClause<Nullable<S>>, D, W, O, L, Of, G>;
+    type Output = SelectStatement<F, SelectClause<Nullable<S>>, D, W, O, LOf, G>;
 
     fn nullable(self) -> Self::Output {
         SelectStatement::new(
@@ -483,21 +479,19 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
     }
 }
 
-impl<'a, F, D, W, O, L, Of, G> SelectNullableDsl
-    for SelectStatement<F, DefaultSelectClause, D, W, O, L, Of, G>
+impl<'a, F, D, W, O, LOf, G> SelectNullableDsl
+    for SelectStatement<F, DefaultSelectClause, D, W, O, LOf, G>
 where
     F: QuerySource,
 {
-    type Output =
-        SelectStatement<F, SelectClause<Nullable<F::DefaultSelection>>, D, W, O, L, Of, G>;
+    type Output = SelectStatement<F, SelectClause<Nullable<F::DefaultSelection>>, D, W, O, LOf, G>;
 
     fn nullable(self) -> Self::Output {
         SelectStatement::new(
@@ -506,8 +500,7 @@
             self.distinct,
             self.where_clause,
             self.order,
-            self.limit,
-            self.offset,
+            self.limit_offset,
             self.group_by,
             self.locking,
         )
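
The hunks above collapse the separate `limit` and `offset` fields into a single `limit_offset: LimitOffsetClause<L, Of>` pair, so `LimitDsl::limit` and `OffsetDsl::offset` each rebuild only their half of the pair. The following is a minimal, self-contained sketch of that pattern, using deliberately simplified stand-ins rather than Diesel's real types:

```rust
// Standalone illustration of the limit/offset merge: both clauses travel as one
// field, and `limit()` / `offset()` replace only their own half, mirroring the
// `LimitDsl::limit` and `OffsetDsl::offset` impls in the diff above.
#[derive(Debug)]
struct NoLimitClause;
#[derive(Debug)]
struct NoOffsetClause;
#[derive(Debug)]
struct LimitClause(i64);
#[derive(Debug)]
struct OffsetClause(i64);

#[derive(Debug)]
struct LimitOffsetClause<L, Of> {
    limit_clause: L,
    offset_clause: Of,
}

impl<L, Of> LimitOffsetClause<L, Of> {
    fn limit(self, limit: i64) -> LimitOffsetClause<LimitClause, Of> {
        LimitOffsetClause {
            limit_clause: LimitClause(limit),
            offset_clause: self.offset_clause,
        }
    }

    fn offset(self, offset: i64) -> LimitOffsetClause<L, OffsetClause> {
        LimitOffsetClause {
            limit_clause: self.limit_clause,
            offset_clause: OffsetClause(offset),
        }
    }
}

fn main() {
    let no_clause = LimitOffsetClause {
        limit_clause: NoLimitClause,
        offset_clause: NoOffsetClause,
    };
    // The type changes step by step: <NoLimitClause, NoOffsetClause>
    // -> <LimitClause, NoOffsetClause> -> <LimitClause, OffsetClause>.
    let with_both = no_clause.limit(10).offset(20);
    println!("{:?}", with_both);
}
```
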
diff --git a/diesel/src/query_builder/select_statement/mod.rs b/diesel/src/query_builder/select_statement/mod.rs
index c4944a1..0998377 100644
--- a/diesel/src/query_builder/select_statement/mod.rs
+++ b/diesel/src/query_builder/select_statement/mod.rs
@@ -17,7 +17,7 @@
 pub use self::boxed::BoxedSelectStatement;
 
 use super::distinct_clause::NoDistinctClause;
-use super::group_by_clause::NoGroupByClause;
+use super::group_by_clause::*;
 use super::limit_clause::NoLimitClause;
 use super::locking_clause::NoLockingClause;
 use super::offset_clause::NoOffsetClause;
@@ -28,7 +28,8 @@
 use crate::backend::Backend;
 use crate::expression::subselect::ValidSubselect;
 use crate::expression::*;
-use crate::query_builder::SelectQuery;
+use crate::query_builder::limit_offset_clause::LimitOffsetClause;
+use crate::query_builder::{QueryId, SelectQuery};
 use crate::query_source::joins::{AppendSelection, Inner, Join};
 use crate::query_source::*;
 use crate::result::QueryResult;
@@ -42,8 +43,7 @@
     Distinct = NoDistinctClause,
     Where = NoWhereClause,
     Order = NoOrderClause,
-    Limit = NoLimitClause,
-    Offset = NoOffsetClause,
+    LimitOffset = LimitOffsetClause<NoLimitClause, NoOffsetClause>,
     GroupBy = NoGroupByClause,
     Locking = NoLockingClause,
 > {
@@ -52,13 +52,12 @@
     pub(crate) distinct: Distinct,
     pub(crate) where_clause: Where,
     pub(crate) order: Order,
-    pub(crate) limit: Limit,
-    pub(crate) offset: Offset,
+    pub(crate) limit_offset: LimitOffset,
     pub(crate) group_by: GroupBy,
     pub(crate) locking: Locking,
 }
 
-impl<F, S, D, W, O, L, Of, G, LC> SelectStatement<F, S, D, W, O, L, Of, G, LC> {
+impl<F, S, D, W, O, LOf, G, LC> SelectStatement<F, S, D, W, O, LOf, G, LC> {
     #[allow(clippy::too_many_arguments)]
     pub fn new(
         select: S,
@@ -66,21 +65,19 @@
         distinct: D,
         where_clause: W,
         order: O,
-        limit: L,
-        offset: Of,
+        limit_offset: LOf,
         group_by: G,
         locking: LC,
     ) -> Self {
         SelectStatement {
-            select: select,
-            from: from,
-            distinct: distinct,
-            where_clause: where_clause,
-            order: order,
-            limit: limit,
-            offset: offset,
-            group_by: group_by,
-            locking: locking,
+            select,
+            from,
+            distinct,
+            where_clause,
+            order,
+            limit_offset,
+            group_by,
+            locking,
         }
     }
 }
@@ -93,31 +90,34 @@
             NoDistinctClause,
             NoWhereClause,
             NoOrderClause,
-            NoLimitClause,
-            NoOffsetClause,
+            LimitOffsetClause {
+                limit_clause: NoLimitClause,
+                offset_clause: NoOffsetClause,
+            },
             NoGroupByClause,
             NoLockingClause,
         )
     }
 }
 
-impl<F, S, D, W, O, L, Of, G, LC> Query for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC> Query for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
+    G: ValidGroupByClause,
     S: SelectClauseExpression<F>,
+    S::Selection: ValidGrouping<G::Expressions>,
     W: ValidWhereClause<F>,
 {
     type SqlType = S::SelectClauseSqlType;
 }
 
-impl<F, S, D, W, O, L, Of, G, LC> SelectQuery for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC> SelectQuery for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     S: SelectClauseExpression<F>,
 {
     type SqlType = S::SelectClauseSqlType;
 }
 
-impl<F, S, D, W, O, L, Of, G, LC, DB> QueryFragment<DB>
-    for SelectStatement<F, S, D, W, O, L, Of, G, LC>
+impl<F, S, D, W, O, LOf, G, LC, DB> QueryFragment<DB> for SelectStatement<F, S, D, W, O, LOf, G, LC>
 where
     DB: Backend,
     S: SelectClauseQueryFragment<F, DB>,
@@ -126,8 +126,7 @@
     D: QueryFragment<DB>,
     W: QueryFragment<DB>,
     O: QueryFragment<DB>,
-    L: QueryFragment<DB>,
-    Of: QueryFragment<DB>,
+    LOf: QueryFragment<DB>,
     G: QueryFragment<DB>,
     LC: QueryFragment<DB>,
 {
@@ -140,23 +139,20 @@
         self.where_clause.walk_ast(out.reborrow())?;
         self.group_by.walk_ast(out.reborrow())?;
         self.order.walk_ast(out.reborrow())?;
-        self.limit.walk_ast(out.reborrow())?;
-        self.offset.walk_ast(out.reborrow())?;
+        self.limit_offset.walk_ast(out.reborrow())?;
         self.locking.walk_ast(out.reborrow())?;
         Ok(())
     }
 }
 
-impl<S, D, W, O, L, Of, G, LC, DB> QueryFragment<DB>
-    for SelectStatement<(), S, D, W, O, L, Of, G, LC>
+impl<S, D, W, O, LOf, G, LC, DB> QueryFragment<DB> for SelectStatement<(), S, D, W, O, LOf, G, LC>
 where
     DB: Backend,
     S: SelectClauseQueryFragment<(), DB>,
     D: QueryFragment<DB>,
     W: QueryFragment<DB>,
     O: QueryFragment<DB>,
-    L: QueryFragment<DB>,
-    Of: QueryFragment<DB>,
+    LOf: QueryFragment<DB>,
     G: QueryFragment<DB>,
     LC: QueryFragment<DB>,
 {
@@ -167,15 +163,14 @@
         self.where_clause.walk_ast(out.reborrow())?;
         self.group_by.walk_ast(out.reborrow())?;
         self.order.walk_ast(out.reborrow())?;
-        self.limit.walk_ast(out.reborrow())?;
-        self.offset.walk_ast(out.reborrow())?;
+        self.limit_offset.walk_ast(out.reborrow())?;
         self.locking.walk_ast(out.reborrow())?;
         Ok(())
     }
 }
 
-impl<S, F, D, W, O, L, Of, G, LC, QS> ValidSubselect<QS>
-    for SelectStatement<F, S, D, W, O, L, Of, LC, G>
+impl<S, F, D, W, O, LOf, G, LC, QS> ValidSubselect<QS>
+    for SelectStatement<F, S, D, W, O, LOf, LC, G>
 where
     Self: SelectQuery,
     W: ValidWhereClause<Join<F, QS, Inner>>,
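
The new `Query` impl above also demands `S::Selection: ValidGrouping<G::Expressions>`, i.e. the select clause must be valid for whatever the statement groups by. A rough, self-contained model of that kind of compile-time check follows; the traits and types are hypothetical simplifications, not Diesel's actual `ValidGrouping` machinery:

```rust
// Hypothetical, simplified model: an aggregate is valid under any grouping,
// while a plain column is only valid when it appears in the group-by expressions.
trait ValidGrouping<GroupByExprs> {}

struct CountStar;    // stands in for an aggregate expression
struct PlainColumn;  // stands in for a raw column reference

impl<G> ValidGrouping<G> for CountStar {}
impl ValidGrouping<(PlainColumn,)> for PlainColumn {}

// Stands in for the `Query` impl's where-clause: the selection must be valid
// for the given group-by expressions, or the call does not compile.
fn check_selection<S, G>(_selection: S, _group_by: G)
where
    S: ValidGrouping<G>,
{
}

fn main() {
    check_selection(CountStar, ());               // aggregate with no grouping: ok
    check_selection(PlainColumn, (PlainColumn,)); // column that is grouped by: ok
    // check_selection(PlainColumn, ());          // would be rejected at compile time
}
```
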
diff --git a/diesel/src/query_builder/sql_query.rs b/diesel/src/query_builder/sql_query.rs
index 316c9f4..f177d4a 100644
--- a/diesel/src/query_builder/sql_query.rs
+++ b/diesel/src/query_builder/sql_query.rs
@@ -28,7 +28,11 @@
         SqlQuery { inner, query }
     }
 
-    /// Bind a value for use with this SQL query.
+    /// Bind a value for use with this SQL query. The given query should
+    /// contain placeholders whose syntax depends on the backend:
+    /// [SQLite parameter](https://sqlite.org/lang_expr.html#varparam) syntax,
+    /// [PostgreSQL PREPARE syntax](https://www.postgresql.org/docs/current/sql-prepare.html),
+    /// or [MySQL bind syntax](https://dev.mysql.com/doc/refman/8.0/en/mysql-stmt-bind-param.html).
     ///
     /// # Safety
     ///
@@ -39,7 +43,6 @@
     /// # Example
     ///
     /// ```
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # use schema::users;
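
As a usage sketch of the backend-specific placeholder note added above (the `users` table is assumed to exist, and the queries are only constructed here, not executed):

```rust
use diesel::sql_query;
use diesel::sql_types::Integer;

fn main() {
    // PostgreSQL uses numbered placeholders ($1, $2, ...).
    let _pg_query = sql_query("SELECT * FROM users WHERE id > $1").bind::<Integer, _>(1);

    // SQLite and MySQL use positional `?` placeholders instead.
    let _other_query = sql_query("SELECT * FROM users WHERE id > ?").bind::<Integer, _>(1);
}
```
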
diff --git a/diesel/src/query_builder/update_statement/changeset.rs b/diesel/src/query_builder/update_statement/changeset.rs
index 2db594d..afd587c 100644
--- a/diesel/src/query_builder/update_statement/changeset.rs
+++ b/diesel/src/query_builder/update_statement/changeset.rs
@@ -8,18 +8,7 @@
 /// Types which can be passed to
 /// [`update.set`](struct.UpdateStatement.html#method.set).
 ///
-/// ### Deriving
-///
-/// This trait can be automatically derived using by adding `#[derive(AsChangeset)]`
-/// to your struct.  Structs which derive this trait must be annotated with
-/// `#[table_name = "something"]`. If the field name of your struct differs
-/// from the name of the column, you can annotate the field with
-/// `#[column_name = "some_column_name"]`.
-///
-/// By default, any `Option` fields on the struct are skipped if their value is
-/// `None`. If you would like to assign `NULL` to the field instead, you can
-/// annotate your struct with `#[changeset_options(treat_none_as_null =
-/// "true")]`.
+/// This trait can be [derived](derive.AsChangeset.html).
 pub trait AsChangeset {
     /// The table which `Self::Changeset` will be updating
     type Target: QuerySource;
@@ -31,6 +20,9 @@
     fn as_changeset(self) -> Self::Changeset;
 }
 
+#[doc(inline)]
+pub use diesel_derives::AsChangeset;
+
 impl<T: AsChangeset> AsChangeset for Option<T> {
     type Target = T::Target;
     type Changeset = Option<T::Changeset>;
diff --git a/diesel/src/query_builder/update_statement/mod.rs b/diesel/src/query_builder/update_statement/mod.rs
index 8e00427..d50ceca 100644
--- a/diesel/src/query_builder/update_statement/mod.rs
+++ b/diesel/src/query_builder/update_statement/mod.rs
@@ -75,7 +75,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -114,7 +113,6 @@
     /// ### Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -250,7 +248,6 @@
     /// ### Updating a single record:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # #[cfg(feature = "postgres")]
diff --git a/diesel/src/query_builder/upsert/into_conflict_clause.rs b/diesel/src/query_builder/upsert/into_conflict_clause.rs
new file mode 100644
index 0000000..892886b
--- /dev/null
+++ b/diesel/src/query_builder/upsert/into_conflict_clause.rs
@@ -0,0 +1,124 @@
+use crate::insertable::{BatchInsert, OwnedBatchInsert};
+use crate::query_builder::insert_statement::InsertFromSelect;
+#[cfg(feature = "sqlite")]
+use crate::query_builder::where_clause::{BoxedWhereClause, WhereClause};
+#[cfg(any(feature = "sqlite", feature = "postgres"))]
+use crate::query_builder::{AstPass, QueryFragment};
+use crate::query_builder::{BoxedSelectStatement, Query, SelectStatement, ValuesClause};
+#[cfg(any(feature = "sqlite", feature = "postgres"))]
+use crate::result::QueryResult;
+
+pub trait IntoConflictValueClause {
+    type ValueClause;
+
+    fn into_value_clause(self) -> Self::ValueClause;
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct OnConflictSelectWrapper<S>(S);
+
+impl<Q> Query for OnConflictSelectWrapper<Q>
+where
+    Q: Query,
+{
+    type SqlType = Q::SqlType;
+}
+
+#[cfg(feature = "postgres")]
+impl<S> QueryFragment<crate::pg::Pg> for OnConflictSelectWrapper<S>
+where
+    S: QueryFragment<crate::pg::Pg>,
+{
+    fn walk_ast(&self, out: AstPass<crate::pg::Pg>) -> QueryResult<()> {
+        self.0.walk_ast(out)
+    }
+}
+
+// The corresponding impl for `NoWhereClause` is missing because of
+// https://www.sqlite.org/lang_UPSERT.html (Parsing Ambiguity)
+#[cfg(feature = "sqlite")]
+impl<F, S, D, W, O, LOf, G, LC> QueryFragment<crate::sqlite::Sqlite>
+    for OnConflictSelectWrapper<SelectStatement<F, S, D, WhereClause<W>, O, LOf, G, LC>>
+where
+    SelectStatement<F, S, D, WhereClause<W>, O, LOf, G, LC>: QueryFragment<crate::sqlite::Sqlite>,
+{
+    fn walk_ast(&self, out: AstPass<crate::sqlite::Sqlite>) -> QueryResult<()> {
+        self.0.walk_ast(out)
+    }
+}
+
+#[cfg(feature = "sqlite")]
+impl<'a, ST, QS> QueryFragment<crate::sqlite::Sqlite>
+    for OnConflictSelectWrapper<BoxedSelectStatement<'a, ST, QS, crate::sqlite::Sqlite>>
+where
+    BoxedSelectStatement<'a, ST, QS, crate::sqlite::Sqlite>: QueryFragment<crate::sqlite::Sqlite>,
+    QS: crate::query_source::QuerySource,
+    QS::FromClause: QueryFragment<crate::sqlite::Sqlite>,
+{
+    fn walk_ast(&self, pass: AstPass<crate::sqlite::Sqlite>) -> QueryResult<()> {
+        // https://www.sqlite.org/lang_UPSERT.html (Parsing Ambiguity)
+        self.0.build_query(pass, |where_clause, mut pass| {
+            match where_clause {
+                BoxedWhereClause::None => pass.push_sql(" WHERE 1=1 "),
+                w => w.walk_ast(pass.reborrow())?,
+            }
+            Ok(())
+        })
+    }
+}
+
+impl<Inner, Tab> IntoConflictValueClause for ValuesClause<Inner, Tab> {
+    type ValueClause = Self;
+
+    fn into_value_clause(self) -> Self::ValueClause {
+        self
+    }
+}
+
+impl<'a, Inner, Tab> IntoConflictValueClause for BatchInsert<'a, Inner, Tab> {
+    type ValueClause = Self;
+
+    fn into_value_clause(self) -> Self::ValueClause {
+        self
+    }
+}
+
+impl<Inner, Tab> IntoConflictValueClause for OwnedBatchInsert<Inner, Tab> {
+    type ValueClause = Self;
+
+    fn into_value_clause(self) -> Self::ValueClause {
+        self
+    }
+}
+
+impl<F, S, D, W, O, LOf, G, LC, Columns> IntoConflictValueClause
+    for InsertFromSelect<SelectStatement<F, S, D, W, O, LOf, G, LC>, Columns>
+{
+    type ValueClause = InsertFromSelect<
+        OnConflictSelectWrapper<SelectStatement<F, S, D, W, O, LOf, G, LC>>,
+        Columns,
+    >;
+
+    fn into_value_clause(self) -> Self::ValueClause {
+        let InsertFromSelect { columns, query } = self;
+        InsertFromSelect {
+            query: OnConflictSelectWrapper(query),
+            columns,
+        }
+    }
+}
+
+impl<'a, ST, QS, DB, Columns> IntoConflictValueClause
+    for InsertFromSelect<BoxedSelectStatement<'a, ST, QS, DB>, Columns>
+{
+    type ValueClause =
+        InsertFromSelect<OnConflictSelectWrapper<BoxedSelectStatement<'a, ST, QS, DB>>, Columns>;
+
+    fn into_value_clause(self) -> Self::ValueClause {
+        let InsertFromSelect { columns, query } = self;
+        InsertFromSelect {
+            query: OnConflictSelectWrapper(query),
+            columns,
+        }
+    }
+}
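
For context on the `WHERE 1=1` emitted by the boxed-select wrapper above: for an `INSERT ... SELECT` with no `WHERE` clause, SQLite cannot tell whether `ON CONFLICT` starts an upsert clause or still belongs to the `SELECT` (the linked "Parsing Ambiguity" section). The generated SQL shapes, with made-up table and column names, look roughly like this:

```rust
// Illustrative SQL only; `users` and `staged_users` are made-up names.
// Without a WHERE clause, SQLite's parser cannot decide where the SELECT ends:
const AMBIGUOUS: &str = "INSERT INTO users (name) \
    SELECT name FROM staged_users \
    ON CONFLICT (name) DO NOTHING";

// Emitting `WHERE 1=1` (as the wrapper above does for `BoxedWhereClause::None`)
// unambiguously terminates the SELECT before the upsert clause begins:
const DISAMBIGUATED: &str = "INSERT INTO users (name) \
    SELECT name FROM staged_users WHERE 1=1 \
    ON CONFLICT (name) DO NOTHING";

fn main() {
    println!("{}\n{}", AMBIGUOUS, DISAMBIGUATED);
}
```
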
diff --git a/diesel/src/query_builder/upsert/mod.rs b/diesel/src/query_builder/upsert/mod.rs
new file mode 100644
index 0000000..df68774
--- /dev/null
+++ b/diesel/src/query_builder/upsert/mod.rs
@@ -0,0 +1,4 @@
+pub(crate) mod into_conflict_clause;
+pub(crate) mod on_conflict_actions;
+pub(crate) mod on_conflict_clause;
+pub(crate) mod on_conflict_target;
diff --git a/diesel/src/pg/upsert/on_conflict_actions.rs b/diesel/src/query_builder/upsert/on_conflict_actions.rs
similarity index 64%
rename from diesel/src/pg/upsert/on_conflict_actions.rs
rename to diesel/src/query_builder/upsert/on_conflict_actions.rs
index 41b2672..3a3d71f 100644
--- a/diesel/src/pg/upsert/on_conflict_actions.rs
+++ b/diesel/src/query_builder/upsert/on_conflict_actions.rs
@@ -1,20 +1,18 @@
+use crate::backend::{Backend, SupportsOnConflictClause};
 use crate::expression::{AppearsOnTable, Expression};
-use crate::pg::Pg;
 use crate::query_builder::*;
 use crate::query_source::*;
 use crate::result::QueryResult;
 
-/// Represents `excluded.column` in an `ON CONFLICT DO UPDATE` clause.
-pub fn excluded<T>(excluded: T) -> Excluded<T> {
-    Excluded(excluded)
-}
-
 #[doc(hidden)]
 #[derive(Debug, Clone, Copy)]
 pub struct DoNothing;
 
-impl QueryFragment<Pg> for DoNothing {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+impl<DB> QueryFragment<DB> for DoNothing
+where
+    DB: Backend + SupportsOnConflictClause,
+{
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
         out.push_sql(" DO NOTHING");
         Ok(())
     }
@@ -32,11 +30,12 @@
     }
 }
 
-impl<T> QueryFragment<Pg> for DoUpdate<T>
+impl<DB, T> QueryFragment<DB> for DoUpdate<T>
 where
-    T: QueryFragment<Pg>,
+    DB: Backend + SupportsOnConflictClause,
+    T: QueryFragment<DB>,
 {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
         out.unsafe_to_cache_prepared();
         if self.changeset.is_noop()? {
             out.push_sql(" DO NOTHING");
@@ -52,11 +51,18 @@
 #[derive(Debug, Clone, Copy)]
 pub struct Excluded<T>(T);
 
-impl<T> QueryFragment<Pg> for Excluded<T>
+impl<T> Excluded<T> {
+    pub(crate) fn new(t: T) -> Self {
+        Excluded(t)
+    }
+}
+
+impl<DB, T> QueryFragment<DB> for Excluded<T>
 where
+    DB: Backend + SupportsOnConflictClause,
     T: Column,
 {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
         out.push_sql("excluded.");
         out.push_identifier(T::NAME)?;
         Ok(())
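
The `Excluded<T>` fragment above renders as `excluded.column_name`, which is how a `DO UPDATE` reaches the values of the row that failed to insert. A hedged usage sketch against a hypothetical `users` table follows; the `diesel::pg::upsert::excluded` path matches released Diesel versions and may move as part of this refactor:

```rust
#[macro_use]
extern crate diesel;

use diesel::pg::upsert::excluded;
use diesel::prelude::*;

table! {
    users (id) {
        id -> Integer,
        name -> Text,
    }
}

// Upsert that keeps the incoming name on conflict: `excluded(name)` renders as
// `excluded.name`, i.e. the value the conflicting row tried to insert.
fn upsert_name(conn: &PgConnection, user_id: i32, user_name: &str) -> QueryResult<usize> {
    use self::users::dsl::*;

    diesel::insert_into(users)
        .values((id.eq(user_id), name.eq(user_name)))
        .on_conflict(id)
        .do_update()
        .set(name.eq(excluded(name)))
        .execute(conn)
}

fn main() {}
```
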
diff --git a/diesel/src/pg/upsert/on_conflict_clause.rs b/diesel/src/query_builder/upsert/on_conflict_clause.rs
similarity index 66%
rename from diesel/src/pg/upsert/on_conflict_clause.rs
rename to diesel/src/query_builder/upsert/on_conflict_clause.rs
index d1063ab..95246cd 100644
--- a/diesel/src/pg/upsert/on_conflict_clause.rs
+++ b/diesel/src/query_builder/upsert/on_conflict_clause.rs
@@ -1,7 +1,7 @@
 use super::on_conflict_actions::*;
 use super::on_conflict_target::*;
+use crate::backend::{Backend, SupportsOnConflictClause};
 use crate::insertable::*;
-use crate::pg::Pg;
 use crate::query_builder::*;
 use crate::result::QueryResult;
 
@@ -29,22 +29,25 @@
     }
 }
 
-impl<Values, Target, Action> CanInsertInSingleQuery<Pg> for OnConflictValues<Values, Target, Action>
+impl<DB, Values, Target, Action> CanInsertInSingleQuery<DB>
+    for OnConflictValues<Values, Target, Action>
 where
-    Values: CanInsertInSingleQuery<Pg>,
+    DB: Backend + SupportsOnConflictClause,
+    Values: CanInsertInSingleQuery<DB>,
 {
     fn rows_to_insert(&self) -> Option<usize> {
         self.values.rows_to_insert()
     }
 }
 
-impl<Values, Target, Action> QueryFragment<Pg> for OnConflictValues<Values, Target, Action>
+impl<DB, Values, Target, Action> QueryFragment<DB> for OnConflictValues<Values, Target, Action>
 where
-    Values: QueryFragment<Pg>,
-    Target: QueryFragment<Pg>,
-    Action: QueryFragment<Pg>,
+    DB: Backend + SupportsOnConflictClause,
+    Values: QueryFragment<DB>,
+    Target: QueryFragment<DB>,
+    Action: QueryFragment<DB>,
 {
-    fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
         self.values.walk_ast(out.reborrow())?;
         out.push_sql(" ON CONFLICT");
         self.target.walk_ast(out.reborrow())?;
diff --git a/diesel/src/query_builder/upsert/on_conflict_target.rs b/diesel/src/query_builder/upsert/on_conflict_target.rs
new file mode 100644
index 0000000..45028d4
--- /dev/null
+++ b/diesel/src/query_builder/upsert/on_conflict_target.rs
@@ -0,0 +1,106 @@
+use crate::backend::{Backend, SupportsOnConflictClause};
+use crate::expression::SqlLiteral;
+use crate::query_builder::*;
+use crate::query_source::Column;
+use crate::result::QueryResult;
+
+#[doc(hidden)]
+pub trait OnConflictTarget<Table> {}
+
+#[doc(hidden)]
+#[derive(Debug, Clone, Copy)]
+pub struct NoConflictTarget;
+
+impl<DB> QueryFragment<DB> for NoConflictTarget
+where
+    DB: Backend + SupportsOnConflictClause,
+{
+    fn walk_ast(&self, _: AstPass<DB>) -> QueryResult<()> {
+        Ok(())
+    }
+}
+
+impl<Table> OnConflictTarget<Table> for NoConflictTarget {}
+
+#[doc(hidden)]
+#[derive(Debug, Clone, Copy)]
+pub struct ConflictTarget<T>(pub T);
+
+impl<DB, T> QueryFragment<DB> for ConflictTarget<T>
+where
+    DB: Backend + SupportsOnConflictClause,
+    T: Column,
+{
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
+        out.push_sql(" (");
+        out.push_identifier(T::NAME)?;
+        out.push_sql(")");
+        Ok(())
+    }
+}
+
+impl<T> OnConflictTarget<T::Table> for ConflictTarget<T> where T: Column {}
+
+impl<DB, ST> QueryFragment<DB> for ConflictTarget<SqlLiteral<ST>>
+where
+    DB: Backend + SupportsOnConflictClause,
+    SqlLiteral<ST>: QueryFragment<DB>,
+{
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
+        out.push_sql(" ");
+        self.0.walk_ast(out.reborrow())?;
+        Ok(())
+    }
+}
+
+impl<Tab, ST> OnConflictTarget<Tab> for ConflictTarget<SqlLiteral<ST>> {}
+
+impl<DB, T> QueryFragment<DB> for ConflictTarget<(T,)>
+where
+    DB: Backend + SupportsOnConflictClause,
+    T: Column,
+{
+    fn walk_ast(&self, mut out: AstPass<DB>) -> QueryResult<()> {
+        out.push_sql(" (");
+        out.push_identifier(T::NAME)?;
+        out.push_sql(")");
+        Ok(())
+    }
+}
+
+impl<T> OnConflictTarget<T::Table> for ConflictTarget<(T,)> where T: Column {}
+
+macro_rules! on_conflict_tuples {
+    ($(
+        $Tuple:tt {
+            $(($idx:tt) -> $T:ident, $ST:ident, $TT:ident,)*
+        }
+    )+) => {
+        $(
+            impl<_DB, _T, $($T),*> QueryFragment<_DB> for ConflictTarget<(_T, $($T),*)> where
+                _DB: Backend + SupportsOnConflictClause,
+                _T: Column,
+                $($T: Column<Table=_T::Table>,)*
+            {
+                fn walk_ast(&self, mut out: AstPass<_DB>) -> QueryResult<()> {
+                    out.push_sql(" (");
+                    out.push_identifier(_T::NAME)?;
+                    $(
+                        out.push_sql(", ");
+                        out.push_identifier($T::NAME)?;
+                    )*
+                    out.push_sql(")");
+                    Ok(())
+                }
+            }
+
+            impl<_T, $($T),*> OnConflictTarget<_T::Table> for ConflictTarget<(_T, $($T),*)> where
+                _T: Column,
+                $($T: Column<Table=_T::Table>,)*
+            {
+            }
+        )*
+    }
+}
+
+__diesel_for_each_tuple!(on_conflict_tuples);
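
The `ConflictTarget` impls above accept a single column, a tuple of columns from the same table, or a raw `SqlLiteral` fragment as the conflict target. A hedged sketch of those call sites, again using a hypothetical `users` table; method availability can differ between Diesel releases:

```rust
#[macro_use]
extern crate diesel;

use diesel::prelude::*;

table! {
    users (id) {
        id -> Integer,
        name -> Text,
    }
}

fn conflict_target_forms(conn: &PgConnection) -> QueryResult<()> {
    use self::users::dsl::*;

    // Single-column target renders as `ON CONFLICT (name)`.
    diesel::insert_into(users)
        .values((id.eq(1), name.eq("Sean")))
        .on_conflict(name)
        .do_nothing()
        .execute(conn)?;

    // A tuple of columns from the same table renders as `ON CONFLICT (id, name)`.
    diesel::insert_into(users)
        .values((id.eq(1), name.eq("Sean")))
        .on_conflict((id, name))
        .do_nothing()
        .execute(conn)?;

    Ok(())
}

fn main() {}
```
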
diff --git a/diesel/src/query_builder/where_clause.rs b/diesel/src/query_builder/where_clause.rs
index e8ca3e6..8c47086 100644
--- a/diesel/src/query_builder/where_clause.rs
+++ b/diesel/src/query_builder/where_clause.rs
@@ -59,8 +59,8 @@
     }
 }
 
-impl<DB> Into<BoxedWhereClause<'static, DB>> for NoWhereClause {
-    fn into(self) -> BoxedWhereClause<'static, DB> {
+impl<'a, DB> Into<BoxedWhereClause<'a, DB>> for NoWhereClause {
+    fn into(self) -> BoxedWhereClause<'a, DB> {
         BoxedWhereClause::None
     }
 }
@@ -108,7 +108,7 @@
 impl<'a, DB, Predicate> Into<BoxedWhereClause<'a, DB>> for WhereClause<Predicate>
 where
     DB: Backend,
-    Predicate: QueryFragment<DB> + 'a,
+    Predicate: QueryFragment<DB> + Send + 'a,
 {
     fn into(self) -> BoxedWhereClause<'a, DB> {
         BoxedWhereClause::Where(Box::new(self.0))
@@ -125,7 +125,7 @@
 
 #[allow(missing_debug_implementations)] // We can't...
 pub enum BoxedWhereClause<'a, DB> {
-    Where(Box<dyn QueryFragment<DB> + 'a>),
+    Where(Box<dyn QueryFragment<DB> + Send + 'a>),
     None,
 }
 
@@ -153,7 +153,7 @@
 impl<'a, DB, Predicate> WhereAnd<Predicate> for BoxedWhereClause<'a, DB>
 where
     DB: Backend + 'a,
-    Predicate: QueryFragment<DB> + 'a,
+    Predicate: QueryFragment<DB> + Send + 'a,
 {
     type Output = Self;
 
@@ -170,7 +170,7 @@
 impl<'a, DB, Predicate> WhereOr<Predicate> for BoxedWhereClause<'a, DB>
 where
     DB: Backend + 'a,
-    Predicate: QueryFragment<DB> + 'a,
+    Predicate: QueryFragment<DB> + Send + 'a,
 {
     type Output = Self;
 
diff --git a/diesel/src/query_dsl/belonging_to_dsl.rs b/diesel/src/query_dsl/belonging_to_dsl.rs
index f4c620a..1c00c64 100644
--- a/diesel/src/query_dsl/belonging_to_dsl.rs
+++ b/diesel/src/query_dsl/belonging_to_dsl.rs
@@ -3,7 +3,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::{posts, users};
 /// #
diff --git a/diesel/src/query_dsl/group_by_dsl.rs b/diesel/src/query_dsl/group_by_dsl.rs
index 1204288..a79c145 100644
--- a/diesel/src/query_dsl/group_by_dsl.rs
+++ b/diesel/src/query_dsl/group_by_dsl.rs
@@ -1,5 +1,5 @@
 use crate::expression::Expression;
-use crate::query_builder::{AsQuery, Query};
+use crate::query_builder::AsQuery;
 use crate::query_source::Table;
 
 /// This trait is not yet part of Diesel's public API. It may change in the
@@ -15,7 +15,7 @@
 /// query is an error), you may need to use `sql` for your select clause.
 pub trait GroupByDsl<Expr: Expression> {
     /// The type returned by `.group_by`
-    type Output: Query;
+    type Output;
 
     /// See the trait documentation.
     fn group_by(self, expr: Expr) -> Self::Output;
diff --git a/diesel/src/query_dsl/join_dsl.rs b/diesel/src/query_dsl/join_dsl.rs
index 025878b..95b32b8 100644
--- a/diesel/src/query_dsl/join_dsl.rs
+++ b/diesel/src/query_dsl/join_dsl.rs
@@ -52,7 +52,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::{users, posts};
 /// #
diff --git a/diesel/src/query_dsl/mod.rs b/diesel/src/query_dsl/mod.rs
index 17b74ae..78cc0e8 100644
--- a/diesel/src/query_dsl/mod.rs
+++ b/diesel/src/query_dsl/mod.rs
@@ -83,7 +83,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -117,7 +116,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::animals;
     /// #
@@ -189,7 +187,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::users;
     /// #
@@ -213,7 +210,6 @@
     /// ### When used with a left join
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::{users, posts};
     /// #
@@ -296,7 +292,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -365,7 +360,6 @@
     /// ### With implicit `ON` clause
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::{users, posts};
     /// # /*
@@ -393,7 +387,6 @@
     /// ### With explicit `ON` clause
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::{users, posts};
     /// #
@@ -461,7 +454,6 @@
     /// # Example:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -492,7 +484,6 @@
     /// # Example:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -536,7 +527,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -573,7 +563,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -641,7 +630,6 @@
     /// # Examples
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -689,7 +677,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::users;
     /// #
@@ -739,7 +726,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::users;
     /// #
@@ -922,7 +908,6 @@
     /// ### Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::users;
     /// #
@@ -953,7 +938,6 @@
     /// ### Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # use schema::users;
     /// #
@@ -989,7 +973,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1025,7 +1008,6 @@
    /// This is useful for writing queries that contain subselects on non-null
    /// fields, comparing them to nullable fields.
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1078,7 +1060,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1132,7 +1113,6 @@
     /// ## Returning a single field
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1153,7 +1133,6 @@
     /// ## Returning a tuple
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1178,7 +1157,6 @@
     /// ## Returning a struct
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// #[derive(Queryable, PartialEq, Debug)]
@@ -1225,7 +1203,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -1286,7 +1263,6 @@
     /// # Example:
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../doctest_setup.rs");
     /// # fn main() {
     /// #     run_test();
diff --git a/diesel/src/query_dsl/save_changes_dsl.rs b/diesel/src/query_dsl/save_changes_dsl.rs
index 1695fc6..7dcffec 100644
--- a/diesel/src/query_dsl/save_changes_dsl.rs
+++ b/diesel/src/query_dsl/save_changes_dsl.rs
@@ -88,7 +88,6 @@
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use] extern crate diesel;
 /// # include!("../doctest_setup.rs");
 /// # use schema::animals;
 /// #
diff --git a/diesel/src/result.rs b/diesel/src/result.rs
index e8a0b49..fb06e4a 100644
--- a/diesel/src/result.rs
+++ b/diesel/src/result.rs
@@ -11,6 +11,7 @@
 ///
 /// This type is not intended to be exhaustively matched, and new variants may
 /// be added in the future without a major version bump.
+#[non_exhaustive]
 pub enum Error {
     /// The query contained a nul byte.
     ///
@@ -72,9 +73,6 @@
     /// Attempted to perform an operation that cannot be done inside a transaction
     /// when a transaction was already open.
     AlreadyInTransaction,
-
-    #[doc(hidden)]
-    __Nonexhaustive,
 }
 
 #[derive(Debug, Clone, Copy)]
@@ -113,6 +111,12 @@
     /// to lock the rows.
     ReadOnlyTransaction,
 
+    /// A not null constraint was violated.
+    NotNullViolation,
+
+    /// A check constraint was violated.
+    CheckViolation,
+
     #[doc(hidden)]
     __Unknown, // Match against _ instead, more variants may be added in the future
 }
@@ -186,6 +190,7 @@
 ///
 /// [`Connection::establish`]: ../connection/trait.Connection.html#tymethod.establish
 #[derive(Debug, PartialEq)]
+#[non_exhaustive]
 pub enum ConnectionError {
     /// The connection URL contained a `NUL` byte.
     InvalidCString(NulError),
@@ -200,8 +205,6 @@
     /// This variant is returned if an error occurred executing the query to set
     /// those options. Diesel will never affect global configuration.
     CouldntSetupConfiguration(Error),
-    #[doc(hidden)]
-    __Nonexhaustive, // Match against _ instead, more variants may be added in the future
 }
 
 /// A specialized result type for queries.
@@ -278,7 +281,6 @@
                 f,
                 "Cannot perform this operation while a transaction is open",
             ),
-            Error::__Nonexhaustive => unreachable!(),
         }
     }
 }
@@ -302,7 +304,6 @@
             ConnectionError::BadConnection(ref s) => write!(f, "{}", s),
             ConnectionError::InvalidConnectionUrl(ref s) => write!(f, "{}", s),
             ConnectionError::CouldntSetupConfiguration(ref e) => e.fmt(f),
-            ConnectionError::__Nonexhaustive => unreachable!(),
         }
     }
 }
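
With `Error` and `ConnectionError` now `#[non_exhaustive]` and the hidden `__Nonexhaustive` variants removed, downstream code matches with a wildcard arm instead; the new `NotNullViolation` and `CheckViolation` kinds slot into the same pattern. A small sketch:

```rust
use diesel::result::{DatabaseErrorKind, Error};

// With #[non_exhaustive], exhaustive matching outside the crate is impossible,
// so the trailing `_` arm keeps this compiling when future variants are added.
fn describe(error: &Error) -> &'static str {
    match error {
        Error::NotFound => "no rows returned",
        Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => "unique violation",
        Error::DatabaseError(DatabaseErrorKind::NotNullViolation, _) => "not-null violation",
        Error::DatabaseError(DatabaseErrorKind::CheckViolation, _) => "check violation",
        Error::DatabaseError(_, _) => "other database error",
        _ => "other error",
    }
}

fn main() {
    println!("{}", describe(&Error::NotFound));
}
```
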
diff --git a/diesel/src/sql_types/mod.rs b/diesel/src/sql_types/mod.rs
index 79413f4..9cd8e98 100644
--- a/diesel/src/sql_types/mod.rs
+++ b/diesel/src/sql_types/mod.rs
@@ -20,6 +20,8 @@
 pub use self::fold::Foldable;
 pub use self::ord::SqlOrd;
 
+use crate::query_builder::QueryId;
+
 /// The boolean SQL type.
 ///
 /// On backends without a native boolean type,
@@ -363,38 +365,16 @@
 
 /// Indicates that a SQL type exists for a backend.
 ///
-/// # Deriving
-///
-/// This trait can be automatically derived by `#[derive(SqlType)]`.
-/// This derive will also implement [`NotNull`] and [`SingleValue`].
-/// When deriving this trait,
-/// you need to specify how the type is represented on various backends.
-/// You don't need to specify every backend,
-/// only the ones supported by your type.
-///
-/// For PostgreSQL, add `#[postgres(oid = "some_oid", array_oid = "some_oid")]`
-/// or `#[postgres(type_name = "pg_type_name")]` if the OID is not stable.
-/// For MySQL, specify which variant of [`MysqlType`] should be used
-/// by adding `#[mysql_type = "Variant"]`.
-/// For SQLite, specify which variant of [`SqliteType`] should be used
-/// by adding `#[sqlite_type = "Variant"]`.
-///
-/// [`NotNull`]: trait.NotNull.html
-/// [`SingleValue`]: trait.SingleValue.html
-/// [`MysqlType`]: ../mysql/enum.MysqlType.html
-/// [`SqliteType`]: ../sqlite/enum.SqliteType.html
+/// This trait can be derived using the [`SqlType` derive](derive.SqlType.html).
 ///
 /// # Example
 ///
 /// ```rust
-/// # #[macro_use]
-/// # extern crate diesel;
-/// #[derive(SqlType)]
+/// #[derive(diesel::sql_types::SqlType)]
 /// #[postgres(oid = "23", array_oid = "1007")]
 /// #[sqlite_type = "Integer"]
 /// #[mysql_type = "Long"]
 /// pub struct Integer;
-/// # fn main() {}
 /// ```
 pub trait HasSqlType<ST>: TypeMetadata {
     /// Fetch the metadata for the given type
@@ -464,3 +444,8 @@
 pub trait SingleValue {}
 
 impl<T: NotNull + SingleValue> SingleValue for Nullable<T> {}
+
+#[doc(inline)]
+pub use diesel_derives::DieselNumericOps;
+#[doc(inline)]
+pub use diesel_derives::SqlType;
diff --git a/diesel/src/sqlite/backend.rs b/diesel/src/sqlite/backend.rs
index 0b04766..24a1947 100644
--- a/diesel/src/sqlite/backend.rs
+++ b/diesel/src/sqlite/backend.rs
@@ -53,4 +53,5 @@
     type MetadataLookup = ();
 }
 
+impl SupportsOnConflictClause for Sqlite {}
 impl UsesAnsiSavepointSyntax for Sqlite {}
diff --git a/diesel/src/sqlite/connection/functions.rs b/diesel/src/sqlite/connection/functions.rs
index 3e6a526..e340b64 100644
--- a/diesel/src/sqlite/connection/functions.rs
+++ b/diesel/src/sqlite/connection/functions.rs
@@ -2,7 +2,7 @@
 
 use super::raw::RawConnection;
 use super::serialized_value::SerializedValue;
-use super::{Sqlite, SqliteValue};
+use super::{Sqlite, SqliteAggregateFunction, SqliteValue};
 use crate::deserialize::{FromSqlRow, Queryable};
 use crate::result::{DatabaseErrorKind, Error, QueryResult};
 use crate::row::Row;
@@ -30,29 +30,75 @@
     }
 
     conn.register_sql_function(fn_name, fields_needed, deterministic, move |conn, args| {
-        let mut row = FunctionRow { args };
-        let args_row = Args::Row::build_from_row(&mut row).map_err(Error::DeserializationError)?;
-        let args = Args::build(args_row);
+        let args = build_sql_function_args::<ArgsSqlType, Args>(args)?;
 
         let result = f(conn, args);
 
-        let mut buf = Output::new(Vec::new(), &());
-        let is_null = result.to_sql(&mut buf).map_err(Error::SerializationError)?;
-
-        let bytes = if let IsNull::Yes = is_null {
-            None
-        } else {
-            Some(buf.into_inner())
-        };
-
-        Ok(SerializedValue {
-            ty: Sqlite::metadata(&()),
-            data: bytes,
-        })
+        process_sql_function_result::<RetSqlType, Ret>(result)
     })?;
     Ok(())
 }
 
+pub fn register_aggregate<ArgsSqlType, RetSqlType, Args, Ret, A>(
+    conn: &RawConnection,
+    fn_name: &str,
+) -> QueryResult<()>
+where
+    A: SqliteAggregateFunction<Args, Output = Ret> + 'static + Send,
+    Args: Queryable<ArgsSqlType, Sqlite>,
+    Ret: ToSql<RetSqlType, Sqlite>,
+    Sqlite: HasSqlType<RetSqlType>,
+{
+    let fields_needed = Args::Row::FIELDS_NEEDED;
+    if fields_needed > 127 {
+        return Err(Error::DatabaseError(
+            DatabaseErrorKind::UnableToSendCommand,
+            Box::new("SQLite functions cannot take more than 127 parameters".to_string()),
+        ));
+    }
+
+    conn.register_aggregate_function::<ArgsSqlType, RetSqlType, Args, Ret, A>(
+        fn_name,
+        fields_needed,
+    )?;
+
+    Ok(())
+}
+
+pub(crate) fn build_sql_function_args<ArgsSqlType, Args>(
+    args: &[*mut ffi::sqlite3_value],
+) -> Result<Args, Error>
+where
+    Args: Queryable<ArgsSqlType, Sqlite>,
+{
+    let mut row = FunctionRow { args };
+    let args_row = Args::Row::build_from_row(&mut row).map_err(Error::DeserializationError)?;
+
+    Ok(Args::build(args_row))
+}
+
+pub(crate) fn process_sql_function_result<RetSqlType, Ret>(
+    result: Ret,
+) -> QueryResult<SerializedValue>
+where
+    Ret: ToSql<RetSqlType, Sqlite>,
+    Sqlite: HasSqlType<RetSqlType>,
+{
+    let mut buf = Output::new(Vec::new(), &());
+    let is_null = result.to_sql(&mut buf).map_err(Error::SerializationError)?;
+
+    let bytes = if let IsNull::Yes = is_null {
+        None
+    } else {
+        Some(buf.into_inner())
+    };
+
+    Ok(SerializedValue {
+        ty: Sqlite::metadata(&()),
+        data: bytes,
+    })
+}
+
 struct FunctionRow<'a> {
     args: &'a [*mut ffi::sqlite3_value],
 }
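
The refactor above factors argument construction and result serialization out of `register` so the scalar path and the new aggregate path share them. For reference, registering a plain scalar function looks roughly like the sketch below; `add_one` is a made-up function name, and `register_impl` is the entry point generated by `sql_function!` for SQLite:

```rust
#[macro_use]
extern crate diesel;

use diesel::prelude::*;
use diesel::sql_types::Integer;

sql_function! {
    fn add_one(x: Integer) -> Integer;
}

fn main() -> QueryResult<()> {
    let connection = SqliteConnection::establish(":memory:")
        .expect("failed to open an in-memory SQLite database");

    // Register a Rust closure as the implementation SQLite should call for
    // `add_one`; internally this goes through the same argument/result helpers
    // factored out above.
    add_one::register_impl(&connection, |x: i32| x + 1)?;

    let answer = diesel::select(add_one(41)).get_result::<i32>(&connection)?;
    assert_eq!(42, answer);
    Ok(())
}
```
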
diff --git a/diesel/src/sqlite/connection/mod.rs b/diesel/src/sqlite/connection/mod.rs
index 2bc6cb1..b200d15 100644
--- a/diesel/src/sqlite/connection/mod.rs
+++ b/diesel/src/sqlite/connection/mod.rs
@@ -15,6 +15,7 @@
 use self::raw::RawConnection;
 use self::statement_iterator::*;
 use self::stmt::{Statement, StatementUse};
+use super::SqliteAggregateFunction;
 use crate::connection::*;
 use crate::deserialize::{Queryable, QueryableByName};
 use crate::query_builder::bind_collector::RawBytesBindCollector;
@@ -120,7 +121,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -150,7 +150,6 @@
     /// # Example
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("../../doctest_setup.rs");
     /// #
     /// # fn main() {
@@ -240,6 +239,20 @@
         )
     }
 
+    #[doc(hidden)]
+    pub fn register_aggregate_function<ArgsSqlType, RetSqlType, Args, Ret, A>(
+        &self,
+        fn_name: &str,
+    ) -> QueryResult<()>
+    where
+        A: SqliteAggregateFunction<Args, Output = Ret> + 'static + Send,
+        Args: Queryable<ArgsSqlType, Sqlite>,
+        Ret: ToSql<RetSqlType, Sqlite>,
+        Sqlite: HasSqlType<RetSqlType>,
+    {
+        functions::register_aggregate::<_, _, _, _, A>(&self.raw_connection, fn_name)
+    }
+
     fn register_diesel_sql_functions(&self) -> QueryResult<()> {
         use crate::sql_types::{Integer, Text};
 
@@ -372,4 +385,144 @@
             .get_result::<(i32, i32, i32)>(&connection);
         assert_eq!(Ok((2, 3, 4)), added);
     }
+
+    use crate::sqlite::SqliteAggregateFunction;
+
+    sql_function! {
+        #[aggregate]
+        fn my_sum(expr: Integer) -> Integer;
+    }
+
+    #[derive(Default)]
+    struct MySum {
+        sum: i32,
+    }
+
+    impl SqliteAggregateFunction<i32> for MySum {
+        type Output = i32;
+
+        fn step(&mut self, expr: i32) {
+            self.sum += expr;
+        }
+
+        fn finalize(aggregator: Option<Self>) -> Self::Output {
+            aggregator.map(|a| a.sum).unwrap_or_default()
+        }
+    }
+
+    table! {
+        my_sum_example {
+            id -> Integer,
+            value -> Integer,
+        }
+    }
+
+    #[test]
+    fn register_aggregate_function() {
+        use self::my_sum_example::dsl::*;
+
+        let connection = SqliteConnection::establish(":memory:").unwrap();
+        connection
+            .execute(
+                "CREATE TABLE my_sum_example (id integer primary key autoincrement, value integer)",
+            )
+            .unwrap();
+        connection
+            .execute("INSERT INTO my_sum_example (value) VALUES (1), (2), (3)")
+            .unwrap();
+
+        my_sum::register_impl::<MySum, _>(&connection).unwrap();
+
+        let result = my_sum_example
+            .select(my_sum(value))
+            .get_result::<i32>(&connection);
+        assert_eq!(Ok(6), result);
+    }
+
+    #[test]
+    fn register_aggregate_function_returns_finalize_default_on_empty_set() {
+        use self::my_sum_example::dsl::*;
+
+        let connection = SqliteConnection::establish(":memory:").unwrap();
+        connection
+            .execute(
+                "CREATE TABLE my_sum_example (id integer primary key autoincrement, value integer)",
+            )
+            .unwrap();
+
+        my_sum::register_impl::<MySum, _>(&connection).unwrap();
+
+        let result = my_sum_example
+            .select(my_sum(value))
+            .get_result::<i32>(&connection);
+        assert_eq!(Ok(0), result);
+    }
+
+    sql_function! {
+        #[aggregate]
+        fn range_max(expr1: Integer, expr2: Integer, expr3: Integer) -> Nullable<Integer>;
+    }
+
+    #[derive(Default)]
+    struct RangeMax<T> {
+        max_value: Option<T>,
+    }
+
+    impl<T: Default + Ord + Copy + Clone> SqliteAggregateFunction<(T, T, T)> for RangeMax<T> {
+        type Output = Option<T>;
+
+        fn step(&mut self, (x0, x1, x2): (T, T, T)) {
+            let max = if x0 >= x1 && x0 >= x2 {
+                x0
+            } else if x1 >= x0 && x1 >= x2 {
+                x1
+            } else {
+                x2
+            };
+
+            self.max_value = match self.max_value {
+                Some(current_max_value) if max > current_max_value => Some(max),
+                None => Some(max),
+                _ => self.max_value,
+            };
+        }
+
+        fn finalize(aggregator: Option<Self>) -> Self::Output {
+            aggregator?.max_value
+        }
+    }
+
+    table! {
+        range_max_example {
+            id -> Integer,
+            value1 -> Integer,
+            value2 -> Integer,
+            value3 -> Integer,
+        }
+    }
+
+    #[test]
+    fn register_aggregate_multiarg_function() {
+        use self::range_max_example::dsl::*;
+
+        let connection = SqliteConnection::establish(":memory:").unwrap();
+        connection
+            .execute(
+                r#"CREATE TABLE range_max_example (
+                id integer primary key autoincrement,
+                value1 integer,
+                value2 integer,
+                value3 integer
+            )"#,
+            )
+            .unwrap();
+        connection.execute("INSERT INTO range_max_example (value1, value2, value3) VALUES (3, 2, 1), (2, 2, 2)").unwrap();
+
+        range_max::register_impl::<RangeMax<i32>, _, _, _>(&connection).unwrap();
+        let result = range_max_example
+            .select(range_max(value1, value2, value3))
+            .get_result::<Option<i32>>(&connection)
+            .unwrap();
+        assert_eq!(Some(3), result);
+    }
 }
diff --git a/diesel/src/sqlite/connection/raw.rs b/diesel/src/sqlite/connection/raw.rs
index e46e4f1..6c7aee0 100644
--- a/diesel/src/sqlite/connection/raw.rs
+++ b/diesel/src/sqlite/connection/raw.rs
@@ -1,14 +1,19 @@
 extern crate libsqlite3_sys as ffi;
 
-use std::ffi::{CStr, CString};
+use std::ffi::{CStr, CString, NulError};
 use std::io::{stderr, Write};
 use std::os::raw as libc;
 use std::ptr::NonNull;
 use std::{mem, ptr, slice, str};
 
+use super::functions::{build_sql_function_args, process_sql_function_result};
 use super::serialized_value::SerializedValue;
+use super::{Sqlite, SqliteAggregateFunction};
+use crate::deserialize::Queryable;
 use crate::result::Error::DatabaseError;
 use crate::result::*;
+use crate::serialize::ToSql;
+use crate::sql_types::HasSqlType;
 
 #[allow(missing_debug_implementations, missing_copy_implementations)]
 pub struct RawConnection {
@@ -76,11 +81,8 @@
             + Send
             + 'static,
     {
-        let fn_name = CString::new(fn_name)?;
-        let mut flags = ffi::SQLITE_UTF8;
-        if deterministic {
-            flags |= ffi::SQLITE_DETERMINISTIC;
-        }
+        let fn_name = Self::get_fn_name(fn_name)?;
+        let flags = Self::get_flags(deterministic);
         let callback_fn = Box::into_raw(Box::new(f));
 
         let result = unsafe {
@@ -97,6 +99,53 @@
             )
         };
 
+        Self::process_sql_function_result(result)
+    }
+
+    pub fn register_aggregate_function<ArgsSqlType, RetSqlType, Args, Ret, A>(
+        &self,
+        fn_name: &str,
+        num_args: usize,
+    ) -> QueryResult<()>
+    where
+        A: SqliteAggregateFunction<Args, Output = Ret> + 'static + Send,
+        Args: Queryable<ArgsSqlType, Sqlite>,
+        Ret: ToSql<RetSqlType, Sqlite>,
+        Sqlite: HasSqlType<RetSqlType>,
+    {
+        let fn_name = Self::get_fn_name(fn_name)?;
+        let flags = Self::get_flags(false);
+
+        let result = unsafe {
+            ffi::sqlite3_create_function_v2(
+                self.internal_connection.as_ptr(),
+                fn_name.as_ptr(),
+                num_args as _,
+                flags,
+                ptr::null_mut(),
+                None,
+                Some(run_aggregator_step_function::<_, _, _, _, A>),
+                Some(run_aggregator_final_function::<_, _, _, _, A>),
+                None,
+            )
+        };
+
+        Self::process_sql_function_result(result)
+    }
+
+    fn get_fn_name(fn_name: &str) -> Result<CString, NulError> {
+        Ok(CString::new(fn_name)?)
+    }
+
+    fn get_flags(deterministic: bool) -> i32 {
+        let mut flags = ffi::SQLITE_UTF8;
+        if deterministic {
+            flags |= ffi::SQLITE_DETERMINISTIC;
+        }
+        flags
+    }
+
+    fn process_sql_function_result(result: i32) -> Result<(), Error> {
         if result == ffi::SQLITE_OK {
             Ok(())
         } else {
@@ -194,6 +243,135 @@
     }
 }
 
+// We need a custom option type here, because the std lib one does not have guarantees about the discriminant values
+// See: https://github.com/rust-lang/rfcs/blob/master/text/2195-really-tagged-unions.md#opaque-tags
+#[repr(u8)]
+enum OptionalAggregator<A> {
+    // Discriminant is 0
+    None,
+    Some(A),
+}
+
+#[allow(warnings)]
+extern "C" fn run_aggregator_step_function<ArgsSqlType, RetSqlType, Args, Ret, A>(
+    ctx: *mut ffi::sqlite3_context,
+    num_args: libc::c_int,
+    value_ptr: *mut *mut ffi::sqlite3_value,
+) where
+    A: SqliteAggregateFunction<Args, Output = Ret> + 'static + Send,
+    Args: Queryable<ArgsSqlType, Sqlite>,
+    Ret: ToSql<RetSqlType, Sqlite>,
+    Sqlite: HasSqlType<RetSqlType>,
+{
+    unsafe {
+        // This block of unsafe code makes the following assumptions:
+        //
+        // * sqlite3_aggregate_context allocates sizeof::<OptionalAggregator<A>>
+        //   bytes of zeroed memory as documented here:
+        //   https://www.sqlite.org/c3ref/aggregate_context.html
+        //   A null pointer is returned if the requested size is negative or zero,
+        //   which should be impossible in theory. We check for that nevertheless.
+        //
+        // * OptionalAggregator::None has a discriminant of 0 as specified by
+        //   #[repr(u8)] + RFC 2195
+        //
+        // * If all bytes are zero, the discriminant is also zero, so we can
+        //   assume that we get OptionalAggregator::None in this case. This is
+        //   not UB as we only access the discriminant here, so we do not try
+        //   to read any other zeroed memory. After that we initialize our enum
+        //   by writing a correct value at this location via ptr::write_unaligned
+        //
+        // * We use ptr::write_unaligned as we did not find any guarantee that
+        //   the memory will have the correct alignment.
+        //   (Note from weiznich: I would assume that it is aligned correctly,
+        //    but we cannot guarantee it, so better safe than sorry)
+        let aggregate_context = ffi::sqlite3_aggregate_context(
+            ctx,
+            std::mem::size_of::<OptionalAggregator<A>>() as i32,
+        );
+        let mut aggregate_context = NonNull::new(aggregate_context as *mut OptionalAggregator<A>);
+        let aggregator = match aggregate_context.map(|a| &mut *a.as_ptr()) {
+            Some(&mut OptionalAggregator::Some(ref mut agg)) => agg,
+            Some(mut a_ptr @ &mut OptionalAggregator::None) => {
+                ptr::write_unaligned(a_ptr as *mut _, OptionalAggregator::Some(A::default()));
+                if let &mut OptionalAggregator::Some(ref mut agg) = a_ptr {
+                    agg
+                } else {
+                    unreachable!(
+                        "We've written the aggregator above to that location, it must be there"
+                    )
+                }
+            }
+            None => {
+                null_aggregate_context_error(ctx);
+                return;
+            }
+        };
+
+        let mut f = |args: &[*mut ffi::sqlite3_value]| -> Result<(), Error> {
+            let args = build_sql_function_args::<ArgsSqlType, Args>(args)?;
+
+            Ok(aggregator.step(args))
+        };
+
+        let args = slice::from_raw_parts(value_ptr, num_args as _);
+        match f(args) {
+            Err(e) => {
+                let msg = e.to_string();
+                ffi::sqlite3_result_error(ctx, msg.as_ptr() as *const _, msg.len() as _);
+            }
+            _ => (),
+        };
+    }
+}
+
+extern "C" fn run_aggregator_final_function<ArgsSqlType, RetSqlType, Args, Ret, A>(
+    ctx: *mut ffi::sqlite3_context,
+) where
+    A: SqliteAggregateFunction<Args, Output = Ret> + 'static + Send,
+    Args: Queryable<ArgsSqlType, Sqlite>,
+    Ret: ToSql<RetSqlType, Sqlite>,
+    Sqlite: HasSqlType<RetSqlType>,
+{
+    unsafe {
+        // Within the xFinal callback, it is customary to set nBytes to 0 so no pointless memory
+        // allocations occur; a null pointer is returned in that case.
+        // See: https://www.sqlite.org/c3ref/aggregate_context.html
+        //
+        // For the reasoning about the safety of the OptionalAggregator handling
+        // see the comment in run_aggregator_step_function.
+        let aggregate_context = ffi::sqlite3_aggregate_context(ctx, 0);
+        let mut aggregate_context = NonNull::new(aggregate_context as *mut OptionalAggregator<A>);
+        let aggregator = match aggregate_context {
+            Some(ref mut a) => match std::mem::replace(a.as_mut(), OptionalAggregator::None) {
+                OptionalAggregator::Some(agg) => Some(agg),
+                OptionalAggregator::None => unreachable!("We've written to the aggregator in the xStep callback. If xStep was never called, then ffi::sqlite3_aggregate_context() would have returned a NULL pointer")
+            },
+            None => None,
+        };
+
+        let result = A::finalize(aggregator);
+
+        match process_sql_function_result::<RetSqlType, Ret>(result) {
+            Ok(value) => value.result_of(ctx),
+            Err(e) => {
+                let msg = e.to_string();
+                ffi::sqlite3_result_error(ctx, msg.as_ptr() as *const _, msg.len() as _);
+            }
+        }
+    }
+}
+
+unsafe fn null_aggregate_context_error(ctx: *mut ffi::sqlite3_context) {
+    static NULL_AG_CTX_ERR: &str = "An unknown error occurred. sqlite3_aggregate_context returned a null pointer. This should never happen.";
+
+    ffi::sqlite3_result_error(
+        ctx,
+        NULL_AG_CTX_ERR.as_ptr() as *const _ as *const _,
+        NULL_AG_CTX_ERR.len() as _,
+    );
+}
+
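The comment block in `run_aggregator_step_function` is the heart of the safety argument, so here is a standalone sketch (not part of the patch) of the layout guarantee it relies on: with `#[repr(u8)]` the tag is a `u8` at offset 0 and the first variant gets tag 0, which is why all-zero memory handed out by `sqlite3_aggregate_context` may be read as `None`.

```rust
// Minimal sketch of the RFC 2195 layout assumption used above.
#[allow(dead_code)]
#[repr(u8)]
enum OptionalAggregator<A> {
    None,     // tag 0 -- the same value as freshly zeroed context memory
    Some(A),  // tag 1
}

fn main() {
    // With #[repr(u8)] the discriminant is stored as a u8 at offset 0,
    // so reading the first byte is enough to tell the variants apart.
    let tag =
        |v: &OptionalAggregator<u64>| unsafe { *(v as *const OptionalAggregator<u64> as *const u8) };
    assert_eq!(tag(&OptionalAggregator::None), 0);
    assert_eq!(tag(&OptionalAggregator::Some(42)), 1);
}
```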
 extern "C" fn destroy_boxed_fn<F>(data: *mut libc::c_void)
 where
     F: FnMut(&RawConnection, &[*mut ffi::sqlite3_value]) -> QueryResult<SerializedValue>
diff --git a/diesel/src/sqlite/connection/stmt.rs b/diesel/src/sqlite/connection/stmt.rs
index 534f421..07fe298 100644
--- a/diesel/src/sqlite/connection/stmt.rs
+++ b/diesel/src/sqlite/connection/stmt.rs
@@ -103,6 +103,8 @@
             DatabaseErrorKind::UniqueViolation
         }
         ffi::SQLITE_CONSTRAINT_FOREIGNKEY => DatabaseErrorKind::ForeignKeyViolation,
+        ffi::SQLITE_CONSTRAINT_NOTNULL => DatabaseErrorKind::NotNullViolation,
+        ffi::SQLITE_CONSTRAINT_CHECK => DatabaseErrorKind::CheckViolation,
         _ => DatabaseErrorKind::__Unknown,
     };
     DatabaseError(error_kind, error_information)
diff --git a/diesel/src/sqlite/mod.rs b/diesel/src/sqlite/mod.rs
index 80b5299..cbd0723 100644
--- a/diesel/src/sqlite/mod.rs
+++ b/diesel/src/sqlite/mod.rs
@@ -13,3 +13,22 @@
 pub use self::backend::{Sqlite, SqliteType};
 pub use self::connection::SqliteConnection;
 pub use self::query_builder::SqliteQueryBuilder;
+
+/// Trait for the implementation of a SQLite aggregate function
+///
+/// This trait is to be used in conjunction with the `sql_function!`
+/// macro for defining a custom SQLite aggregate function. See
+/// the documentation [there](../prelude/macro.sql_function.html) for details.
+pub trait SqliteAggregateFunction<Args>: Default {
+    /// The result type of the SQLite aggregate function
+    type Output;
+
+    /// The `step()` method is called once for every record of the query
+    fn step(&mut self, args: Args);
+
+    /// After the last row has been processed, the `finalize()` method is
+    /// called to compute the result of the aggregate function. If no rows
+/// were processed, `aggregator` will be `None` and `finalize()` can be
+/// used to specify a default result.
+    fn finalize(aggregator: Option<Self>) -> Self::Output;
+}
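To make the new trait concrete, here is a minimal sketch of a user-defined aggregate modelled on the `my_sum` tests earlier in this diff (the struct layout and the summed type are illustrative, not prescribed by the trait):

```rust
#[macro_use]
extern crate diesel;
use diesel::sql_types::Integer;
use diesel::sqlite::{SqliteAggregateFunction, SqliteConnection};

sql_function! {
    #[aggregate]
    fn my_sum(x: Integer) -> Integer;
}

#[derive(Default)]
struct MySum {
    sum: i32,
}

impl SqliteAggregateFunction<i32> for MySum {
    type Output = i32;

    // Called once per row with the already deserialized argument
    fn step(&mut self, expr: i32) {
        self.sum += expr;
    }

    // Called once at the end; `None` means no rows were aggregated
    fn finalize(aggregator: Option<Self>) -> Self::Output {
        aggregator.map(|a| a.sum).unwrap_or_default()
    }
}

fn register(connection: &SqliteConnection) {
    // Mirrors the tests above: the implementation has to be registered on
    // every connection that should be able to call `my_sum` in a query.
    my_sum::register_impl::<MySum, _>(connection).unwrap();
}
```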
diff --git a/diesel/src/sqlite/query_builder/limit_offset.rs b/diesel/src/sqlite/query_builder/limit_offset.rs
new file mode 100644
index 0000000..8d713a3
--- /dev/null
+++ b/diesel/src/sqlite/query_builder/limit_offset.rs
@@ -0,0 +1,126 @@
+use crate::query_builder::limit_clause::{LimitClause, NoLimitClause};
+use crate::query_builder::limit_offset_clause::{BoxedLimitOffsetClause, LimitOffsetClause};
+use crate::query_builder::offset_clause::{NoOffsetClause, OffsetClause};
+use crate::query_builder::{AstPass, IntoBoxedClause, QueryFragment};
+use crate::result::QueryResult;
+use crate::sqlite::Sqlite;
+
+impl QueryFragment<Sqlite> for LimitOffsetClause<NoLimitClause, NoOffsetClause> {
+    fn walk_ast(&self, _out: AstPass<Sqlite>) -> QueryResult<()> {
+        Ok(())
+    }
+}
+
+impl<L> QueryFragment<Sqlite> for LimitOffsetClause<LimitClause<L>, NoOffsetClause>
+where
+    LimitClause<L>: QueryFragment<Sqlite>,
+{
+    fn walk_ast(&self, out: AstPass<Sqlite>) -> QueryResult<()> {
+        self.limit_clause.walk_ast(out)?;
+        Ok(())
+    }
+}
+
+impl<O> QueryFragment<Sqlite> for LimitOffsetClause<NoLimitClause, OffsetClause<O>>
+where
+    OffsetClause<O>: QueryFragment<Sqlite>,
+{
+    fn walk_ast(&self, mut out: AstPass<Sqlite>) -> QueryResult<()> {
+        // Sqlite requires a limit clause in front of any offset clause.
+        // Using `LIMIT -1` is the same as not having any limit clause:
+        // https://sqlite.org/lang_select.html
+        out.push_sql(" LIMIT -1 ");
+        self.offset_clause.walk_ast(out)?;
+        Ok(())
+    }
+}
+
+impl<L, O> QueryFragment<Sqlite> for LimitOffsetClause<LimitClause<L>, OffsetClause<O>>
+where
+    LimitClause<L>: QueryFragment<Sqlite>,
+    OffsetClause<O>: QueryFragment<Sqlite>,
+{
+    fn walk_ast(&self, mut out: AstPass<Sqlite>) -> QueryResult<()> {
+        self.limit_clause.walk_ast(out.reborrow())?;
+        self.offset_clause.walk_ast(out.reborrow())?;
+        Ok(())
+    }
+}
+
+impl<'a> QueryFragment<Sqlite> for BoxedLimitOffsetClause<'a, Sqlite> {
+    fn walk_ast(&self, mut out: AstPass<Sqlite>) -> QueryResult<()> {
+        match (self.limit.as_ref(), self.offset.as_ref()) {
+            (Some(limit), Some(offset)) => {
+                limit.walk_ast(out.reborrow())?;
+                offset.walk_ast(out.reborrow())?;
+            }
+            (Some(limit), None) => {
+                limit.walk_ast(out.reborrow())?;
+            }
+            (None, Some(offset)) => {
+                // See the `QueryFragment` implementation for `LimitOffsetClause` for details.
+                out.push_sql(" LIMIT -1 ");
+                offset.walk_ast(out.reborrow())?;
+            }
+            (None, None) => {}
+        }
+        Ok(())
+    }
+}
+
+// We have explicit impls here because we need to set `Some`/`None` for the
+// clauses accordingly; otherwise we could not match on them in the
+// `QueryFragment` impl above.
+impl<'a> IntoBoxedClause<'a, Sqlite> for LimitOffsetClause<NoLimitClause, NoOffsetClause> {
+    type BoxedClause = BoxedLimitOffsetClause<'a, Sqlite>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: None,
+            offset: None,
+        }
+    }
+}
+
+impl<'a, L> IntoBoxedClause<'a, Sqlite> for LimitOffsetClause<LimitClause<L>, NoOffsetClause>
+where
+    L: QueryFragment<Sqlite> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Sqlite>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: Some(Box::new(self.limit_clause)),
+            offset: None,
+        }
+    }
+}
+
+impl<'a, O> IntoBoxedClause<'a, Sqlite> for LimitOffsetClause<NoLimitClause, OffsetClause<O>>
+where
+    O: QueryFragment<Sqlite> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Sqlite>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: None,
+            offset: Some(Box::new(self.offset_clause)),
+        }
+    }
+}
+
+impl<'a, L, O> IntoBoxedClause<'a, Sqlite> for LimitOffsetClause<LimitClause<L>, OffsetClause<O>>
+where
+    L: QueryFragment<Sqlite> + Send + 'a,
+    O: QueryFragment<Sqlite> + Send + 'a,
+{
+    type BoxedClause = BoxedLimitOffsetClause<'a, Sqlite>;
+
+    fn into_boxed(self) -> Self::BoxedClause {
+        BoxedLimitOffsetClause {
+            limit: Some(Box::new(self.limit_clause)),
+            offset: Some(Box::new(self.offset_clause)),
+        }
+    }
+}
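A quick sketch of what the offset-only branches above produce (hypothetical `users` table; the assertion only checks for the injected `LIMIT -1`, since the exact formatting of the generated SQL is an implementation detail):

```rust
#[macro_use]
extern crate diesel;
use diesel::prelude::*;
use diesel::sqlite::Sqlite;

table! {
    users {
        id -> Integer,
        name -> Text,
    }
}

fn main() {
    // An offset without a limit: SQLite only accepts OFFSET after a LIMIT,
    // so the impls above inject `LIMIT -1` (equivalent to having no limit).
    let query = users::table.into_boxed::<Sqlite>().offset(5);
    let sql = diesel::debug_query::<Sqlite, _>(&query).to_string();
    assert!(sql.contains("LIMIT -1"));
}
```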
diff --git a/diesel/src/sqlite/query_builder/mod.rs b/diesel/src/sqlite/query_builder/mod.rs
index dd98659..546c963 100644
--- a/diesel/src/sqlite/query_builder/mod.rs
+++ b/diesel/src/sqlite/query_builder/mod.rs
@@ -4,6 +4,8 @@
 use crate::query_builder::QueryBuilder;
 use crate::result::QueryResult;
 
+mod limit_offset;
+
 /// Constructs SQL queries for use with the SQLite backend
 #[allow(missing_debug_implementations)]
 #[derive(Default)]
diff --git a/diesel/src/type_impls/date_and_time.rs b/diesel/src/type_impls/date_and_time.rs
index bb5662f..49fc57d 100644
--- a/diesel/src/type_impls/date_and_time.rs
+++ b/diesel/src/type_impls/date_and_time.rs
@@ -1,5 +1,7 @@
 #![allow(dead_code)]
 
+use crate::deserialize::FromSqlRow;
+use crate::expression::AsExpression;
 use std::time::SystemTime;
 
 #[derive(FromSqlRow, AsExpression)]
@@ -11,6 +13,8 @@
 mod chrono {
     extern crate chrono;
     use self::chrono::*;
+    use crate::deserialize::FromSqlRow;
+    use crate::expression::AsExpression;
     use crate::sql_types::{Date, Time, Timestamp};
 
     #[derive(FromSqlRow, AsExpression)]
diff --git a/diesel/src/type_impls/decimal.rs b/diesel/src/type_impls/decimal.rs
index 130a8e4..17170b3 100644
--- a/diesel/src/type_impls/decimal.rs
+++ b/diesel/src/type_impls/decimal.rs
@@ -4,6 +4,8 @@
 mod bigdecimal {
     extern crate bigdecimal;
     use self::bigdecimal::BigDecimal;
+    use crate::deserialize::FromSqlRow;
+    use crate::expression::AsExpression;
     use crate::sql_types::Numeric;
 
     #[derive(FromSqlRow, AsExpression)]
diff --git a/diesel/src/type_impls/tuples.rs b/diesel/src/type_impls/tuples.rs
index 0f47045..a544c10 100644
--- a/diesel/src/type_impls/tuples.rs
+++ b/diesel/src/type_impls/tuples.rs
@@ -4,7 +4,7 @@
 use crate::backend::Backend;
 use crate::deserialize::{self, FromSqlRow, Queryable, QueryableByName};
 use crate::expression::{
-    AppearsOnTable, AsExpression, AsExpressionList, Expression, NonAggregate, SelectableExpression,
+    AppearsOnTable, AsExpression, AsExpressionList, Expression, SelectableExpression, ValidGrouping,
 };
 use crate::insertable::{CanInsertInSingleQuery, InsertValues, Insertable};
 use crate::query_builder::*;
@@ -70,7 +70,7 @@
                 }
             }
 
-            impl<$($T: Expression + NonAggregate),+> Expression for ($($T,)+) {
+            impl<$($T: Expression),+> Expression for ($($T,)+) {
                 type SqlType = ($(<$T as Expression>::SqlType,)+);
             }
 
@@ -114,8 +114,11 @@
                 const HAS_STATIC_QUERY_ID: bool = $($T::HAS_STATIC_QUERY_ID &&)+ true;
             }
 
-            impl<$($T: Expression + NonAggregate),+> NonAggregate for ($($T,)+) {
-            }
+            const _: () = {
+                #[derive(ValidGrouping)]
+                #[diesel(foreign_derive)]
+                struct TupleWrapper<$($T,)*>(($($T,)*));
+            };
 
             impl<$($T,)+ Tab> UndecoratedInsertRecord<Tab> for ($($T,)+)
             where
diff --git a/diesel/src/upsert/mod.rs b/diesel/src/upsert/mod.rs
new file mode 100644
index 0000000..0d7e4d0
--- /dev/null
+++ b/diesel/src/upsert/mod.rs
@@ -0,0 +1,26 @@
+//! Types and functions related to PG's and Sqlite's `ON CONFLICT` clause
+//!
+//! Upsert is currently supported by diesel for the following database systems:
+//!
+//! * PostgreSQL version 9.5 or newer
+//! * Sqlite3 version 3.24.0 or newer
+//!
+//! See [the methods on `InsertStatement`](../query_builder/struct.InsertStatement.html#impl-2)
+//! for usage examples.
+//!
+//! Constructing an upsert statement from an existing select statement
+//! requires a where clause on sqlite due to an ambiguity in its
+//! parser. See [the corresponding documentation](https://www.sqlite.org/lang_UPSERT.html)
+//! for details.
+use crate::query_builder::upsert::on_conflict_actions::Excluded;
+
+mod on_conflict_extension;
+
+pub use self::on_conflict_extension::*;
+#[cfg(feature = "postgres")]
+pub use crate::pg::query_builder::on_constraint::*;
+
+/// Represents `excluded.column` in an `ON CONFLICT DO UPDATE` clause.
+pub fn excluded<T>(excluded: T) -> Excluded<T> {
+    Excluded::new(excluded)
+}
diff --git a/diesel/src/pg/upsert/on_conflict_docs_setup.rs b/diesel/src/upsert/on_conflict_docs_setup.rs
similarity index 67%
rename from diesel/src/pg/upsert/on_conflict_docs_setup.rs
rename to diesel/src/upsert/on_conflict_docs_setup.rs
index d3f7c74..fa92060 100644
--- a/diesel/src/pg/upsert/on_conflict_docs_setup.rs
+++ b/diesel/src/upsert/on_conflict_docs_setup.rs
@@ -1,5 +1,5 @@
-include!("../../doctest_setup.rs");
-use schema::users;
+include!("../doctest_setup.rs");
+use crate::schema::users;
 
 #[derive(Clone, Copy, Insertable, AsChangeset)]
 #[table_name="users"]
diff --git a/diesel/src/pg/upsert/on_conflict_extension.rs b/diesel/src/upsert/on_conflict_extension.rs
similarity index 79%
rename from diesel/src/pg/upsert/on_conflict_extension.rs
rename to diesel/src/upsert/on_conflict_extension.rs
index a23dfe7..38df509 100644
--- a/diesel/src/pg/upsert/on_conflict_extension.rs
+++ b/diesel/src/upsert/on_conflict_extension.rs
@@ -1,12 +1,13 @@
-use super::on_conflict_actions::*;
-use super::on_conflict_clause::*;
-use super::on_conflict_target::*;
+use crate::query_builder::upsert::into_conflict_clause::IntoConflictValueClause;
+use crate::query_builder::upsert::on_conflict_actions::*;
+use crate::query_builder::upsert::on_conflict_clause::*;
+use crate::query_builder::upsert::on_conflict_target::*;
 use crate::query_builder::{AsChangeset, InsertStatement, UndecoratedInsertRecord};
 use crate::query_source::QuerySource;
 
 impl<T, U, Op, Ret> InsertStatement<T, U, Op, Ret>
 where
-    U: UndecoratedInsertRecord<T>,
+    U: UndecoratedInsertRecord<T> + IntoConflictValueClause,
 {
     /// Adds `ON CONFLICT DO NOTHING` to the insert statement, without
     /// specifying any columns or constraints to restrict the conflict to.
@@ -16,13 +17,16 @@
     /// ### Single Record
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
+    /// #     #[cfg(feature = "sqlite")]
+    /// #     conn.execute("DELETE FROM users").unwrap();
     /// let user = User { id: 1, name: "Sean", };
     ///
     /// let inserted_row_count = diesel::insert_into(users)
@@ -37,31 +41,40 @@
     ///     .execute(&conn);
     /// assert_eq!(Ok(0), inserted_row_count);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     ///
     /// ### Vec of Records
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
+    /// # #[cfg(feature = "postgres")]
     /// let user = User { id: 1, name: "Sean", };
     ///
+    /// # #[cfg(feature = "postgres")]
     /// let inserted_row_count = diesel::insert_into(users)
     ///     .values(&vec![user, user])
     ///     .on_conflict_do_nothing()
     ///     .execute(&conn);
+    /// # #[cfg(feature = "postgres")]
     /// assert_eq!(Ok(1), inserted_row_count);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     pub fn on_conflict_do_nothing(
         self,
-    ) -> InsertStatement<T, OnConflictValues<U, NoConflictTarget, DoNothing>, Op, Ret> {
-        self.replace_values(OnConflictValues::do_nothing)
+    ) -> InsertStatement<T, OnConflictValues<U::ValueClause, NoConflictTarget, DoNothing>, Op, Ret>
+    {
+        self.replace_values(|values| OnConflictValues::do_nothing(values.into_value_clause()))
     }
 
     /// Adds an `ON CONFLICT` to the insert statement, if a conflict occurs
@@ -78,13 +91,16 @@
     /// ### Specifying a column as the target
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
+    /// #     #[cfg(feature = "sqlite")]
+    /// #     conn.execute("DELETE FROM users").unwrap();
     /// conn.execute("CREATE UNIQUE INDEX users_name ON users (name)").unwrap();
     /// let user = User { id: 1, name: "Sean", };
     /// let same_name_different_id = User { id: 2, name: "Sean" };
@@ -106,13 +122,14 @@
     ///     .execute(&conn);
     /// assert!(pk_conflict_result.is_err());
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     ///
     /// ### Specifying multiple columns as the target
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
-    /// # include!("../../doctest_setup.rs");
+    /// # include!("../doctest_setup.rs");
     /// #
     /// # table! {
     /// #     users {
@@ -130,9 +147,10 @@
     /// #     hair_color: &'a str,
     /// # }
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
-    /// use diesel::pg::upsert::*;
+    /// use diesel::upsert::*;
     ///
     /// #     let conn = establish_connection();
     /// #     conn.execute("DROP TABLE users").unwrap();
@@ -158,22 +176,24 @@
     ///     .execute(&conn);
     /// assert_eq!(Ok(0), inserted_row_count);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     ///
     /// See the documentation for [`on_constraint`] and [`do_update`] for
     /// more examples.
     ///
-    /// [`on_constraint`]: ../pg/upsert/fn.on_constraint.html
-    /// [`do_update`]: ../pg/upsert/struct.IncompleteOnConflict.html#method.do_update
+    /// [`on_constraint`]: ../upsert/fn.on_constraint.html
+    /// [`do_update`]: ../upsert/struct.IncompleteOnConflict.html#method.do_update
     pub fn on_conflict<Target>(
         self,
         target: Target,
-    ) -> IncompleteOnConflict<Self, ConflictTarget<Target>>
+    ) -> IncompleteOnConflict<InsertStatement<T, U::ValueClause, Op, Ret>, ConflictTarget<Target>>
     where
         ConflictTarget<Target>: OnConflictTarget<T>,
     {
         IncompleteOnConflict {
-            stmt: self,
+            stmt: self.replace_values(IntoConflictValueClause::into_value_clause),
             target: ConflictTarget(target),
         }
     }
@@ -217,13 +237,16 @@
     /// ## Set specific value on conflict
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
+    /// #     #[cfg(feature = "sqlite")]
+    /// #     conn.execute("DELETE FROM users").unwrap();
     /// let user = User { id: 1, name: "Pascal" };
     /// let user2 = User { id: 1, name: "Sean" };
     ///
@@ -240,18 +263,23 @@
     /// let users_in_db = users.load(&conn);
     /// assert_eq!(Ok(vec![(1, "I DONT KNOW ANYMORE".to_string())]), users_in_db);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     ///
     /// ## Set `AsChangeset` struct on conflict
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
+    /// #     #[cfg(feature = "sqlite")]
+    /// #     conn.execute("DELETE FROM users").unwrap();
     /// let user = User { id: 1, name: "Pascal" };
     /// let user2 = User { id: 1, name: "Sean" };
     ///
@@ -268,37 +296,47 @@
     /// let users_in_db = users.load(&conn);
     /// assert_eq!(Ok(vec![(1, "Sean".to_string())]), users_in_db);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     ///
     /// ## Use `excluded` to get the rejected value
     ///
     /// ```rust
-    /// # #[macro_use] extern crate diesel;
     /// # include!("on_conflict_docs_setup.rs");
     /// #
+    /// # #[cfg(any(feature = "sqlite", feature = "postgres"))]
     /// # fn main() {
     /// #     use self::users::dsl::*;
-    /// use diesel::pg::upsert::excluded;
+    /// use diesel::upsert::excluded;
     ///
     /// #     let conn = establish_connection();
+    /// #     #[cfg(feature = "postgres")]
     /// #     conn.execute("TRUNCATE TABLE users").unwrap();
     /// let user = User { id: 1, name: "Pascal" };
     /// let user2 = User { id: 1, name: "Sean" };
     /// let user3 = User { id: 2, name: "Tess" };
     ///
+    /// # #[cfg(feature = "postgres")]
     /// assert_eq!(Ok(1), diesel::insert_into(users).values(&user).execute(&conn));
     ///
+    /// # #[cfg(feature = "postgres")]
     /// let insert_count = diesel::insert_into(users)
     ///     .values(&vec![user2, user3])
     ///     .on_conflict(id)
     ///     .do_update()
     ///     .set(name.eq(excluded(name)))
     ///     .execute(&conn);
+    /// # #[cfg(feature = "postgres")]
     /// assert_eq!(Ok(2), insert_count);
     ///
+    /// # #[cfg(feature = "postgres")]
     /// let users_in_db = users.load(&conn);
+    /// # #[cfg(feature = "postgres")]
     /// assert_eq!(Ok(vec![(1, "Sean".to_string()), (2, "Tess".to_string())]), users_in_db);
     /// # }
+    /// # #[cfg(feature = "mysql")]
+    /// # fn main() {}
     /// ```
     pub fn do_update(self) -> IncompleteDoUpdate<Stmt, Target> {
         IncompleteDoUpdate {
diff --git a/diesel_cli/Cargo.toml b/diesel_cli/Cargo.toml
index 03f6dfd..d2d6db8 100644
--- a/diesel_cli/Cargo.toml
+++ b/diesel_cli/Cargo.toml
@@ -10,6 +10,7 @@
 repository = "https://github.com/diesel-rs/diesel"
 keywords = ["diesel", "migrations", "cli"]
 autotests = false
+edition = "2018"
 
 [[bin]]
 name = "diesel"
@@ -18,16 +19,23 @@
 [dependencies]
 chrono = "0.4"
 clap = "2.27"
-diesel = { version = "~1.4.0", default-features = false }
 dotenv = ">=0.8, <0.11"
 heck = "0.3.1"
-migrations_internals = "~1.4.0"
 serde = { version = "1.0.0", features = ["derive"] }
 tempfile = "3.0.0"
 toml = "0.4.6"
 url = { version = "2.1.0", optional = true }
 barrel = { version = ">= 0.5.0", optional = true, features = ["diesel"] }
-libsqlite3-sys = { version = ">=0.8.0, <0.17.0", optional = true, features = ["min_sqlite_version_3_7_16"] }
+libsqlite3-sys = { version = ">=0.8.0, <0.18.0", optional = true, features = ["min_sqlite_version_3_7_16"] }
+
+[dependencies.diesel]
+version = "~2.0.0"
+path = "../diesel"
+default-features = false
+
+[dependencies.migrations_internals]
+version = "~1.4.0"
+path = "../diesel_migrations/migrations_internals"
 
 [dev-dependencies]
 difference = "1.0"
diff --git a/diesel_cli/README.md b/diesel_cli/README.md
index 7c6186a..f2adadf 100644
--- a/diesel_cli/README.md
+++ b/diesel_cli/README.md
@@ -67,7 +67,7 @@
 ways that you can set it:
 
 * Set it as an environment variable manually
-* Set it as an environment variable using [rust-dotenv][rust-dotenv]
+* Set it as an environment variable using [dotenv](https://github.com/dotenv-rs/dotenv#examples)
 * Pass it directly by adding the `--database-url` flag
 
 As an alternative to running migrations with the CLI, you can call
diff --git a/diesel_cli/src/config.rs b/diesel_cli/src/config.rs
index a8573af..ecdf67f 100644
--- a/diesel_cli/src/config.rs
+++ b/diesel_cli/src/config.rs
@@ -1,13 +1,13 @@
 use clap::ArgMatches;
+use serde::Deserialize;
 use std::env;
 use std::error::Error;
 use std::fs;
 use std::io::Read;
-use std::path::PathBuf;
-use toml;
+use std::path::{Path, PathBuf};
 
 use super::find_project_root;
-use print_schema;
+use crate::print_schema;
 
 #[derive(Deserialize, Default)]
 #[serde(deny_unknown_fields)]
@@ -32,12 +32,21 @@
 
         if path.exists() {
             let mut bytes = Vec::new();
-            fs::File::open(path)?.read_to_end(&mut bytes)?;
-            toml::from_slice(&bytes).map_err(Into::into)
+            fs::File::open(&path)?.read_to_end(&mut bytes)?;
+            let mut result = toml::from_slice::<Self>(&bytes)?;
+            result.set_relative_path_base(path.parent().unwrap());
+            Ok(result)
         } else {
             Ok(Self::default())
         }
     }
+
+    fn set_relative_path_base(&mut self, base: &Path) {
+        self.print_schema.set_relative_path_base(base);
+        if let Some(ref mut migration) = self.migrations_directory {
+            migration.set_relative_path_base(base);
+        }
+    }
 }
 
 #[derive(Default, Deserialize)]
@@ -65,6 +74,20 @@
     pub fn import_types(&self) -> Option<&[String]> {
         self.import_types.as_ref().map(|v| &**v)
     }
+
+    fn set_relative_path_base(&mut self, base: &Path) {
+        if let Some(ref mut file) = self.file {
+            if file.is_relative() {
+                *file = base.join(&file);
+            }
+        }
+
+        if let Some(ref mut patch_file) = self.patch_file {
+            if patch_file.is_relative() {
+                *patch_file = base.join(&patch_file);
+            }
+        }
+    }
 }
 
 #[derive(Default, Deserialize)]
@@ -72,3 +95,11 @@
 pub struct MigrationsDirectory {
     pub dir: PathBuf,
 }
+
+impl MigrationsDirectory {
+    fn set_relative_path_base(&mut self, base: &Path) {
+        if self.dir.is_relative() {
+            self.dir = base.join(&self.dir);
+        }
+    }
+}
diff --git a/diesel_cli/src/database.rs b/diesel_cli/src/database.rs
index 628b0a3..cf976ca 100644
--- a/diesel_cli/src/database.rs
+++ b/diesel_cli/src/database.rs
@@ -6,7 +6,7 @@
 use diesel::*;
 use migrations_internals as migrations;
 
-use database_error::{DatabaseError, DatabaseResult};
+use crate::database_error::{DatabaseError, DatabaseResult};
 
 use std::env;
 use std::error::Error;
@@ -134,13 +134,15 @@
         $database_url:expr,
         $($func:ident)::+ ($($args:expr),*)
     ) => {
-        match ::database::InferConnection::establish(&$database_url).unwrap_or_else(handle_error) {
+        match crate::database::InferConnection::establish(&$database_url)
+            .unwrap_or_else(handle_error)
+        {
             #[cfg(feature="postgres")]
-            ::database::InferConnection::Pg(ref conn) => $($func)::+ (conn, $($args),*),
+            crate::database::InferConnection::Pg(ref conn) => $($func)::+ (conn, $($args),*),
             #[cfg(feature="sqlite")]
-            ::database::InferConnection::Sqlite(ref conn) => $($func)::+ (conn, $($args),*),
+            crate::database::InferConnection::Sqlite(ref conn) => $($func)::+ (conn, $($args),*),
             #[cfg(feature="mysql")]
-            ::database::InferConnection::Mysql(ref conn) => $($func)::+ (conn, $($args),*),
+            crate::database::InferConnection::Mysql(ref conn) => $($func)::+ (conn, $($args),*),
         }
     };
 }
diff --git a/diesel_cli/src/database_error.rs b/diesel_cli/src/database_error.rs
index 6cd6937..2c467ea 100644
--- a/diesel_cli/src/database_error.rs
+++ b/diesel_cli/src/database_error.rs
@@ -2,6 +2,7 @@
 
 use std::convert::From;
 use std::error::Error;
+use std::path::PathBuf;
 use std::{fmt, io};
 
 use self::DatabaseError::*;
@@ -10,7 +11,7 @@
 
 #[derive(Debug)]
 pub enum DatabaseError {
-    CargoTomlNotFound,
+    ProjectRootNotFound(PathBuf),
     DatabaseUrlMissing,
     IoError(io::Error),
     QueryError(result::Error),
@@ -40,8 +41,8 @@
 impl fmt::Display for DatabaseError {
     fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match *self {
-            CargoTomlNotFound => {
-                f.write_str("Unable to find Cargo.toml in this directory or any parent directories.")
+            ProjectRootNotFound(ref p) => {
+                write!(f, "Unable to find diesel.toml or Cargo.toml in {:?} or any parent directories.", p)
             }
             DatabaseUrlMissing => {
                 f.write_str("The --database-url argument must be passed, or the DATABASE_URL environment variable must be set.")
@@ -65,7 +66,7 @@
 impl PartialEq for DatabaseError {
     fn eq(&self, other: &Self) -> bool {
         match (self, other) {
-            (&CargoTomlNotFound, &CargoTomlNotFound) => true,
+            (&ProjectRootNotFound(_), &ProjectRootNotFound(_)) => true,
             _ => false,
         }
     }
diff --git a/diesel_cli/src/infer_schema_internals/data_structures.rs b/diesel_cli/src/infer_schema_internals/data_structures.rs
index d39950f..1e4fdd7 100644
--- a/diesel_cli/src/infer_schema_internals/data_structures.rs
+++ b/diesel_cli/src/infer_schema_internals/data_structures.rs
@@ -53,9 +53,9 @@
 #[derive(Debug)]
 pub struct ColumnDefinition {
     pub sql_name: String,
+    pub rust_name: String,
     pub ty: ColumnType,
     pub docs: String,
-    pub rust_name: Option<String>,
 }
 
 impl ColumnInformation {
diff --git a/diesel_cli/src/infer_schema_internals/foreign_keys.rs b/diesel_cli/src/infer_schema_internals/foreign_keys.rs
index e272770..401727a 100644
--- a/diesel_cli/src/infer_schema_internals/foreign_keys.rs
+++ b/diesel_cli/src/infer_schema_internals/foreign_keys.rs
@@ -3,7 +3,7 @@
 use super::data_structures::ForeignKeyConstraint;
 use super::inference::get_primary_keys;
 use super::table_data::TableName;
-use database::InferConnection;
+use crate::database::InferConnection;
 
 pub fn remove_unsafe_foreign_keys_for_codegen(
     database_url: &str,
diff --git a/diesel_cli/src/infer_schema_internals/inference.rs b/diesel_cli/src/infer_schema_internals/inference.rs
index 3302b6d..b6ebcc6 100644
--- a/diesel_cli/src/infer_schema_internals/inference.rs
+++ b/diesel_cli/src/infer_schema_internals/inference.rs
@@ -4,7 +4,7 @@
 
 use super::data_structures::*;
 use super::table_data::*;
-use database::InferConnection;
+use crate::database::InferConnection;
 
 static RESERVED_NAMES: &[&str] = &[
     "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
@@ -14,7 +14,7 @@
     "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
 ];
 
-fn is_reserved(name: &str) -> bool {
+fn is_reserved_name(name: &str) -> bool {
     RESERVED_NAMES.contains(&name)
         || (
             // Names ending in an underscore are not considered reserved so that we
@@ -23,6 +23,38 @@
         )
 }
 
+fn contains_unmappable_chars(name: &str) -> bool {
+    // Rust identifier names are restricted to [a-zA-Z0-9_].
+    !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
+}
+
+pub fn rust_name_for_sql_name(sql_name: &str) -> String {
+    if is_reserved_name(sql_name) {
+        format!("{}_", sql_name)
+    } else if contains_unmappable_chars(sql_name) {
+        // Map each non-alphanumeric character ([^a-zA-Z0-9]) to an underscore.
+        let mut rust_name: String = sql_name
+            .chars()
+            .map(|c| if c.is_ascii_alphanumeric() { c } else { '_' })
+            .collect();
+
+        // Iteratively remove adjoining underscores ("__").
+        let mut last_len = rust_name.len();
+        'remove_adjoining: loop {
+            rust_name = rust_name.replace("__", "_");
+            if rust_name.len() == last_len {
+                // No more underscore pairs left.
+                break 'remove_adjoining;
+            }
+            last_len = rust_name.len();
+        }
+
+        rust_name
+    } else {
+        sql_name.to_string()
+    }
+}
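A few illustrative inputs and the identifiers the rules above produce, written as a test that could sit next to the function (a sketch, not part of the patch):

```rust
#[test]
fn rust_name_for_sql_name_examples() {
    // Reserved names get a trailing underscore, unmappable characters become
    // underscores, and runs of underscores are collapsed.
    assert_eq!(rust_name_for_sql_name("type"), "type_");
    assert_eq!(rust_name_for_sql_name("user name"), "user_name");
    assert_eq!(rust_name_for_sql_name("a--weird--name"), "a_weird_name");
    assert_eq!(rust_name_for_sql_name("id"), "id");
}
```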
+
 pub fn load_table_names(
     database_url: &str,
     schema_name: Option<&str>,
@@ -119,7 +151,7 @@
     constraints.map(|mut ct| {
         ct.sort();
         ct.iter_mut().for_each(|foreign_key_constraint| {
-            if is_reserved(&foreign_key_constraint.foreign_key_rust_name) {
+            if is_reserved_name(&foreign_key_constraint.foreign_key_rust_name) {
                 foreign_key_constraint.foreign_key_rust_name =
                     format!("{}_", foreign_key_constraint.foreign_key_rust_name);
             }
@@ -144,29 +176,19 @@
         "Representation of the `{}` table.
 
         (Automatically generated by Diesel.)",
-        name
+        name.full_sql_name(),
     );
     let primary_key = get_primary_keys(&connection, &name)?;
     let primary_key = primary_key
         .iter()
-        .map(|k| {
-            if is_reserved(&k) {
-                format!("{}_", k)
-            } else {
-                k.clone()
-            }
-        })
+        .map(|k| rust_name_for_sql_name(&k))
         .collect();
 
     let column_data = get_column_information(&connection, &name)?
         .into_iter()
         .map(|c| {
             let ty = determine_column_type(&c, &connection)?;
-            let rust_name = if is_reserved(&c.column_name) {
-                Some(format!("{}_", c.column_name))
-            } else {
-                None
-            };
+            let rust_name = rust_name_for_sql_name(&c.column_name);
 
             Ok(ColumnDefinition {
                 docs: doc_comment!(
@@ -176,7 +198,7 @@
 
                     (Automatically generated by Diesel.)",
                     c.column_name,
-                    name,
+                    name.full_sql_name(),
                     ty
                 ),
                 sql_name: c.column_name,
diff --git a/diesel_cli/src/infer_schema_internals/information_schema.rs b/diesel_cli/src/infer_schema_internals/information_schema.rs
index 0e75c68..337bebd 100644
--- a/diesel_cli/src/infer_schema_internals/information_schema.rs
+++ b/diesel_cli/src/infer_schema_internals/information_schema.rs
@@ -3,7 +3,8 @@
 
 use diesel::backend::Backend;
 use diesel::deserialize::FromSql;
-use diesel::expression::NonAggregate;
+use diesel::dsl::*;
+use diesel::expression::{is_aggregate, ValidGrouping};
 #[cfg(feature = "mysql")]
 use diesel::mysql::Mysql;
 #[cfg(feature = "postgres")]
@@ -11,21 +12,21 @@
 use diesel::query_builder::{QueryFragment, QueryId};
 use diesel::*;
 
+use self::information_schema::{columns, key_column_usage, table_constraints, tables};
 use super::data_structures::*;
+use super::inference;
 use super::table_data::TableName;
 
 pub trait UsesInformationSchema: Backend {
-    type TypeColumn: SelectableExpression<
-            self::information_schema::columns::table,
-            SqlType = sql_types::Text,
-        > + NonAggregate
+    type TypeColumn: SelectableExpression<self::information_schema::columns::table, SqlType = sql_types::Text>
+        + ValidGrouping<(), IsAggregate = is_aggregate::No>
         + QueryId
         + QueryFragment<Self>;
 
     fn type_column() -> Self::TypeColumn;
     fn default_schema<C>(conn: &C) -> QueryResult<String>
     where
-        C: Connection,
+        C: Connection<Backend = Self>,
         String: FromSql<sql_types::Text, C::Backend>;
 }
 
@@ -52,7 +53,7 @@
 
     fn default_schema<C>(conn: &C) -> QueryResult<String>
     where
-        C: Connection,
+        C: Connection<Backend = Self>,
         String: FromSql<sql_types::Text, C::Backend>,
     {
         no_arg_sql_function!(database, sql_types::VarChar);
@@ -62,6 +63,8 @@
 
 #[allow(clippy::module_inception)]
 mod information_schema {
+    use diesel::prelude::{allow_tables_to_appear_in_same_query, table};
+
     table! {
         information_schema.tables (table_schema, table_name) {
             table_schema -> VarChar,
@@ -116,11 +119,31 @@
     allow_tables_to_appear_in_same_query!(key_column_usage, table_constraints);
 }
 
-pub fn get_table_data<Conn>(conn: &Conn, table: &TableName) -> QueryResult<Vec<ColumnInformation>>
+pub fn get_table_data<'a, Conn>(
+    conn: &Conn,
+    table: &'a TableName,
+) -> QueryResult<Vec<ColumnInformation>>
 where
     Conn: Connection,
     Conn::Backend: UsesInformationSchema,
     String: FromSql<sql_types::Text, Conn::Backend>,
+    Order<
+        Filter<
+            Filter<
+                Select<
+                    columns::table,
+                    (
+                        columns::column_name,
+                        <Conn::Backend as UsesInformationSchema>::TypeColumn,
+                        columns::is_nullable,
+                    ),
+                >,
+                Eq<columns::table_name, &'a String>,
+            >,
+            Eq<columns::table_schema, Cow<'a, String>>,
+        >,
+        columns::ordinal_position,
+    >: QueryFragment<Conn::Backend>,
 {
     use self::information_schema::columns::dsl::*;
 
@@ -132,20 +155,39 @@
     let type_column = Conn::Backend::type_column();
     columns
         .select((column_name, type_column, is_nullable))
-        .filter(table_name.eq(&table.name))
+        .filter(table_name.eq(&table.sql_name))
         .filter(table_schema.eq(schema_name))
         .order(ordinal_position)
         .load(conn)
 }
 
-pub fn get_primary_keys<Conn>(conn: &Conn, table: &TableName) -> QueryResult<Vec<String>>
+pub fn get_primary_keys<'a, Conn>(conn: &Conn, table: &'a TableName) -> QueryResult<Vec<String>>
 where
     Conn: Connection,
     Conn::Backend: UsesInformationSchema,
     String: FromSql<sql_types::Text, Conn::Backend>,
+    Order<
+        Filter<
+            Filter<
+                Filter<
+                    Select<key_column_usage::table, key_column_usage::column_name>,
+                    EqAny<
+                        key_column_usage::constraint_name,
+                        Filter<
+                            Select<table_constraints::table, table_constraints::constraint_name>,
+                            Eq<table_constraints::constraint_type, &'static str>,
+                        >,
+                    >,
+                >,
+                Eq<key_column_usage::table_name, &'a String>,
+            >,
+            Eq<key_column_usage::table_schema, Cow<'a, String>>,
+        >,
+        key_column_usage::ordinal_position,
+    >: QueryFragment<Conn::Backend>,
 {
     use self::information_schema::key_column_usage::dsl::*;
-    use self::information_schema::table_constraints::{self, constraint_type};
+    use self::information_schema::table_constraints::constraint_type;
 
     let pk_query = table_constraints::table
         .select(table_constraints::constraint_name)
@@ -159,25 +201,37 @@
     key_column_usage
         .select(column_name)
         .filter(constraint_name.eq_any(pk_query))
-        .filter(table_name.eq(&table.name))
+        .filter(table_name.eq(&table.sql_name))
         .filter(table_schema.eq(schema_name))
         .order(ordinal_position)
         .load(conn)
 }
 
-pub fn load_table_names<Conn>(
+pub fn load_table_names<'a, Conn>(
     connection: &Conn,
-    schema_name: Option<&str>,
+    schema_name: Option<&'a str>,
 ) -> Result<Vec<TableName>, Box<dyn Error>>
 where
     Conn: Connection,
     Conn::Backend: UsesInformationSchema,
     String: FromSql<sql_types::Text, Conn::Backend>,
+    Filter<
+        Filter<
+            Filter<
+                Select<tables::table, tables::table_name>,
+                Eq<tables::table_schema, Cow<'a, str>>,
+            >,
+            NotLike<tables::table_name, &'static str>,
+        >,
+        Like<tables::table_type, &'static str>,
+    >: QueryFragment<Conn::Backend>,
 {
     use self::information_schema::tables::dsl::*;
 
     let default_schema = Conn::Backend::default_schema(connection)?;
-    let db_schema_name = schema_name.unwrap_or(&default_schema);
+    let db_schema_name = schema_name
+        .map(Cow::Borrowed)
+        .unwrap_or_else(|| Cow::Owned(default_schema.clone()));
 
     let mut table_names = tables
         .select(table_name)
@@ -189,7 +243,8 @@
     Ok(table_names
         .into_iter()
         .map(|name| TableName {
-            name,
+            rust_name: inference::rust_name_for_sql_name(&name),
+            sql_name: name,
             schema: schema_name
                 .filter(|&schema| schema != default_schema)
                 .map(|schema| schema.to_owned()),
@@ -199,20 +254,15 @@
 
 #[allow(clippy::similar_names)]
 #[cfg(feature = "postgres")]
-pub fn load_foreign_key_constraints<Conn>(
-    connection: &Conn,
-    schema_name: Option<&str>,
-) -> QueryResult<Vec<ForeignKeyConstraint>>
-where
-    Conn: Connection,
-    Conn::Backend: UsesInformationSchema,
-    String: FromSql<sql_types::Text, Conn::Backend>,
-{
+pub fn load_foreign_key_constraints<'a>(
+    connection: &PgConnection,
+    schema_name: Option<&'a str>,
+) -> QueryResult<Vec<ForeignKeyConstraint>> {
     use self::information_schema::key_column_usage as kcu;
     use self::information_schema::referential_constraints as rc;
     use self::information_schema::table_constraints as tc;
 
-    let default_schema = Conn::Backend::default_schema(connection)?;
+    let default_schema = Pg::default_schema(connection)?;
     let schema_name = schema_name.unwrap_or(&default_schema);
 
     let constraint_names = tc::table
diff --git a/diesel_cli/src/infer_schema_internals/mysql.rs b/diesel_cli/src/infer_schema_internals/mysql.rs
index 4f8b7b3..bce2a1f 100644
--- a/diesel_cli/src/infer_schema_internals/mysql.rs
+++ b/diesel_cli/src/infer_schema_internals/mysql.rs
@@ -8,6 +8,8 @@
 use super::table_data::TableName;
 
 mod information_schema {
+    use diesel::prelude::{allow_tables_to_appear_in_same_query, table};
+
     table! {
         information_schema.table_constraints (constraint_schema, constraint_name) {
             table_schema -> VarChar,
diff --git a/diesel_cli/src/infer_schema_internals/sqlite.rs b/diesel_cli/src/infer_schema_internals/sqlite.rs
index e3aea89..765a903 100644
--- a/diesel_cli/src/infer_schema_internals/sqlite.rs
+++ b/diesel_cli/src/infer_schema_internals/sqlite.rs
@@ -68,7 +68,7 @@
     let rows = tables
         .into_iter()
         .map(|child_table| {
-            let query = format!("PRAGMA FOREIGN_KEY_LIST('{}')", child_table.name);
+            let query = format!("PRAGMA FOREIGN_KEY_LIST('{}')", child_table.sql_name);
             Ok(sql::<pragma_foreign_key_list::SqlType>(&query)
                 .load::<ForeignKeyListRow>(connection)?
                 .into_iter()
@@ -85,14 +85,14 @@
                 .collect())
         })
         .collect::<QueryResult<Vec<Vec<_>>>>()?;
-    Ok(rows.into_iter().flat_map(|x| x).collect())
+    Ok(rows.into_iter().flatten().collect())
 }
 
 pub fn get_table_data(
     conn: &SqliteConnection,
     table: &TableName,
 ) -> QueryResult<Vec<ColumnInformation>> {
-    let query = format!("PRAGMA TABLE_INFO('{}')", &table.name);
+    let query = format!("PRAGMA TABLE_INFO('{}')", &table.sql_name);
     sql::<pragma_table_info::SqlType>(&query).load(conn)
 }
 
@@ -119,7 +119,7 @@
 }
 
 pub fn get_primary_keys(conn: &SqliteConnection, table: &TableName) -> QueryResult<Vec<String>> {
-    let query = format!("PRAGMA TABLE_INFO('{}')", &table.name);
+    let query = format!("PRAGMA TABLE_INFO('{}')", &table.sql_name);
     let results = sql::<pragma_table_info::SqlType>(&query).load::<FullTableInfo>(conn)?;
     Ok(results
         .into_iter()
diff --git a/diesel_cli/src/infer_schema_internals/table_data.rs b/diesel_cli/src/infer_schema_internals/table_data.rs
index 6a14332..f6c6bad 100644
--- a/diesel_cli/src/infer_schema_internals/table_data.rs
+++ b/diesel_cli/src/infer_schema_internals/table_data.rs
@@ -4,17 +4,22 @@
 use std::str::FromStr;
 
 use super::data_structures::ColumnDefinition;
+use super::inference;
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct TableName {
-    pub name: String,
+    pub sql_name: String,
+    pub rust_name: String,
     pub schema: Option<String>,
 }
 
 impl TableName {
     pub fn from_name<T: Into<String>>(name: T) -> Self {
+        let name = name.into();
+
         TableName {
-            name: name.into(),
+            rust_name: inference::rust_name_for_sql_name(&name),
+            sql_name: name,
             schema: None,
         }
     }
@@ -24,8 +29,11 @@
         T: Into<String>,
         U: Into<String>,
     {
+        let name = name.into();
+
         TableName {
-            name: name.into(),
+            rust_name: inference::rust_name_for_sql_name(&name),
+            sql_name: name,
             schema: Some(schema.into()),
         }
     }
@@ -36,6 +44,13 @@
             self.schema = None;
         }
     }
+
+    pub fn full_sql_name(&self) -> String {
+        match self.schema {
+            Some(ref schema_name) => format!("{}.{}", schema_name, self.sql_name),
+            None => self.sql_name.to_string(),
+        }
+    }
 }
 
 impl<ST, DB> Queryable<ST, DB> for TableName
@@ -53,8 +68,8 @@
 impl fmt::Display for TableName {
     fn fmt(&self, out: &mut fmt::Formatter) -> Result<(), fmt::Error> {
         match self.schema {
-            Some(ref schema_name) => write!(out, "{}.{}", schema_name, self.name),
-            None => write!(out, "{}", self.name),
+            Some(ref schema_name) => write!(out, "{}.{}", schema_name, self.rust_name),
+            None => write!(out, "{}", self.rust_name),
         }
     }
 }
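The effect of the `sql_name`/`rust_name` split in one place, using only the constructor shown above (a sketch, not part of the patch):

```rust
#[test]
fn table_name_keeps_sql_and_rust_names_apart() {
    let table = TableName::from_name("user table");
    // The original SQL name is preserved for generated SQL and doc comments ...
    assert_eq!(table.sql_name, "user table");
    assert_eq!(table.full_sql_name(), "user table");
    // ... while `Display` now renders the sanitized Rust identifier.
    assert_eq!(table.rust_name, "user_table");
    assert_eq!(table.to_string(), "user_table");
}
```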
diff --git a/diesel_cli/src/main.rs b/diesel_cli/src/main.rs
index ed64799..c6fb7bc 100644
--- a/diesel_cli/src/main.rs
+++ b/diesel_cli/src/main.rs
@@ -3,37 +3,19 @@
 // Clippy lints
 #![allow(clippy::option_map_unwrap_or_else, clippy::option_map_unwrap_or)]
 #![warn(
-    clippy::wrong_pub_self_convention,
+    clippy::if_not_else,
+    clippy::items_after_statements,
     clippy::mut_mut,
     clippy::non_ascii_literal,
     clippy::similar_names,
     clippy::unicode_not_nfc,
-    clippy::if_not_else,
-    clippy::items_after_statements,
-    clippy::used_underscore_binding
+    clippy::used_underscore_binding,
+    clippy::wrong_pub_self_convention
 )]
 #![cfg_attr(test, allow(clippy::result_unwrap_used))]
 
-extern crate chrono;
-#[macro_use]
-extern crate clap;
-#[macro_use]
-extern crate diesel;
-extern crate dotenv;
-extern crate heck;
-extern crate migrations_internals;
-#[macro_use]
-extern crate serde;
-extern crate tempfile;
-extern crate toml;
-#[cfg(feature = "url")]
-extern crate url;
-
 mod config;
 
-#[cfg(feature = "barrel-migrations")]
-extern crate barrel;
-
 mod database_error;
 #[macro_use]
 mod database;
@@ -55,11 +37,11 @@
 
 use self::config::Config;
 use self::database_error::{DatabaseError, DatabaseResult};
-use migrations::MigrationError;
+use crate::migrations::MigrationError;
 use migrations_internals::TIMESTAMP_FORMAT;
 
 fn main() {
-    use self::dotenv::dotenv;
+    use dotenv::dotenv;
     dotenv().ok();
 
     let matches = cli::build_cli().get_matches();
@@ -216,8 +198,7 @@
                 Config::read(matches)
                     .unwrap_or_else(handle_error)
                     .migrations_directory?
-                    .dir
-                    .to_owned(),
+                    .dir,
             )
         });
 
@@ -248,7 +229,7 @@
         create_migrations_directory(&dir)?;
     }
 
-    Ok(dir.to_owned())
+    Ok(dir)
 }
 
 fn create_config_file(matches: &ArgMatches) -> DatabaseResult<()> {
@@ -265,11 +246,11 @@
 fn run_database_command(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {
     match matches.subcommand() {
         ("setup", Some(args)) => {
-            let migrations_dir = migrations_dir(args).unwrap_or_else(handle_error);
+            let migrations_dir = migrations_dir(matches).unwrap_or_else(handle_error);
             database::setup_database(args, &migrations_dir)?;
         }
         ("reset", Some(args)) => {
-            let migrations_dir = migrations_dir(args).unwrap_or_else(handle_error);
+            let migrations_dir = migrations_dir(matches).unwrap_or_else(handle_error);
             database::reset_database(args, &migrations_dir)?;
             regenerate_schema_if_file_specified(matches)?;
         }
@@ -287,6 +268,8 @@
 }
 
 fn generate_completions_command(matches: &ArgMatches) {
+    use clap::value_t;
+
     let shell = value_t!(matches, "SHELL", Shell).unwrap_or_else(|e| e.exit());
     cli::build_cli().gen_completions_to("diesel", shell, &mut stdout());
 }
@@ -294,7 +277,7 @@
 /// Looks for a migrations directory in the current path and all parent paths,
 /// and creates one in the same directory as the Cargo.toml if it can't find
 /// one. It also sticks a .gitkeep in the directory so git will pick it up.
-/// Returns a `DatabaseError::CargoTomlNotFound` if no Cargo.toml is found.
+/// Returns a `DatabaseError::ProjectRootNotFound` if neither a diesel.toml nor a Cargo.toml is found.
 fn create_migrations_directory(path: &Path) -> DatabaseResult<PathBuf> {
     println!("Creating migrations directory at: {}", path.display());
     fs::create_dir(path)?;
@@ -303,19 +286,22 @@
 }
 
 fn find_project_root() -> DatabaseResult<PathBuf> {
-    search_for_cargo_toml_directory(&env::current_dir()?)
+    let current_dir = env::current_dir()?;
+    search_for_directory_containing_file(&current_dir, "diesel.toml")
+        .or_else(|_| search_for_directory_containing_file(&current_dir, "Cargo.toml"))
 }
 
 /// Searches for the directory that holds the project's Cargo.toml, and returns
-/// the path if it found it, or returns a `DatabaseError::CargoTomlNotFound`.
-fn search_for_cargo_toml_directory(path: &Path) -> DatabaseResult<PathBuf> {
-    let toml_path = path.join("Cargo.toml");
+/// the path if it found it, or returns a `DatabaseError::ProjectRootNotFound`.
+fn search_for_directory_containing_file(path: &Path, file: &str) -> DatabaseResult<PathBuf> {
+    let toml_path = path.join(file);
     if toml_path.is_file() {
         Ok(path.to_owned())
     } else {
         path.parent()
-            .map(search_for_cargo_toml_directory)
-            .unwrap_or(Err(DatabaseError::CargoTomlNotFound))
+            .map(|p| search_for_directory_containing_file(p, file))
+            .unwrap_or_else(|| Err(DatabaseError::ProjectRootNotFound(path.into())))
+            .map_err(|_| DatabaseError::ProjectRootNotFound(path.into()))
     }
 }
 
@@ -376,8 +362,8 @@
 }
 
 fn run_infer_schema(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {
-    use infer_schema_internals::TableName;
-    use print_schema::*;
+    use crate::infer_schema_internals::TableName;
+    use crate::print_schema::*;
 
     let database_url = database::database_url(matches);
     let mut config = Config::read(matches)?.print_schema;
@@ -468,7 +454,7 @@
 mod tests {
     extern crate tempfile;
 
-    use database_error::DatabaseError;
+    use crate::database_error::DatabaseError;
 
     use self::tempfile::Builder;
 
@@ -476,7 +462,7 @@
     use std::path::PathBuf;
 
     use super::convert_absolute_path_to_relative;
-    use super::search_for_cargo_toml_directory;
+    use super::search_for_directory_containing_file;
 
     #[test]
     fn toml_directory_find_cargo_toml() {
@@ -488,7 +474,7 @@
 
         assert_eq!(
             Ok(temp_path.clone()),
-            search_for_cargo_toml_directory(&temp_path)
+            search_for_directory_containing_file(&temp_path, "Cargo.toml")
         );
     }
 
@@ -498,8 +484,8 @@
         let temp_path = dir.path().canonicalize().unwrap();
 
         assert_eq!(
-            Err(DatabaseError::CargoTomlNotFound),
-            search_for_cargo_toml_directory(&temp_path)
+            Err(DatabaseError::ProjectRootNotFound(temp_path.clone())),
+            search_for_directory_containing_file(&temp_path, "Cargo.toml")
         );
     }
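
As a rough illustration of the lookup order introduced by the `find_project_root` change above, here is a standalone sketch. It is not diesel_cli's code; `find_marker_file` and `find_project_root_sketch` are hypothetical names, but the order (prefer a directory containing `diesel.toml`, fall back to one containing `Cargo.toml`, walking up through parent directories) mirrors the diff.

    use std::env;
    use std::io;
    use std::path::{Path, PathBuf};

    // Hypothetical helper: walk up from `start` and return the first ancestor
    // (including `start` itself) that directly contains `file`.
    fn find_marker_file(start: &Path, file: &str) -> Option<PathBuf> {
        let mut dir = Some(start);
        while let Some(d) = dir {
            if d.join(file).is_file() {
                return Some(d.to_owned());
            }
            dir = d.parent();
        }
        None
    }

    // Mirrors the new behaviour: a `diesel.toml` marks the project root and
    // takes precedence over `Cargo.toml`.
    fn find_project_root_sketch() -> io::Result<Option<PathBuf>> {
        let current_dir = env::current_dir()?;
        Ok(find_marker_file(&current_dir, "diesel.toml")
            .or_else(|| find_marker_file(&current_dir, "Cargo.toml")))
    }

    fn main() -> io::Result<()> {
        match find_project_root_sketch()? {
            Some(root) => println!("project root: {}", root.display()),
            None => println!("neither diesel.toml nor Cargo.toml found"),
        }
        Ok(())
    }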
 
diff --git a/diesel_cli/src/print_schema.rs b/diesel_cli/src/print_schema.rs
index 7375443..cad269b 100644
--- a/diesel_cli/src/print_schema.rs
+++ b/diesel_cli/src/print_schema.rs
@@ -1,6 +1,6 @@
-use config;
+use crate::config;
 
-use infer_schema_internals::*;
+use crate::infer_schema_internals::*;
 use serde::de::{self, MapAccess, Visitor};
 use serde::{Deserialize, Deserializer};
 use std::error::Error;
@@ -163,7 +163,11 @@
                 let mut out = PadAdapter::new(f);
                 writeln!(out)?;
                 for table in &self.tables {
-                    writeln!(out, "{},", table.name.name)?;
+                    if table.name.rust_name == table.name.sql_name {
+                        writeln!(out, "{},", table.name.sql_name)?;
+                    } else {
+                        writeln!(out, "{},", table.name.rust_name)?;
+                    }
                 }
             }
             writeln!(f, ");")?;
@@ -199,7 +203,16 @@
                 }
             }
 
+            if self.table.name.rust_name != self.table.name.sql_name {
+                writeln!(
+                    out,
+                    r#"#[sql_name = "{}"]"#,
+                    self.table.name.full_sql_name()
+                )?;
+            }
+
             write!(out, "{} (", self.table.name)?;
+
             for (i, pk) in self.table.primary_key.iter().enumerate() {
                 if i != 0 {
                     write!(out, ", ")?;
@@ -237,11 +250,11 @@
                         writeln!(out, "///{}{}", if d.is_empty() { "" } else { " " }, d)?;
                     }
                 }
-                if let Some(ref rust_name) = column.rust_name {
-                    writeln!(out, r#"#[sql_name = "{}"]"#, column.sql_name)?;
-                    writeln!(out, "{} -> {},", rust_name, column.ty)?;
-                } else {
+                if column.rust_name == column.sql_name {
                     writeln!(out, "{} -> {},", column.sql_name, column.ty)?;
+                } else {
+                    writeln!(out, r#"#[sql_name = "{}"]"#, column.sql_name)?;
+                    writeln!(out, "{} -> {},", column.rust_name, column.ty)?;
                 }
             }
         }
@@ -254,10 +267,14 @@
 
 impl<'a> Display for Joinable<'a> {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        let child_table_name = &self.0.child_table.rust_name;
+
+        let parent_table_name = &self.0.parent_table.rust_name;
+
         write!(
             f,
             "joinable!({} -> {} ({}));",
-            self.0.child_table.name, self.0.parent_table.name, self.0.foreign_key_rust_name,
+            child_table_name, parent_table_name, self.0.foreign_key_rust_name,
         )
     }
 }
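
The renaming logic above only emits a `#[sql_name = "..."]` attribute when the sanitized Rust identifier differs from the SQL name. Below is a minimal sketch of that branching, using a hypothetical `Column` struct rather than the real inferred types.

    use std::fmt::Write;

    // Hypothetical stand-in for the inferred column data used by the printer.
    struct Column {
        sql_name: String,
        rust_name: String,
        ty: String,
    }

    // Emit `#[sql_name = "..."]` only when the Rust identifier had to be
    // sanitized, matching the branch added in the diff above.
    fn write_column(out: &mut String, column: &Column) -> std::fmt::Result {
        if column.rust_name == column.sql_name {
            writeln!(out, "{} -> {},", column.sql_name, column.ty)
        } else {
            writeln!(out, r#"#[sql_name = "{}"]"#, column.sql_name)?;
            writeln!(out, "{} -> {},", column.rust_name, column.ty)
        }
    }

    fn main() {
        let mut out = String::new();
        let plain = Column { sql_name: "id".into(), rust_name: "id".into(), ty: "Integer".into() };
        let renamed = Column { sql_name: "created at".into(), rust_name: "created_at".into(), ty: "Timestamp".into() };
        write_column(&mut out, &plain).unwrap();
        write_column(&mut out, &renamed).unwrap();
        // Prints:
        //   id -> Integer,
        //   #[sql_name = "created at"]
        //   created_at -> Timestamp,
        print!("{}", out);
    }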
diff --git a/diesel_cli/tests/completion_generation.rs b/diesel_cli/tests/completion_generation.rs
index b60e803..f4bd039 100644
--- a/diesel_cli/tests/completion_generation.rs
+++ b/diesel_cli/tests/completion_generation.rs
@@ -1,4 +1,4 @@
-use support::project;
+use crate::support::project;
 
 #[test]
 fn can_generate_deprecated_bash_completion() {
diff --git a/diesel_cli/tests/database_drop.rs b/diesel_cli/tests/database_drop.rs
index a047e2f..91a24d73 100644
--- a/diesel_cli/tests/database_drop.rs
+++ b/diesel_cli/tests/database_drop.rs
@@ -1,4 +1,4 @@
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn database_drop_drops_database() {
diff --git a/diesel_cli/tests/database_reset.rs b/diesel_cli/tests/database_reset.rs
index b88c92a..83e4436 100644
--- a/diesel_cli/tests/database_reset.rs
+++ b/diesel_cli/tests/database_reset.rs
@@ -1,7 +1,7 @@
 #[cfg(feature = "postgres")]
 extern crate url;
 
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn reset_drops_the_database() {
diff --git a/diesel_cli/tests/database_setup.rs b/diesel_cli/tests/database_setup.rs
index 158af59..eefee8d 100644
--- a/diesel_cli/tests/database_setup.rs
+++ b/diesel_cli/tests/database_setup.rs
@@ -1,4 +1,4 @@
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn database_setup_creates_database() {
@@ -85,6 +85,39 @@
 }
 
 #[test]
+fn database_setup_respects_migration_dir_by_arg_to_database() {
+    let p = project("database_setup_respects_migration_dir_by_arg_to_database")
+        .folder("foo")
+        .build();
+
+    let db = database(&p.database_url());
+
+    p.create_migration_in_directory(
+        "foo",
+        "12345_create_users_table",
+        "CREATE TABLE users ( id INTEGER )",
+        "DROP TABLE users",
+    );
+
+    // sanity check
+    assert!(!db.exists());
+
+    let result = p
+        .command("database")
+        .arg("--migration-dir=foo")
+        .arg("setup")
+        .run();
+
+    assert!(result.is_success(), "Result was unsuccessful {:?}", result);
+    assert!(
+        result.stdout().contains("Running migration 12345"),
+        "Unexpected stdout {}",
+        result.stdout()
+    );
+    assert!(db.table_exists("users"));
+}
+
+#[test]
 fn database_setup_respects_migration_dir_by_arg() {
     let p = project("database_setup_respects_migration_dir_by_arg")
         .folder("foo")
diff --git a/diesel_cli/tests/database_url_errors.rs b/diesel_cli/tests/database_url_errors.rs
index 08068c8..be6f92e 100644
--- a/diesel_cli/tests/database_url_errors.rs
+++ b/diesel_cli/tests/database_url_errors.rs
@@ -1,5 +1,5 @@
 #[allow(unused_imports)]
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 #[cfg(not(feature = "sqlite"))]
diff --git a/diesel_cli/tests/exit_codes.rs b/diesel_cli/tests/exit_codes.rs
index fa0bd1c..57c813a 100644
--- a/diesel_cli/tests/exit_codes.rs
+++ b/diesel_cli/tests/exit_codes.rs
@@ -1,4 +1,4 @@
-use support::project;
+use crate::support::project;
 
 #[test]
 fn errors_dont_cause_panic() {
diff --git a/diesel_cli/tests/migration_generate.rs b/diesel_cli/tests/migration_generate.rs
index d0b458f..0b59f4f 100644
--- a/diesel_cli/tests/migration_generate.rs
+++ b/diesel_cli/tests/migration_generate.rs
@@ -1,8 +1,8 @@
 use chrono::prelude::*;
 use regex::Regex;
 
+use crate::support::project;
 use migrations_internals::TIMESTAMP_FORMAT;
-use support::project;
 
 #[test]
 fn migration_generate_creates_a_migration_with_the_proper_name() {
diff --git a/diesel_cli/tests/migration_list.rs b/diesel_cli/tests/migration_list.rs
index 2bfb0bb..c13e5ed 100644
--- a/diesel_cli/tests/migration_list.rs
+++ b/diesel_cli/tests/migration_list.rs
@@ -2,8 +2,8 @@
 use std::thread::sleep;
 use std::time::Duration;
 
+use crate::support::{database, project};
 use migrations_internals::TIMESTAMP_FORMAT;
-use support::{database, project};
 
 #[test]
 fn migration_list_lists_pending_applied_migrations() {
diff --git a/diesel_cli/tests/migration_redo.rs b/diesel_cli/tests/migration_redo.rs
index fb00702..1554c0a 100644
--- a/diesel_cli/tests/migration_redo.rs
+++ b/diesel_cli/tests/migration_redo.rs
@@ -1,4 +1,4 @@
-use support::project;
+use crate::support::project;
 
 #[test]
 fn migration_redo_runs_the_last_migration_down_and_up() {
diff --git a/diesel_cli/tests/migration_revert.rs b/diesel_cli/tests/migration_revert.rs
index ba080f1..8ba1847 100644
--- a/diesel_cli/tests/migration_revert.rs
+++ b/diesel_cli/tests/migration_revert.rs
@@ -1,4 +1,4 @@
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn migration_revert_runs_the_last_migration_down() {
diff --git a/diesel_cli/tests/migration_run.rs b/diesel_cli/tests/migration_run.rs
index da2e8e8..f58b6f2 100644
--- a/diesel_cli/tests/migration_run.rs
+++ b/diesel_cli/tests/migration_run.rs
@@ -1,8 +1,8 @@
+use crate::support::{database, project};
 use diesel::dsl::sql;
 use diesel::sql_types::Bool;
 use diesel::{select, RunQueryDsl};
 use std::path::Path;
-use support::{database, project};
 
 #[test]
 fn migration_run_runs_pending_migrations() {
@@ -502,7 +502,8 @@
     assert!(
         result
             .stderr()
-            .contains("Command would result in changes to src/my_schema.rs"),
+            .contains("Command would result in changes to")
+            && result.stderr().contains("src/my_schema.rs"),
         "Unexpected stderr {}",
         result.stderr()
     );
diff --git a/diesel_cli/tests/print_schema.rs b/diesel_cli/tests/print_schema.rs
index 0fbd52b..f2a2883 100644
--- a/diesel_cli/tests/print_schema.rs
+++ b/diesel_cli/tests/print_schema.rs
@@ -2,7 +2,7 @@
 use std::io::prelude::*;
 use std::path::{Path, PathBuf};
 
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn run_infer_schema_without_docs() {
@@ -115,6 +115,33 @@
     );
 }
 
+#[test]
+fn print_schema_with_unmappable_names() {
+    test_print_schema("print_schema_with_unmappable_names", vec!["--with-docs"]);
+}
+
+#[test]
+#[cfg(feature = "postgres")]
+fn print_schema_with_unmappable_names_and_schema_name() {
+    test_print_schema(
+        "print_schema_with_unmappable_names_and_schema_name",
+        vec!["--with-docs", "--schema", "custom_schema"],
+    )
+}
+
+#[test]
+fn schema_file_is_relative_to_project_root() {
+    let p = project("schema_file_is_relative_to_project_root")
+        .folder("foo")
+        .build();
+    let _db = database(&p.database_url());
+
+    p.command("setup").run();
+    p.command("migration").arg("run").cd("foo").run();
+
+    assert!(p.has_file("src/schema.rs"));
+}
+
 #[cfg(feature = "sqlite")]
 const BACKEND: &str = "sqlite";
 #[cfg(feature = "postgres")]
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/diesel.toml b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/diesel.toml
new file mode 100644
index 0000000..750e5ba
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/diesel.toml
@@ -0,0 +1,3 @@
+[print_schema]
+file = "src/schema.rs"
+with_docs = true
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/expected.rs b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/expected.rs
new file mode 100644
index 0000000..2fe08ba
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/expected.rs
@@ -0,0 +1,62 @@
+table! {
+    /// Representation of the `self` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "self"]
+    self_ (id) {
+        /// The `id` column of the `self` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Integer,
+    }
+}
+
+table! {
+    /// Representation of the `user-has::complex>>>role` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "user-has::complex>>>role"]
+    user_has_complex_role (id) {
+        /// The `user` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        user -> Integer,
+        /// The `role` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        role -> Integer,
+        /// The `id` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Integer,
+        /// The `created at` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Timestamp`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "created at"]
+        created_at -> Timestamp,
+        /// The `expiry date` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Nullable<Timestamp>`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "expiry date"]
+        expiry_date -> Nullable<Timestamp>,
+    }
+}
+
+joinable!(user_has_complex_role -> self_ (user));
+
+allow_tables_to_appear_in_same_query!(
+    self_,
+    user_has_complex_role,
+);
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/schema.sql
new file mode 100644
index 0000000..479f37f
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/mysql/schema.sql
@@ -0,0 +1,9 @@
+CREATE TABLE self (id INTEGER PRIMARY KEY);
+CREATE TABLE `user-has::complex>>>role` (
+  user INTEGER NOT NULL,
+  role INTEGER NOT NULL,
+  id INTEGER PRIMARY KEY,
+  `created at` TIMESTAMP NOT NULL,
+  `expiry date` TIMESTAMP NULL DEFAULT NULL,
+  FOREIGN KEY (user) REFERENCES self(id)
+);
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/expected.rs b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/expected.rs
new file mode 100644
index 0000000..1662dba
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/expected.rs
@@ -0,0 +1,62 @@
+table! {
+    /// Representation of the `self` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "self"]
+    self_ (id) {
+        /// The `id` column of the `self` table.
+        ///
+        /// Its SQL type is `Int4`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Int4,
+    }
+}
+
+table! {
+    /// Representation of the `user-has::complex>>>role` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "user-has::complex>>>role"]
+    user_has_complex_role (id) {
+        /// The `user` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Int4`.
+        ///
+        /// (Automatically generated by Diesel.)
+        user -> Int4,
+        /// The `role` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Int4`.
+        ///
+        /// (Automatically generated by Diesel.)
+        role -> Int4,
+        /// The `id` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Int4`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Int4,
+        /// The `created at` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Timestamp`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "created at"]
+        created_at -> Timestamp,
+        /// The `expiry date` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Nullable<Timestamp>`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "expiry date"]
+        expiry_date -> Nullable<Timestamp>,
+    }
+}
+
+joinable!(user_has_complex_role -> self_ (user));
+
+allow_tables_to_appear_in_same_query!(
+    self_,
+    user_has_complex_role,
+);
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/schema.sql
new file mode 100644
index 0000000..6433525
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/postgres/schema.sql
@@ -0,0 +1,8 @@
+CREATE TABLE self (id SERIAL PRIMARY KEY);
+CREATE TABLE "user-has::complex>>>role" (
+  "user" INTEGER NOT NULL REFERENCES self,
+  role INTEGER NOT NULL,
+  id SERIAL PRIMARY KEY,
+  "created at" TIMESTAMP NOT NULL,
+  "expiry date" TIMESTAMP
+);
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/expected.rs b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/expected.rs
new file mode 100644
index 0000000..2fe08ba
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/expected.rs
@@ -0,0 +1,62 @@
+table! {
+    /// Representation of the `self` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "self"]
+    self_ (id) {
+        /// The `id` column of the `self` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Integer,
+    }
+}
+
+table! {
+    /// Representation of the `user-has::complex>>>role` table.
+    ///
+    /// (Automatically generated by Diesel.)
+    #[sql_name = "user-has::complex>>>role"]
+    user_has_complex_role (id) {
+        /// The `user` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        user -> Integer,
+        /// The `role` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        role -> Integer,
+        /// The `id` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Integer`.
+        ///
+        /// (Automatically generated by Diesel.)
+        id -> Integer,
+        /// The `created at` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Timestamp`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "created at"]
+        created_at -> Timestamp,
+        /// The `expiry date` column of the `user-has::complex>>>role` table.
+        ///
+        /// Its SQL type is `Nullable<Timestamp>`.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "expiry date"]
+        expiry_date -> Nullable<Timestamp>,
+    }
+}
+
+joinable!(user_has_complex_role -> self_ (user));
+
+allow_tables_to_appear_in_same_query!(
+    self_,
+    user_has_complex_role,
+);
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/schema.sql
new file mode 100644
index 0000000..4f55b96
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names/sqlite/schema.sql
@@ -0,0 +1,8 @@
+CREATE TABLE self (id INTEGER NOT NULL PRIMARY KEY);
+CREATE TABLE "user-has::complex>>>role" (
+  "user" INTEGER NOT NULL REFERENCES self(id),
+  role INTEGER NOT NULL,
+  id INTEGER NOT NULL PRIMARY KEY,
+  "created at" TIMESTAMP NOT NULL,
+  "expiry date" TIMESTAMP
+);
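
The fixtures above show what the schema printer now generates for names that are not valid Rust identifiers. Purely as a hedged usage sketch (abbreviated table definitions, an in-memory SQLite connection, no error handling), application code queries through the sanitized Rust names while Diesel keeps emitting the original SQL names:

    #[macro_use]
    extern crate diesel;

    use diesel::*;

    // Abbreviated versions of the generated definitions above; the Rust-side
    // identifiers are sanitized, `#[sql_name]` preserves the SQL originals.
    table! {
        #[sql_name = "self"]
        self_ (id) {
            id -> Integer,
        }
    }

    table! {
        #[sql_name = "user-has::complex>>>role"]
        user_has_complex_role (id) {
            user -> Integer,
            id -> Integer,
            #[sql_name = "created at"]
            created_at -> Timestamp,
        }
    }

    joinable!(user_has_complex_role -> self_ (user));
    allow_tables_to_appear_in_same_query!(self_, user_has_complex_role);

    fn main() {
        let connection = SqliteConnection::establish(":memory:").unwrap();

        // Queries use the sanitized names; the generated SQL refers to the
        // quoted originals such as "user-has::complex>>>role".
        let _ids = user_has_complex_role::table
            .select(user_has_complex_role::id)
            .load::<i32>(&connection);
    }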
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/diesel.toml b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/diesel.toml
new file mode 100644
index 0000000..c976c2c
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/diesel.toml
@@ -0,0 +1,4 @@
+[print_schema]
+file = "src/schema.rs"
+with_docs = true
+schema = "custom_schema"
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/expected.rs b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/expected.rs
new file mode 100644
index 0000000..3d59566
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/expected.rs
@@ -0,0 +1,64 @@
+pub mod custom_schema {
+    table! {
+        /// Representation of the `custom_schema.self` table.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "custom_schema.self"]
+        custom_schema.self_ (id) {
+            /// The `id` column of the `custom_schema.self` table.
+            ///
+            /// Its SQL type is `Int4`.
+            ///
+            /// (Automatically generated by Diesel.)
+            id -> Int4,
+        }
+    }
+
+    table! {
+        /// Representation of the `custom_schema.user-has::complex>>>role` table.
+        ///
+        /// (Automatically generated by Diesel.)
+        #[sql_name = "custom_schema.user-has::complex>>>role"]
+        custom_schema.user_has_complex_role (id) {
+            /// The `user` column of the `custom_schema.user-has::complex>>>role` table.
+            ///
+            /// Its SQL type is `Int4`.
+            ///
+            /// (Automatically generated by Diesel.)
+            user -> Int4,
+            /// The `role` column of the `custom_schema.user-has::complex>>>role` table.
+            ///
+            /// Its SQL type is `Int4`.
+            ///
+            /// (Automatically generated by Diesel.)
+            role -> Int4,
+            /// The `id` column of the `custom_schema.user-has::complex>>>role` table.
+            ///
+            /// Its SQL type is `Int4`.
+            ///
+            /// (Automatically generated by Diesel.)
+            id -> Int4,
+            /// The `created at` column of the `custom_schema.user-has::complex>>>role` table.
+            ///
+            /// Its SQL type is `Timestamp`.
+            ///
+            /// (Automatically generated by Diesel.)
+            #[sql_name = "created at"]
+            created_at -> Timestamp,
+            /// The `expiry date` column of the `custom_schema.user-has::complex>>>role` table.
+            ///
+            /// Its SQL type is `Nullable<Timestamp>`.
+            ///
+            /// (Automatically generated by Diesel.)
+            #[sql_name = "expiry date"]
+            expiry_date -> Nullable<Timestamp>,
+        }
+    }
+
+    joinable!(user_has_complex_role -> self_ (user));
+
+    allow_tables_to_appear_in_same_query!(
+        self_,
+        user_has_complex_role,
+    );
+}
diff --git a/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/schema.sql
new file mode 100644
index 0000000..de4bf90
--- /dev/null
+++ b/diesel_cli/tests/print_schema/print_schema_with_unmappable_names_and_schema_name/postgres/schema.sql
@@ -0,0 +1,9 @@
+CREATE SCHEMA custom_schema;
+CREATE TABLE custom_schema.self (id SERIAL PRIMARY KEY);
+CREATE TABLE custom_schema."user-has::complex>>>role" (
+  "user" INTEGER NOT NULL REFERENCES custom_schema.self,
+  role INTEGER NOT NULL,
+  id SERIAL PRIMARY KEY,
+  "created at" TIMESTAMP NOT NULL,
+  "expiry date" TIMESTAMP
+);
diff --git a/diesel_cli/tests/setup.rs b/diesel_cli/tests/setup.rs
index ebea699..607487b 100644
--- a/diesel_cli/tests/setup.rs
+++ b/diesel_cli/tests/setup.rs
@@ -1,7 +1,7 @@
 #[cfg(feature = "postgres")]
 use std::path::Path;
 
-use support::{database, project};
+use crate::support::{database, project};
 
 #[test]
 fn setup_creates_database() {
diff --git a/diesel_cli/tests/support/command.rs b/diesel_cli/tests/support/command.rs
index 08c2a55..a9cb7d9 100644
--- a/diesel_cli/tests/support/command.rs
+++ b/diesel_cli/tests/support/command.rs
@@ -36,6 +36,11 @@
         self
     }
 
+    pub fn cd<P: AsRef<Path>>(mut self, path: P) -> Self {
+        self.cwd.push(path);
+        self
+    }
+
     pub fn run(self) -> CommandResult {
         let output = self.build_command().output().unwrap();
         CommandResult { output: output }
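
The new `cd` helper lets a test run the CLI from a subdirectory of the generated project, which is what the `schema_file_is_relative_to_project_root` test above relies on. A rough, self-contained sketch of the pattern follows; the `TestCommand` type here is hypothetical, not the harness's real builder.

    use std::path::{Path, PathBuf};
    use std::process::{Command, Output};

    // Hypothetical miniature of the test harness's command builder.
    struct TestCommand {
        cwd: PathBuf,
        args: Vec<String>,
    }

    impl TestCommand {
        fn new(project_dir: &Path) -> Self {
            TestCommand { cwd: project_dir.to_owned(), args: Vec::new() }
        }

        fn arg(mut self, arg: &str) -> Self {
            self.args.push(arg.to_owned());
            self
        }

        // Like the `cd` method added above: execute from a subdirectory
        // instead of the project root.
        fn cd<P: AsRef<Path>>(mut self, path: P) -> Self {
            self.cwd.push(path);
            self
        }

        fn run(self) -> std::io::Result<Output> {
            Command::new("diesel")
                .args(&self.args)
                .current_dir(&self.cwd)
                .output()
        }
    }

    fn main() {
        // Mirrors `p.command("migration").arg("run").cd("foo").run()` from the
        // test above; requires a `diesel` binary on PATH to actually succeed.
        let result = TestCommand::new(Path::new("."))
            .arg("migration")
            .arg("run")
            .cd("foo")
            .run();
        println!("{:?}", result.map(|output| output.status));
    }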
diff --git a/diesel_compile_tests/Cargo.toml b/diesel_compile_tests/Cargo.toml
index 177fbda..54489cb 100644
--- a/diesel_compile_tests/Cargo.toml
+++ b/diesel_compile_tests/Cargo.toml
@@ -6,9 +6,5 @@
 [workspace]
 
 [dependencies]
-diesel = { version = "1.4.0", default-features = false, features = ["extras", "sqlite", "postgres", "mysql", "unstable"] }
-compiletest_rs = "=0.3.22"
-
-[replace]
-"diesel:1.4.3" = { path = "../diesel" }
-"diesel_derives:1.4.1" = { path = "../diesel_derives" }
+diesel = { version = "2.0.0", default-features = false, features = ["extras", "sqlite", "postgres", "mysql", "unstable"], path = "../diesel" }
+compiletest_rs = "=0.4"
diff --git a/diesel_compile_tests/rust-toolchain b/diesel_compile_tests/rust-toolchain
index 4390c8a..1bc2439 100644
--- a/diesel_compile_tests/rust-toolchain
+++ b/diesel_compile_tests/rust-toolchain
@@ -1 +1 @@
-nightly-2019-08-01
+nightly-2020-05-01
diff --git a/diesel_compile_tests/tests/compile-fail/array_expressions_must_be_correct_type.rs b/diesel_compile_tests/tests/compile-fail/array_expressions_must_be_correct_type.rs
index 1eb8505..39184e5 100644
--- a/diesel_compile_tests/tests/compile-fail/array_expressions_must_be_correct_type.rs
+++ b/diesel_compile_tests/tests/compile-fail/array_expressions_must_be_correct_type.rs
@@ -16,6 +16,5 @@
     //~| ERROR E0277
     //~| ERROR E0277
     //~| ERROR E0277
-    //~| ERROR E0277
     select(array((1f64, 3f64))).get_result::<Vec<f64>>(&connection);
 }
diff --git a/diesel_compile_tests/tests/compile-fail/cannot_load_default_select_with_group_by.rs b/diesel_compile_tests/tests/compile-fail/cannot_load_default_select_with_group_by.rs
new file mode 100644
index 0000000..5c7bd9a
--- /dev/null
+++ b/diesel_compile_tests/tests/compile-fail/cannot_load_default_select_with_group_by.rs
@@ -0,0 +1,19 @@
+#[macro_use]
+extern crate diesel;
+
+use diesel::*;
+use diesel::dsl::count;
+
+table! {
+    users {
+        id -> Integer,
+        name -> Text,
+    }
+}
+
+fn main() {
+    let conn = PgConnection::establish("").unwrap();
+    let _ = users::table.group_by(users::name)
+        .load::<(i32, String)>(&conn);
+    //~^ ERROR ValidGrouping
+}
diff --git a/diesel_compile_tests/tests/compile-fail/cannot_mix_aggregate_and_non_aggregate_selects.rs b/diesel_compile_tests/tests/compile-fail/cannot_mix_aggregate_and_non_aggregate_selects.rs
index c0b7a6f..360114d 100644
--- a/diesel_compile_tests/tests/compile-fail/cannot_mix_aggregate_and_non_aggregate_selects.rs
+++ b/diesel_compile_tests/tests/compile-fail/cannot_mix_aggregate_and_non_aggregate_selects.rs
@@ -14,5 +14,5 @@
     use self::users::dsl::*;
 
     let source = users.select((id, count(users.star())));
-    //~^ ERROR E0277
+    //~^ ERROR MixedAggregates
 }
diff --git a/diesel_compile_tests/tests/compile-fail/cannot_pass_aggregate_to_where.rs b/diesel_compile_tests/tests/compile-fail/cannot_pass_aggregate_to_where.rs
index 437a0ea..e8f1f91 100644
--- a/diesel_compile_tests/tests/compile-fail/cannot_pass_aggregate_to_where.rs
+++ b/diesel_compile_tests/tests/compile-fail/cannot_pass_aggregate_to_where.rs
@@ -14,5 +14,5 @@
     use self::users::dsl::*;
 
     let source = users.filter(count(id).gt(3));
-    //~^ ERROR NonAggregate
+    //~^ ERROR MixedAggregates
 }
diff --git a/diesel_compile_tests/tests/compile-fail/cannot_update_target_with_methods_other_than_filter_called.rs b/diesel_compile_tests/tests/compile-fail/cannot_update_target_with_methods_other_than_filter_called.rs
index 642978c..fbbd69e 100644
--- a/diesel_compile_tests/tests/compile-fail/cannot_update_target_with_methods_other_than_filter_called.rs
+++ b/diesel_compile_tests/tests/compile-fail/cannot_update_target_with_methods_other_than_filter_called.rs
@@ -15,12 +15,8 @@
 
     let command = update(users.select(id)).set(id.eq(1));
     //~^ ERROR E0277
-    //~| NOTE
-    //~| NOTE IntoUpdateTarget
-    //~| NOTE
+    //~| ERROR E0277
     let command = update(users.order(id)).set(id.eq(1));
     //~^ ERROR E0277
-    //~| NOTE
-    //~| NOTE IntoUpdateTarget
-    //~| NOTE
+    //~| ERROR E0277
 }
diff --git a/diesel_compile_tests/tests/compile-fail/custom_returning_requires_nonaggregate.rs b/diesel_compile_tests/tests/compile-fail/custom_returning_requires_nonaggregate.rs
index 3f1ff63..983d580 100644
--- a/diesel_compile_tests/tests/compile-fail/custom_returning_requires_nonaggregate.rs
+++ b/diesel_compile_tests/tests/compile-fail/custom_returning_requires_nonaggregate.rs
@@ -20,11 +20,11 @@
     use self::users::dsl::*;
 
     let stmt = update(users.filter(id.eq(1))).set(name.eq("Bill")).returning(count(id));
-    //~^ ERROR NonAggregate
+    //~^ ERROR MixedAggregates
 
     let new_user = NewUser {
         name: "Foobar".to_string(),
     };
     let stmt = insert_into(users).values(&new_user).returning((name, count(name)));
-    //~^ ERROR NonAggregate
+    //~^ ERROR MixedAggregates
 }
diff --git a/diesel_compile_tests/tests/compile-fail/exists_can_only_take_subselects.rs b/diesel_compile_tests/tests/compile-fail/exists_can_only_take_subselects.rs
index 1996c23..2c1fed3 100644
--- a/diesel_compile_tests/tests/compile-fail/exists_can_only_take_subselects.rs
+++ b/diesel_compile_tests/tests/compile-fail/exists_can_only_take_subselects.rs
@@ -23,5 +23,5 @@
     users::table.filter(exists(true));
     //~^ ERROR SelectQuery
     users::table.filter(exists(users::id));
-    //~^ ERROR SelectQuery
+    //~^ ERROR E0277
 }
diff --git a/diesel_compile_tests/tests/compile-fail/filter_requires_bool_nonaggregate_expression.rs b/diesel_compile_tests/tests/compile-fail/filter_requires_bool_nonaggregate_expression.rs
index fbd14f3..b262cc0 100644
--- a/diesel_compile_tests/tests/compile-fail/filter_requires_bool_nonaggregate_expression.rs
+++ b/diesel_compile_tests/tests/compile-fail/filter_requires_bool_nonaggregate_expression.rs
@@ -16,5 +16,5 @@
     let _ = users::table.filter(users::name);
     //~^ ERROR type mismatch resolving `<users::columns::name as diesel::Expression>::SqlType == diesel::sql_types::Bool`
     let _ = users::table.filter(sum(users::id).eq(1));
-    //~^ ERROR NonAggregate
+    //~^ ERROR MixedAggregates
 }
diff --git a/diesel_compile_tests/tests/compile-fail/numeric_ops_require_numeric_column.rs b/diesel_compile_tests/tests/compile-fail/numeric_ops_require_numeric_column.rs
index a74f9df..80de57c 100644
--- a/diesel_compile_tests/tests/compile-fail/numeric_ops_require_numeric_column.rs
+++ b/diesel_compile_tests/tests/compile-fail/numeric_ops_require_numeric_column.rs
@@ -14,5 +14,5 @@
     use self::users::dsl::*;
 
     let _ = users.select(name + name);
-    //~^ ERROR binary operation `+` cannot be applied to type `users::columns::name`
+    //~^ ERROR cannot add `users::columns::name` to `users::columns::name`
 }
diff --git a/diesel_compile_tests/tests/compile-fail/pg_on_conflict_requires_valid_conflict_target.rs b/diesel_compile_tests/tests/compile-fail/pg_on_conflict_requires_valid_conflict_target.rs
index db75eda..8c2e2b9 100644
--- a/diesel_compile_tests/tests/compile-fail/pg_on_conflict_requires_valid_conflict_target.rs
+++ b/diesel_compile_tests/tests/compile-fail/pg_on_conflict_requires_valid_conflict_target.rs
@@ -1,7 +1,7 @@
 #[macro_use] extern crate diesel;
 
 use diesel::*;
-use diesel::pg::upsert::*;
+use diesel::upsert::*;
 
 table! {
     users {
diff --git a/diesel_compile_tests/tests/compile-fail/pg_specific_expressions_cant_be_used_in_a_sqlite_query.rs b/diesel_compile_tests/tests/compile-fail/pg_specific_expressions_cant_be_used_in_a_sqlite_query.rs
index 03792c8..5a64265 100644
--- a/diesel_compile_tests/tests/compile-fail/pg_specific_expressions_cant_be_used_in_a_sqlite_query.rs
+++ b/diesel_compile_tests/tests/compile-fail/pg_specific_expressions_cant_be_used_in_a_sqlite_query.rs
@@ -3,6 +3,7 @@
 use diesel::*;
 use diesel::sql_types::*;
 use diesel::dsl::*;
+use diesel::upsert::on_constraint;
 
 table! {
     users {
@@ -32,7 +33,7 @@
         .load::<i32>(&connection);
     //~^ ERROR type mismatch resolving `<diesel::SqliteConnection as diesel::Connection>::Backend == diesel::pg::Pg`
     insert_into(users).values(&NewUser("Sean"))
-        .on_conflict_do_nothing()
+        .on_conflict(on_constraint("name"))
         .execute(&connection);
-    //~^ ERROR type mismatch resolving `<diesel::SqliteConnection as diesel::Connection>::Backend == diesel::pg::Pg`
+    //~^ ERROR no method named `execute` found
 }
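
The two upsert tests above reflect that `on_constraint` is now imported from `diesel::upsert` instead of `diesel::pg::upsert`, and that using it against SQLite no longer compiles. For contrast, here is a hedged sketch of the PostgreSQL-side usage; the constraint name `users_name_key`, the connection URL, and the `NewUser` definition are illustrative, not taken from the tests.

    #[macro_use]
    extern crate diesel;

    use diesel::*;
    use diesel::upsert::on_constraint;

    table! {
        users {
            id -> Integer,
            name -> VarChar,
        }
    }

    #[derive(Insertable)]
    #[table_name = "users"]
    struct NewUser<'a> {
        name: &'a str,
    }

    fn main() {
        // Illustrative URL; establishing fails unless it points at a real database.
        let connection = PgConnection::establish("postgres://localhost/diesel_demo").unwrap();

        // ON CONFLICT ON CONSTRAINT "users_name_key" DO NOTHING -- Pg only.
        let _ = insert_into(users::table)
            .values(&NewUser { name: "Sean" })
            .on_conflict(on_constraint("users_name_key"))
            .do_nothing()
            .execute(&connection);
    }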
diff --git a/diesel_compile_tests/tests/compile-fail/pg_upsert_do_update_requires_valid_update.rs b/diesel_compile_tests/tests/compile-fail/pg_upsert_do_update_requires_valid_update.rs
index 0413872..6240ef4 100644
--- a/diesel_compile_tests/tests/compile-fail/pg_upsert_do_update_requires_valid_update.rs
+++ b/diesel_compile_tests/tests/compile-fail/pg_upsert_do_update_requires_valid_update.rs
@@ -1,7 +1,7 @@
 #[macro_use] extern crate diesel;
 
 use diesel::*;
-use diesel::pg::upsert::*;
+use diesel::upsert::*;
 
 table! {
     users {
diff --git a/diesel_compile_tests/tests/ui/as_changeset_bad_column_name.stderr b/diesel_compile_tests/tests/ui/as_changeset_bad_column_name.stderr
index 69a4846..37ee63c 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_bad_column_name.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_bad_column_name.stderr
@@ -9,6 +9,8 @@
    |
 14 |     #[column_name = "hair_color"]
    |                     ^^^^^^^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0425]: cannot find value `name` in module `users`
   --> $DIR/as_changeset_bad_column_name.rs:13:5
@@ -21,22 +23,36 @@
    |
 14 |     #[column_name = "hair_color"]
    |                     ^^^^^^^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0412]: cannot find type `name` in module `users`
   --> $DIR/as_changeset_bad_column_name.rs:20:34
    |
 20 | struct UserTuple(#[column_name = "name"] String);
    |                                  ^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0425]: cannot find value `name` in module `users`
   --> $DIR/as_changeset_bad_column_name.rs:20:34
    |
 20 | struct UserTuple(#[column_name = "name"] String);
    |                                  ^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0601]: `main` function not found in crate `as_changeset_bad_column_name`
-  |
-  = note: consider adding a `main` function to `$DIR/as_changeset_bad_column_name.rs`
+  --> $DIR/as_changeset_bad_column_name.rs:1:1
+   |
+1  | / #[macro_use]
+2  | | extern crate diesel;
+3  | |
+4  | | table! {
+...  |
+19 | | #[table_name = "users"]
+20 | | struct UserTuple(#[column_name = "name"] String);
+   | |_________________________________________________^ consider adding a `main` function to `$DIR/as_changeset_bad_column_name.rs`
 
 error: aborting due to 7 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_bad_column_name_syntax.stderr b/diesel_compile_tests/tests/ui/as_changeset_bad_column_name_syntax.stderr
index 3dbd44a..925a4bb 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_bad_column_name_syntax.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_bad_column_name_syntax.stderr
@@ -17,8 +17,16 @@
    |       ^^^^^^^^^^^ not found in `users`
 
 error[E0601]: `main` function not found in crate `as_changeset_bad_column_name_syntax`
-  |
-  = note: consider adding a `main` function to `$DIR/as_changeset_bad_column_name_syntax.rs`
+  --> $DIR/as_changeset_bad_column_name_syntax.rs:1:1
+   |
+1  | / #[macro_use]
+2  | | extern crate diesel;
+3  | |
+4  | | table! {
+...  |
+15 | |     name: String,
+16 | | }
+   | |_^ consider adding a `main` function to `$DIR/as_changeset_bad_column_name_syntax.rs`
 
 error: aborting due to 4 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_bad_primary_key_syntax.stderr b/diesel_compile_tests/tests/ui/as_changeset_bad_primary_key_syntax.stderr
index 50bd073f..35df9b5 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_bad_primary_key_syntax.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_bad_primary_key_syntax.stderr
@@ -4,7 +4,7 @@
 12 | #[primary_key(id, bar = "baz", qux(id))]
    |                   ^^^^^^^^^^^
 
-error: Expected `qux` found `qux (id)`
+error: Expected `qux` found `qux(id)`
   --> $DIR/as_changeset_bad_primary_key_syntax.rs:12:32
    |
 12 | #[primary_key(id, bar = "baz", qux(id))]
@@ -15,6 +15,8 @@
    |
 13 | struct UserForm {
    |        ^^^^^^^^ use of undeclared type or module `user_forms`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 3 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_deprecated_column_name.stderr b/diesel_compile_tests/tests/ui/as_changeset_deprecated_column_name.stderr
index 18c0bb9..6fab5ba 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_deprecated_column_name.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_deprecated_column_name.stderr
@@ -10,3 +10,5 @@
 15 |     #[column_name(name)]
    |       ^^^^^^^^^^^^^^^^^
 
+warning: 2 warnings emitted
+
diff --git a/diesel_compile_tests/tests/ui/as_changeset_missing_column_name_tuple_struct.stderr b/diesel_compile_tests/tests/ui/as_changeset_missing_column_name_tuple_struct.stderr
index 7b355a3..704206b 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_missing_column_name_tuple_struct.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_missing_column_name_tuple_struct.stderr
@@ -35,8 +35,16 @@
    |                                                  ^^^^^^ not found in `users`
 
 error[E0601]: `main` function not found in crate `as_changeset_missing_column_name_tuple_struct`
-  |
-  = note: consider adding a `main` function to `$DIR/as_changeset_missing_column_name_tuple_struct.rs`
+  --> $DIR/as_changeset_missing_column_name_tuple_struct.rs:1:1
+   |
+1  | / #[macro_use]
+2  | | extern crate diesel;
+3  | |
+4  | | table! {
+...  |
+13 | | #[table_name = "users"]
+14 | | struct User(i32, #[column_name = "name"] String, String);
+   | |_________________________________________________________^ consider adding a `main` function to `$DIR/as_changeset_missing_column_name_tuple_struct.rs`
 
 error: aborting due to 7 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_missing_table_import.stderr b/diesel_compile_tests/tests/ui/as_changeset_missing_table_import.stderr
index c1bea10..7069041 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_missing_table_import.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_missing_table_import.stderr
@@ -3,12 +3,16 @@
   |
 5 | struct User {
   |        ^^^^ use of undeclared type or module `users`
+  |
+  = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0433]: failed to resolve: use of undeclared type or module `users`
   --> $DIR/as_changeset_missing_table_import.rs:11:16
    |
 11 | #[table_name = "users"]
    |                ^^^^^^^ use of undeclared type or module `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 2 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_on_non_struct.stderr b/diesel_compile_tests/tests/ui/as_changeset_on_non_struct.stderr
index 4102024..38ac2fb 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_on_non_struct.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_on_non_struct.stderr
@@ -3,10 +3,20 @@
    |
 12 | #[derive(AsChangeset)]
    |          ^^^^^^^^^^^
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0601]: `main` function not found in crate `as_changeset_on_non_struct`
-  |
-  = note: consider adding a `main` function to `$DIR/as_changeset_on_non_struct.rs`
+  --> $DIR/as_changeset_on_non_struct.rs:1:1
+   |
+1  | / #[macro_use]
+2  | | extern crate diesel;
+3  | |
+4  | | table! {
+...  |
+13 | | #[table_name = "users"]
+14 | | enum User {}
+   | |____________^ consider adding a `main` function to `$DIR/as_changeset_on_non_struct.rs`
 
 error: aborting due to 2 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_changeset_struct_with_only_primary_key.stderr b/diesel_compile_tests/tests/ui/as_changeset_struct_with_only_primary_key.stderr
index a58d533..22e6886 100644
--- a/diesel_compile_tests/tests/ui/as_changeset_struct_with_only_primary_key.stderr
+++ b/diesel_compile_tests/tests/ui/as_changeset_struct_with_only_primary_key.stderr
@@ -6,16 +6,27 @@
    |
    = help: If you want to change the primary key of a row, you should do so with `.set(table::id.eq(new_id))`.
    = note: `#[derive(AsChangeset)]` never changes the primary key of a row.
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0601]: `main` function not found in crate `as_changeset_struct_with_only_primary_key`
-  |
-  = note: consider adding a `main` function to `$DIR/as_changeset_struct_with_only_primary_key.rs`
+  --> $DIR/as_changeset_struct_with_only_primary_key.rs:1:1
+   |
+1  | / #[macro_use] extern crate diesel;
+2  | |
+3  | | table! {
+4  | |     foo {
+...  |
+20 | |     id: i32,
+21 | | }
+   | |_^ consider adding a `main` function to `$DIR/as_changeset_struct_with_only_primary_key.rs`
 
-error[E0277]: the trait bound `(): diesel::query_builder::AsChangeset` is not satisfied
+error[E0277]: the trait bound `(): diesel::AsChangeset` is not satisfied
   --> $DIR/as_changeset_struct_with_only_primary_key.rs:17:10
    |
 17 | #[derive(AsChangeset)]
-   |          ^^^^^^^^^^^ the trait `diesel::query_builder::AsChangeset` is not implemented for `()`
+   |          ^^^^^^^^^^^ the trait `diesel::AsChangeset` is not implemented for `()`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 3 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.rs b/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.rs
index 858001a..8915707 100644
--- a/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.rs
+++ b/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.rs
@@ -1,5 +1,6 @@
 #[macro_use]
 extern crate diesel;
+use diesel::expression::AsExpression;
 
 #[derive(AsExpression)]
 #[sql_type(Foo)]
diff --git a/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.stderr b/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.stderr
index b1d03b9..1506dd4 100644
--- a/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.stderr
+++ b/diesel_compile_tests/tests/ui/as_expression_bad_sql_type.stderr
@@ -1,25 +1,25 @@
 error: `sql_type` must be in the form `sql_type = "value"`
- --> $DIR/as_expression_bad_sql_type.rs:5:3
+ --> $DIR/as_expression_bad_sql_type.rs:6:3
   |
-5 | #[sql_type(Foo)]
+6 | #[sql_type(Foo)]
   |   ^^^^^^^^^^^^^
 
 error: `sql_type` must be in the form `sql_type = "value"`
- --> $DIR/as_expression_bad_sql_type.rs:6:3
+ --> $DIR/as_expression_bad_sql_type.rs:7:3
   |
-6 | #[sql_type]
+7 | #[sql_type]
   |   ^^^^^^^^
 
 error: Invalid Rust type
- --> $DIR/as_expression_bad_sql_type.rs:7:14
-  |
-7 | #[sql_type = "@%&&*"]
-  |              ^^^^^^^
-
-error: Invalid Rust type
  --> $DIR/as_expression_bad_sql_type.rs:8:14
   |
-8 | #[sql_type = "1omg"]
+8 | #[sql_type = "@%&&*"]
+  |              ^^^^^^^
+
+error: Invalid Rust type
+ --> $DIR/as_expression_bad_sql_type.rs:9:14
+  |
+9 | #[sql_type = "1omg"]
   |              ^^^^^^
 
 error: aborting due to 4 previous errors
diff --git a/diesel_compile_tests/tests/ui/belongs_to_incorrect_lifetime_syntax.stderr b/diesel_compile_tests/tests/ui/belongs_to_incorrect_lifetime_syntax.stderr
index 8a53c97..23d0e4e 100644
--- a/diesel_compile_tests/tests/ui/belongs_to_incorrect_lifetime_syntax.stderr
+++ b/diesel_compile_tests/tests/ui/belongs_to_incorrect_lifetime_syntax.stderr
@@ -1,6 +1,8 @@
 error[E0261]: use of undeclared lifetime name `'a`
   --> $DIR/belongs_to_incorrect_lifetime_syntax.rs:25:23
    |
+24 | #[derive(Associations)]
+   |          - help: consider introducing lifetime `'a` here: `'a,`
 25 | #[belongs_to(parent = "Foo<'a>")]
    |                       ^^^^^^^^^ undeclared lifetime
 
diff --git a/diesel_compile_tests/tests/ui/belongs_to_invalid_option_syntax.stderr b/diesel_compile_tests/tests/ui/belongs_to_invalid_option_syntax.stderr
index 9d92db1..b85aa9a 100644
--- a/diesel_compile_tests/tests/ui/belongs_to_invalid_option_syntax.stderr
+++ b/diesel_compile_tests/tests/ui/belongs_to_invalid_option_syntax.stderr
@@ -48,5 +48,5 @@
 35 | #[belongs_to(Baz, foreign_key = "bar_id", random_option)]
    |   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error: aborting due to 5 previous errors
+error: aborting due to 5 previous errors; 2 warnings emitted
 
diff --git a/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_column.stderr b/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_column.stderr
index bbfe4ce..e82241f 100644
--- a/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_column.stderr
+++ b/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_column.stderr
@@ -15,12 +15,16 @@
    |
 20 | #[belongs_to(Bar, foreign_key = "bar_id")]
    |                                 ^^^^^^^^ not found in `foo`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0425]: cannot find value `bar_id` in module `foo`
   --> $DIR/belongs_to_missing_foreign_key_column.rs:20:33
    |
 20 | #[belongs_to(Bar, foreign_key = "bar_id")]
    |                                 ^^^^^^^^ not found in `foo`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0412]: cannot find type `bar_id` in module `foo`
   --> $DIR/belongs_to_missing_foreign_key_column.rs:27:14
@@ -39,12 +43,16 @@
    |
 35 | #[belongs_to(Bar, foreign_key = "bar_id")]
    |                                 ^^^^^^^^ not found in `foo`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0425]: cannot find value `bar_id` in module `foo`
   --> $DIR/belongs_to_missing_foreign_key_column.rs:35:33
    |
 35 | #[belongs_to(Bar, foreign_key = "bar_id")]
    |                                 ^^^^^^^^ not found in `foo`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 8 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_field.stderr b/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_field.stderr
index 567c61d..0bc028f 100644
--- a/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_field.stderr
+++ b/diesel_compile_tests/tests/ui/belongs_to_missing_foreign_key_field.stderr
@@ -9,6 +9,8 @@
   |
 8 | #[belongs_to(Bar, foreign_key = "bar_id")]
   |                                 ^^^^^^^^
+  |
+  = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: No field with column name bar_id
   --> $DIR/belongs_to_missing_foreign_key_field.rs:12:14
@@ -21,6 +23,8 @@
    |
 13 | #[belongs_to(Bar, foreign_key = "bar_id")]
    |                                 ^^^^^^^^
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 4 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/belongs_to_second_parent.stderr b/diesel_compile_tests/tests/ui/belongs_to_second_parent.stderr
index 61a4787..97b729a 100644
--- a/diesel_compile_tests/tests/ui/belongs_to_second_parent.stderr
+++ b/diesel_compile_tests/tests/ui/belongs_to_second_parent.stderr
@@ -8,3 +8,5 @@
 29 | #[belongs_to(Bar, Baz)]
    |   ^^^^^^^^^^^^^^^^^^^^
 
+warning: 1 warning emitted
+
diff --git a/diesel_compile_tests/tests/ui/identifiable_missing_pk_field.stderr b/diesel_compile_tests/tests/ui/identifiable_missing_pk_field.stderr
index 7edb77b..12ddc18 100644
--- a/diesel_compile_tests/tests/ui/identifiable_missing_pk_field.stderr
+++ b/diesel_compile_tests/tests/ui/identifiable_missing_pk_field.stderr
@@ -3,12 +3,16 @@
    |
 10 | #[derive(Identifiable)]
    |          ^^^^^^^^^^^^
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: No field with column name id
   --> $DIR/identifiable_missing_pk_field.rs:15:10
    |
 15 | #[derive(Identifiable)]
    |          ^^^^^^^^^^^^
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: No field with column name bar
   --> $DIR/identifiable_missing_pk_field.rs:23:15
diff --git a/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.rs b/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.rs
new file mode 100644
index 0000000..f554226
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.rs
@@ -0,0 +1,22 @@
+extern crate diesel;
+
+use diesel::*;
+
+table! {
+    users {
+        id -> Integer,
+    }
+}
+
+fn main() {
+    let connection = SqliteConnection::establish("").unwrap();
+
+    users::table.select(users::id)
+        .insert_into(users::table)
+        .into_columns(users::id)
+        .on_conflict(users::id)
+        .do_nothing()
+        .execute(&connection)
+        .unwrap();
+
+}
diff --git a/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.stderr b/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.stderr
new file mode 100644
index 0000000..14799e1
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.stderr
@@ -0,0 +1,9 @@
+error[E0271]: type mismatch resolving `<diesel::SqliteConnection as diesel::Connection>::Backend == diesel::pg::Pg`
+  --> $DIR/insert_from_select_with_on_conflict_without_where_clause_not_supported_on_sqlite.rs:19:10
+   |
+19 |         .execute(&connection)
+   |          ^^^^^^^ expected struct `diesel::sqlite::Sqlite`, found struct `diesel::pg::Pg`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0271`.
diff --git a/diesel_compile_tests/tests/ui/insertable_empty_struct.stderr b/diesel_compile_tests/tests/ui/insertable_empty_struct.stderr
index b6f1bab..c20eb7e 100644
--- a/diesel_compile_tests/tests/ui/insertable_empty_struct.stderr
+++ b/diesel_compile_tests/tests/ui/insertable_empty_struct.stderr
@@ -5,6 +5,7 @@
    |          ^^^^^^^^^^
    |
    = help: Use `insert_into(users::table).default_values()` if you want `DEFAULT VALUES`
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to previous error
 
diff --git a/diesel_compile_tests/tests/ui/insertable_missing_table_or_column.stderr b/diesel_compile_tests/tests/ui/insertable_missing_table_or_column.stderr
index de44efc..af62686f 100644
--- a/diesel_compile_tests/tests/ui/insertable_missing_table_or_column.stderr
+++ b/diesel_compile_tests/tests/ui/insertable_missing_table_or_column.stderr
@@ -3,12 +3,16 @@
    |
 11 | struct Post {
    |        ^^^^ use of undeclared type or module `posts`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0433]: failed to resolve: use of undeclared type or module `posts`
   --> $DIR/insertable_missing_table_or_column.rs:16:16
    |
 16 | #[table_name = "posts"]
    |                ^^^^^^^ use of undeclared type or module `posts`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0412]: cannot find type `name` in module `users`
   --> $DIR/insertable_missing_table_or_column.rs:24:5
@@ -27,12 +31,16 @@
    |
 30 |     #[column_name = "name"]
    |                     ^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error[E0425]: cannot find value `name` in module `users`
   --> $DIR/insertable_missing_table_or_column.rs:30:21
    |
 30 |     #[column_name = "name"]
    |                     ^^^^^^ not found in `users`
+   |
+   = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to 6 previous errors
 
diff --git a/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.rs b/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.rs
new file mode 100644
index 0000000..385030d
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.rs
@@ -0,0 +1,21 @@
+#[macro_use]
+extern crate diesel;
+
+use diesel::*;
+use diesel::dsl::*;
+
+table! {
+    users {
+        id -> Integer,
+        name -> VarChar,
+    }
+}
+
+
+
+fn main() {
+    let connection = MysqlConnection::establish("").unwrap();
+    users::table.offset(42).get_result::<(i32, String)>(&connection);
+
+    users::table.offset(42).into_boxed().get_result::<(i32, String)>(&connection);
+}
diff --git a/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.stderr b/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.stderr
new file mode 100644
index 0000000..08ccca5
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/mysql_does_not_support_offset_without_limit.stderr
@@ -0,0 +1,32 @@
+error[E0277]: the trait bound `diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>: diesel::query_builder::QueryFragment<diesel::mysql::Mysql>` is not satisfied
+  --> $DIR/mysql_does_not_support_offset_without_limit.rs:18:29
+   |
+18 |     users::table.offset(42).get_result::<(i32, String)>(&connection);
+   |                             ^^^^^^^^^^ the trait `diesel::query_builder::QueryFragment<diesel::mysql::Mysql>` is not implemented for `diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>`
+   |
+   = help: the following implementations were found:
+             <diesel::query_builder::LimitOffsetClause<L, O> as diesel::query_builder::QueryFragment<diesel::pg::Pg>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::NoOffsetClause> as diesel::query_builder::QueryFragment<diesel::mysql::Mysql>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::NoOffsetClause> as diesel::query_builder::QueryFragment<diesel::sqlite::Sqlite>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::OffsetClause<O>> as diesel::query_builder::QueryFragment<diesel::mysql::Mysql>>
+           and 4 others
+   = note: required because of the requirements on the impl of `diesel::query_builder::QueryFragment<diesel::mysql::Mysql>` for `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::NoWhereClause, diesel::query_builder::order_clause::NoOrderClause, diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>>`
+   = note: required because of the requirements on the impl of `diesel::query_dsl::LoadQuery<diesel::MysqlConnection, (i32, std::string::String)>` for `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::NoWhereClause, diesel::query_builder::order_clause::NoOrderClause, diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>>`
+
+error[E0277]: the trait bound `diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>: diesel::query_builder::IntoBoxedClause<'_, diesel::mysql::Mysql>` is not satisfied
+  --> $DIR/mysql_does_not_support_offset_without_limit.rs:20:29
+   |
+20 |     users::table.offset(42).into_boxed().get_result::<(i32, String)>(&connection);
+   |                             ^^^^^^^^^^ the trait `diesel::query_builder::IntoBoxedClause<'_, diesel::mysql::Mysql>` is not implemented for `diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>`
+   |
+   = help: the following implementations were found:
+             <diesel::query_builder::LimitOffsetClause<L, O> as diesel::query_builder::IntoBoxedClause<'a, diesel::pg::Pg>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::NoOffsetClause> as diesel::query_builder::IntoBoxedClause<'a, diesel::mysql::Mysql>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::NoOffsetClause> as diesel::query_builder::IntoBoxedClause<'a, diesel::sqlite::Sqlite>>
+             <diesel::query_builder::LimitOffsetClause<diesel::query_builder::LimitClause<L>, diesel::query_builder::OffsetClause<O>> as diesel::query_builder::IntoBoxedClause<'a, diesel::mysql::Mysql>>
+           and 4 others
+   = note: required because of the requirements on the impl of `diesel::query_dsl::boxed_dsl::BoxedDsl<'_, diesel::mysql::Mysql>` for `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::NoWhereClause, diesel::query_builder::order_clause::NoOrderClause, diesel::query_builder::LimitOffsetClause<diesel::query_builder::NoLimitClause, diesel::query_builder::OffsetClause<diesel::expression::bound::Bound<diesel::sql_types::BigInt, i64>>>>`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
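
The help output above already hints at the workaround: MySQL only gets a `QueryFragment` impl for a `LimitOffsetClause` that carries a limit clause, so pairing `offset` with `limit` type-checks. A rough sketch (assuming the `mysql` feature and a hypothetical `users` table like the one in the test above):

```rust
#[macro_use]
extern crate diesel;

use diesel::*;

table! {
    users {
        id -> Integer,
        name -> VarChar,
    }
}

fn main() {
    let connection = MysqlConnection::establish("mysql://localhost/diesel_test").unwrap();

    // `LimitClause` + `OffsetClause` matches one of the impls listed in the
    // help text above, so this variant compiles where the bare `offset` does not.
    let _ = users::table
        .limit(10)
        .offset(42)
        .load::<(i32, String)>(&connection);
}
```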
diff --git a/diesel_compile_tests/tests/ui/sql_type_bad_options.rs b/diesel_compile_tests/tests/ui/sql_type_bad_options.rs
index 131d33c..6a1d4e3 100644
--- a/diesel_compile_tests/tests/ui/sql_type_bad_options.rs
+++ b/diesel_compile_tests/tests/ui/sql_type_bad_options.rs
@@ -1,6 +1,7 @@
-#[macro_use]
 extern crate diesel;
 
+use diesel::sql_types::SqlType;
+
 #[derive(SqlType)]
 #[postgres]
 struct Type1;
diff --git a/diesel_compile_tests/tests/ui/sql_type_bad_options.stderr b/diesel_compile_tests/tests/ui/sql_type_bad_options.stderr
index fb0eb88..b03e52d 100644
--- a/diesel_compile_tests/tests/ui/sql_type_bad_options.stderr
+++ b/diesel_compile_tests/tests/ui/sql_type_bad_options.stderr
@@ -1,50 +1,50 @@
 error: `postgres` must be in the form `postgres(...)`
- --> $DIR/sql_type_bad_options.rs:5:3
+ --> $DIR/sql_type_bad_options.rs:6:3
   |
-5 | #[postgres]
+6 | #[postgres]
   |   ^^^^^^^^
 
 warning: Option oid has no effect
- --> $DIR/sql_type_bad_options.rs:9:31
-  |
-9 | #[postgres(type_name = "foo", oid = "2", array_oid = "3")]
-  |                               ^^^^^^^^^
+  --> $DIR/sql_type_bad_options.rs:10:31
+   |
+10 | #[postgres(type_name = "foo", oid = "2", array_oid = "3")]
+   |                               ^^^^^^^^^
 
 warning: Option array_oid has no effect
- --> $DIR/sql_type_bad_options.rs:9:42
-  |
-9 | #[postgres(type_name = "foo", oid = "2", array_oid = "3")]
-  |                                          ^^^^^^^^^^^^^^^
+  --> $DIR/sql_type_bad_options.rs:10:42
+   |
+10 | #[postgres(type_name = "foo", oid = "2", array_oid = "3")]
+   |                                          ^^^^^^^^^^^^^^^
 
 error: Missing required option `array_oid`
-  --> $DIR/sql_type_bad_options.rs:13:3
+  --> $DIR/sql_type_bad_options.rs:14:3
    |
-13 | #[postgres(oid = "2")]
+14 | #[postgres(oid = "2")]
    |   ^^^^^^^^^^^^^^^^^^^
 
 error: Expected a number
-  --> $DIR/sql_type_bad_options.rs:17:18
+  --> $DIR/sql_type_bad_options.rs:18:18
    |
-17 | #[postgres(oid = "NaN", array_oid = "1")]
+18 | #[postgres(oid = "NaN", array_oid = "1")]
    |                  ^^^^^
 
 warning: Option ary_oid has no effect
-  --> $DIR/sql_type_bad_options.rs:21:25
+  --> $DIR/sql_type_bad_options.rs:22:25
    |
-21 | #[postgres(oid = "NaN", ary_oid = "1")]
+22 | #[postgres(oid = "NaN", ary_oid = "1")]
    |                         ^^^^^^^^^^^^^
 
 error: Missing required option `array_oid`
-  --> $DIR/sql_type_bad_options.rs:21:3
+  --> $DIR/sql_type_bad_options.rs:22:3
    |
-21 | #[postgres(oid = "NaN", ary_oid = "1")]
+22 | #[postgres(oid = "NaN", ary_oid = "1")]
    |   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: `postgres` must be in the form `postgres(...)`
-  --> $DIR/sql_type_bad_options.rs:25:3
+  --> $DIR/sql_type_bad_options.rs:26:3
    |
-25 | #[postgres = "foo"]
+26 | #[postgres = "foo"]
    |   ^^^^^^^^^^^^^^^^
 
-error: aborting due to 5 previous errors
+error: aborting due to 5 previous errors; 3 warnings emitted
 
diff --git a/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.rs b/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.rs
new file mode 100644
index 0000000..7ec7095
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.rs
@@ -0,0 +1,18 @@
+extern crate diesel;
+
+use diesel::*;
+
+table! {
+    users {
+        id -> Integer,
+    }
+}
+
+fn main() {
+    let connection = SqliteConnection::establish("").unwrap();
+
+    diesel::insert_into(users::table)
+        .values(vec![users::id.eq(42), users::id.eq(43)])
+        .on_conflict_do_nothing()
+        .execute(&connection);
+}
diff --git a/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.stderr b/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.stderr
new file mode 100644
index 0000000..e38a926
--- /dev/null
+++ b/diesel_compile_tests/tests/ui/upsert_with_multiple_values_not_supported_on_sqlite.stderr
@@ -0,0 +1,14 @@
+error[E0277]: the trait bound `diesel::sqlite::Sqlite: diesel::backend::SupportsDefaultKeyword` is not satisfied
+  --> $DIR/upsert_with_multiple_values_not_supported_on_sqlite.rs:17:10
+   |
+17 |         .execute(&connection);
+   |          ^^^^^^^ the trait `diesel::backend::SupportsDefaultKeyword` is not implemented for `diesel::sqlite::Sqlite`
+   |
+   = note: required because of the requirements on the impl of `diesel::query_builder::QueryFragment<diesel::sqlite::Sqlite>` for `diesel::insertable::OwnedBatchInsert<diesel::query_builder::ValuesClause<diesel::insertable::ColumnInsertValue<users::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>, users::table>, users::table>`
+   = note: required because of the requirements on the impl of `diesel::query_builder::QueryFragment<diesel::sqlite::Sqlite>` for `diesel::query_builder::upsert::on_conflict_clause::OnConflictValues<diesel::insertable::OwnedBatchInsert<diesel::query_builder::ValuesClause<diesel::insertable::ColumnInsertValue<users::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>, users::table>, users::table>, diesel::query_builder::upsert::on_conflict_target::NoConflictTarget, diesel::query_builder::upsert::on_conflict_actions::DoNothing>`
+   = note: required because of the requirements on the impl of `diesel::query_builder::QueryFragment<diesel::sqlite::Sqlite>` for `diesel::query_builder::InsertStatement<users::table, diesel::query_builder::upsert::on_conflict_clause::OnConflictValues<diesel::insertable::OwnedBatchInsert<diesel::query_builder::ValuesClause<diesel::insertable::ColumnInsertValue<users::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>, users::table>, users::table>, diesel::query_builder::upsert::on_conflict_target::NoConflictTarget, diesel::query_builder::upsert::on_conflict_actions::DoNothing>>`
+   = note: required because of the requirements on the impl of `diesel::query_dsl::load_dsl::ExecuteDsl<diesel::SqliteConnection, diesel::sqlite::Sqlite>` for `diesel::query_builder::InsertStatement<users::table, diesel::query_builder::upsert::on_conflict_clause::OnConflictValues<diesel::insertable::OwnedBatchInsert<diesel::query_builder::ValuesClause<diesel::insertable::ColumnInsertValue<users::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>, users::table>, users::table>, diesel::query_builder::upsert::on_conflict_target::NoConflictTarget, diesel::query_builder::upsert::on_conflict_actions::DoNothing>>`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/diesel_derives/Cargo.toml b/diesel_derives/Cargo.toml
index 451f670..e0e606f 100644
--- a/diesel_derives/Cargo.toml
+++ b/diesel_derives/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "diesel_derives"
-version = "1.4.1"
+version = "2.0.0"
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 license = "MIT OR Apache-2.0"
 description = "You should not use this crate directly, it is internal to Diesel."
@@ -16,9 +16,12 @@
 
 [dev-dependencies]
 cfg-if = "0.1.0"
-diesel = "1.4.0"
 dotenv = "0.10.0"
 
+[dev-dependencies.diesel]
+version = "~2.0.0"
+path = "../diesel"
+
 [lib]
 proc-macro = true
 
diff --git a/diesel_derives/src/lib.rs b/diesel_derives/src/lib.rs
index 5c3361d..aae4f53 100644
--- a/diesel_derives/src/lib.rs
+++ b/diesel_derives/src/lib.rs
@@ -3,6 +3,7 @@
 #![deny(warnings, missing_copy_implementations)]
 // Clippy lints
 #![allow(
+    clippy::needless_doctest_main,
     clippy::needless_pass_by_value,
     clippy::option_map_unwrap_or_else,
     clippy::option_map_unwrap_or
@@ -42,15 +43,51 @@
 mod from_sql_row;
 mod identifiable;
 mod insertable;
-mod non_aggregate;
 mod query_id;
 mod queryable;
 mod queryable_by_name;
 mod sql_function;
 mod sql_type;
+mod valid_grouping;
 
 use diagnostic_shim::*;
 
+/// Implements `AsChangeset`
+///
+/// To implement `AsChangeset` this derive needs to know the corresponding table
+/// type. By default it uses the `snake_case` type name with an added `s`.
+/// It is possible to change this default by using `#[table_name = "something"]`.
+/// In both cases the module for that table must be in scope.
+/// For example, to derive this for a struct called `User`, you will
+/// likely need a line such as `use schema::users;`
+///
+/// If a field name of your struct differs
+/// from the name of the corresponding column, you can annotate the field with
+/// `#[column_name = "some_column_name"]`.
+///
+/// By default, any `Option` fields on the struct are skipped if their value is
+/// `None`. If you would like to assign `NULL` to the field instead, you can
+/// annotate your struct with `#[changeset_options(treat_none_as_null =
+/// "true")]`.
+///
+/// # Attributes
+///
+/// ## Optional type attributes
+///
+/// * `#[table_name = "some_table"]`, specifies the table for which the
+/// current type is a changeset. Requires that `some_table` is in scope.
+/// If this attribute is not used, the type name converted to
+/// `snake_case` with an added `s` is used as table name
+/// * `#[changeset_options(treat_none_as_null = "true")]`, specifies that
+/// the derive should treat `None` values as `NULL`. By default
+/// `Option::<T>::None` is just skipped. To insert a `NULL` while keeping the
+/// default behavior, use `Option::<Option<T>>::Some(None)`
+///
+/// ## Optional field attributes
+///
+/// * `#[column_name = "some_column_name"]`, overrides the column name
+/// of the current field to `some_column_name`. By default the field
+/// name is used as column name.
 #[proc_macro_derive(
     AsChangeset,
     attributes(table_name, primary_key, column_name, changeset_options)
@@ -59,61 +96,700 @@
     expand_proc_macro(input, as_changeset::derive)
 }
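
The new `AsChangeset` docs above list the attributes but stop short of a complete example. A minimal sketch of how they combine (hypothetical `users` table and `UserChangeset` struct, not taken from this diff):

```rust
#[macro_use]
extern crate diesel;

table! {
    users {
        id -> Integer,
        name -> VarChar,
    }
}

// `#[table_name]` points the derive at the in-scope `users` table,
// `#[column_name]` maps the differently named field onto the `name` column,
// and the `Option` field is skipped when it holds `None`.
#[derive(AsChangeset)]
#[table_name = "users"]
struct UserChangeset {
    #[column_name = "name"]
    user_name: Option<String>,
}

fn main() {}
```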
 
+/// Implements all required variants of `AsExpression`
+///
+/// This derive will generate the following impls:
+///
+/// - `impl AsExpression<SqlType> for YourType`
+/// - `impl AsExpression<Nullable<SqlType>> for YourType`
+/// - `impl AsExpression<SqlType> for &'a YourType`
+/// - `impl AsExpression<Nullable<SqlType>> for &'a YourType`
+/// - `impl AsExpression<SqlType> for &'a &'b YourType`
+/// - `impl AsExpression<Nullable<SqlType>> for &'a &'b YourType`
+///
+/// If your type is unsized,
+/// you can specify this by adding the annotation `#[diesel(not_sized)]`
+/// as an attribute on the type. This will skip the impls for non-reference types.
+///
+/// # Attributes
+///
+/// ## Required type attributes
+///
+/// * `#[sql_type = "SqlType"]`, to specify the sql type of the
+///  generated implementations. If the attribute exists multiple times,
+///  impls for each sql type are generated.
+///
+/// ## Optional type attribute
+///
+/// * `#[diesel(not_sized)]`, to skip generating impls that require
+///   that the type is `Sized`
 #[proc_macro_derive(AsExpression, attributes(diesel, sql_type))]
 pub fn derive_as_expression(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, as_expression::derive)
 }
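
As a rough illustration of the attribute described above (the `CanonicalName` newtype is hypothetical), deriving `AsExpression` for a wrapper around `String` might look like:

```rust
#[macro_use]
extern crate diesel;

use diesel::sql_types::Text;

// One `#[sql_type]` attribute per SQL type; repeating the attribute would
// generate an additional set of `AsExpression` impls for the second type.
#[derive(AsExpression)]
#[sql_type = "Text"]
struct CanonicalName(String);

fn main() {}
```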
 
+/// Implements required traits for the associations API
+///
+/// This derive implements support for Diesel's associations API. Check the
+/// module-level documentation of the `diesel::associations` module for details.
+///
+/// # Attributes
+///
+/// ## Required type attributes
+///
+/// * `#[belongs_to(User)]`, to specify a child-to-parent relationship
+/// between the current type and the specified parent type (`User`).
+/// If this attribute is given multiple times, multiple relationships
+/// are generated.
+/// * `#[belongs_to(User, foreign_key = "mykey")]`, variant of the attribute
+/// above. Allows specifying the name of the foreign key. If the foreign key
+/// is not specified explicitly, the lowercased remote type name with an
+/// appended `_id` is used as the foreign key name (`user_id` in this
+/// example).
+///
+/// ## Optional type attributes
+///
+/// * `#[table_name = "some_table_name"]` specifies the table this
+///    type belongs to. Requires that `some_table_name` is in scope.
+///    If this attribute is not used, the type name converted to
+///    `snake_case` with an added `s` is used as table name
+///
+/// ## Optional field attributes
+///
+/// * `#[column_name = "some_column_name"]`, overrides the column the current
+/// field maps to, setting it to `some_column_name`. By default the field name
+/// is used as the column name. Only useful for the foreign key field.
+///
 #[proc_macro_derive(Associations, attributes(belongs_to, column_name, table_name))]
 pub fn derive_associations(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, associations::derive)
 }
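
A minimal sketch of the `belongs_to` setup described above (hypothetical `users`/`posts` tables; the foreign key falls back to the default `user_id`):

```rust
#[macro_use]
extern crate diesel;

table! {
    users {
        id -> Integer,
    }
}

table! {
    posts {
        id -> Integer,
        user_id -> Integer,
    }
}

#[derive(Identifiable)]
struct User {
    id: i32,
}

// No `foreign_key` option is given, so the lowercased parent type name plus
// `_id` (`user_id`) is used to locate the foreign key field and column.
#[derive(Identifiable, Associations)]
#[belongs_to(User)]
struct Post {
    id: i32,
    user_id: i32,
}

fn main() {}
```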
 
+/// Implements numeric operators for the current query node
 #[proc_macro_derive(DieselNumericOps)]
 pub fn derive_diesel_numeric_ops(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, diesel_numeric_ops::derive)
 }
 
+/// Implements `FromSqlRow` and `Queryable`
+///
+/// This derive is mostly useful to implement support for deserializing
+/// into Rust types not supported by Diesel itself.
+///
+/// There are no options or special considerations needed for this derive.
 #[proc_macro_derive(FromSqlRow, attributes(diesel))]
 pub fn derive_from_sql_row(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, from_sql_row::derive)
 }
 
+/// Implements `Identifiable` for references of the current type
+///
+/// By default, the primary key field is assumed to be a single field called `id`.
+/// If it's not, you can put `#[primary_key(your_id)]` on your struct.
+/// If you have a composite primary key, the syntax is `#[primary_key(id1, id2)]`.
+///
+/// By default, `#[derive(Identifiable)]` will assume that your table
+/// name is the plural form of your struct name.
+/// Diesel uses very simple pluralization rules.
+/// It only adds an `s` to the end, and converts `CamelCase` to `snake_case`.
+/// If your table name does not follow this convention
+/// or the plural form isn't just an `s`,
+/// you can specify the table name with `#[table_name = "some_table_name"]`.
+/// In both cases the module for that table must be in scope.
+/// For example, to derive this for a struct called `User`, you will
+/// likely need a line such as `use schema::users;`
+/// Our rules for inferring table names are considered public API.
+/// It will never change without a major version bump.
+///
+/// # Attributes
+///
+/// ## Optional type attributes
+///
+/// * `#[table_name = "some_table_name"]` specifies the table this
+///    type belongs to. Requires that `some_table_name` is in scope.
+///    If this attribute is not used, the type name converted to
+///    `snake_case` with an added `s` is used as table name
+/// * `#[primary_key(id1, id2)]` to specify the struct fields that
+///    correspond to the primary key. If not used, `id` will be
+///    assumed as the primary key field
+///
+///
 #[proc_macro_derive(Identifiable, attributes(table_name, primary_key, column_name))]
 pub fn derive_identifiable(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, identifiable::derive)
 }
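
To illustrate the non-default case described above, a sketch with a hypothetical `users` table whose primary key is not called `id`:

```rust
#[macro_use]
extern crate diesel;

table! {
    users (legacy_id) {
        legacy_id -> Integer,
        name -> VarChar,
    }
}

// `Person` would pluralize to `persons`, so the table is named explicitly;
// the primary key field is also not `id` and therefore needs `#[primary_key]`.
#[derive(Identifiable)]
#[table_name = "users"]
#[primary_key(legacy_id)]
struct Person {
    legacy_id: i32,
    name: String,
}

fn main() {}
```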
 
+/// Implements `Insertable`
+///
+/// To implement `Insertable` this derive needs to know the corresponding table
+/// type. By default it uses the `snake_case` type name with an added `s`.
+/// It is possible to change this default by using `#[table_name = "something"]`.
+/// In both cases the module for that table must be in scope.
+/// For example, to derive this for a struct called `User`, you will
+/// likely need a line such as `use schema::users;`
+///
+/// If a field name of your
+/// struct differs from the name of the corresponding column,
+/// you can annotate the field with `#[column_name = "some_column_name"]`.
+///
+/// Your struct can also contain fields which implement `Insertable`. This is
+/// useful when you want to have one field map to more than one column (for
+/// example, an enum that maps to a label and a value column). Add
+/// `#[diesel(embed)]` to any such fields.
+///
+/// # Attributes
+///
+/// ## Optional type attributes
+///
+/// * `#[table_name = "some_table_name"]`, specifies the table this type
+/// is insertable into. Requires that `some_table_name` is in scope.
+/// If this attribute is not used, the type name converted to
+/// `snake_case` with an added `s` is used as table name
+///
+/// ## Optional field attributes
+///
+/// * `#[column_name = "some_table_name"]`, overrides the column the current
+/// field maps to `some_table_name`. By default the field name is used
+/// as column name
+/// * `#[diesel(embed)]`, specifies that the current field maps not only
+/// to single database field, but is a struct that implements `Insertable`
 #[proc_macro_derive(Insertable, attributes(table_name, column_name, diesel))]
 pub fn derive_insertable(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, insertable::derive)
 }
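
A sketch of `#[diesel(embed)]` as described above, assuming a hypothetical `users` table (both structs target the same table):

```rust
#[macro_use]
extern crate diesel;

table! {
    users {
        id -> Integer,
        name -> VarChar,
    }
}

#[derive(Insertable)]
#[table_name = "users"]
struct NameOnly {
    name: String,
}

// The embedded field contributes the `name` column, so `NewUser` as a whole
// still maps onto the single `users` table.
#[derive(Insertable)]
#[table_name = "users"]
struct NewUser {
    id: i32,
    #[diesel(embed)]
    name_part: NameOnly,
}

fn main() {}
```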
 
+#[doc(hidden)]
 #[proc_macro_derive(NonAggregate)]
 pub fn derive_non_aggregate(input: TokenStream) -> TokenStream {
-    expand_proc_macro(input, non_aggregate::derive)
+    eprintln!(
+        "#[derive(NonAggregate)] is deprecated. Please use \
+         `#[derive(ValidGrouping)]` instead."
+    );
+    expand_proc_macro(input, valid_grouping::derive)
 }
 
+/// Implements `QueryId`
+///
+/// For example, given this struct:
+///
+/// ```rust
+/// # extern crate diesel;
+/// #[derive(diesel::query_builder::QueryId)]
+/// pub struct And<Left, Right> {
+///     left: Left,
+///     right: Right,
+/// }
+/// ```
+///
+/// the following implementation will be generated
+///
+/// ```rust
+/// # extern crate diesel;
+/// # struct And<Left, Right>(Left, Right);
+/// # use diesel::query_builder::QueryId;
+/// impl<Left, Right> QueryId for And<Left, Right>
+/// where
+///     Left: QueryId,
+///     Right: QueryId,
+/// {
+///     type QueryId = And<Left::QueryId, Right::QueryId>;
+///
+///     const HAS_STATIC_QUERY_ID: bool = Left::HAS_STATIC_QUERY_ID && Right::HAS_STATIC_QUERY_ID;
+/// }
+/// ```
+///
+/// If the SQL generated by a struct is not uniquely identifiable by its type,
+/// meaning that `HAS_STATIC_QUERY_ID` should always be false,
+/// you should not derive this trait.
+/// In that case you should implement it manually instead.
 #[proc_macro_derive(QueryId)]
 pub fn derive_query_id(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, query_id::derive)
 }
 
+/// Implements `Queryable`
+///
+/// This trait can only be derived for structs, not enums.
+///
+/// **When this trait is derived, it will assume that the order of fields on your
+/// struct matches the order of the fields in the query. This means that field
+/// order is significant if you are using `#[derive(Queryable)]`. Field names have
+/// no effect.**
+///
+/// To provide custom deserialization behavior for a field, you can use
+/// `#[diesel(deserialize_as = "SomeType")]`. If this attribute is present, Diesel
+/// will deserialize the corresponding field into `SomeType`, rather than the
+/// actual field type on your struct and then call `.into` to convert it to the
+/// actual field type. This can be used to add custom behavior for a
+/// single field, or use types that are otherwise unsupported by Diesel.
+///
+/// # Attributes
+///
+/// ## Optional field attributes:
+///
+/// * `#[diesel(deserialize_as = "Type")]`, instead of deserializing directly
+///   into the field type, the implementation will deserialize into `Type`.
+///   Then `Type` is converted via `.into()` into the field type. By default
+///   this derive will deserialize directly into the field type
 #[proc_macro_derive(Queryable, attributes(column_name, diesel))]
 pub fn derive_queryable(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, queryable::derive)
 }
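
A sketch of `deserialize_as` as described above (the `LowercaseString` wrapper is hypothetical): the row is read as `String` and converted with `.into()`.

```rust
#[macro_use]
extern crate diesel;

struct LowercaseString(String);

impl From<String> for LowercaseString {
    fn from(s: String) -> Self {
        LowercaseString(s.to_lowercase())
    }
}

// Field order must match the order of the columns in the executed query;
// the `name` field is deserialized as `String` first, then converted.
#[derive(Queryable)]
struct User {
    id: i32,
    #[diesel(deserialize_as = "String")]
    name: LowercaseString,
}

fn main() {}
```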
 
+/// Implements `QueryableByName`
+///
+/// To derive this trait, Diesel needs to know the SQL type of each field. You
+/// can do this by either annotating your struct with `#[table_name =
+/// "some_table"]` (in which case the SQL type will be
+/// `diesel::dsl::SqlTypeOf<table_name::column_name>`), or by annotating each
+/// field with `#[sql_type = "SomeType"]`.
+///
+/// If you are using `#[table_name]`, the module for that table must be in
+/// scope. For example, to derive this for a struct called `User`, you will
+/// likely need a line such as `use schema::users;`
+///
+/// If the name of a field on your struct is different than the column in your
+/// `table!` declaration, or if you are deriving this trait on a tuple struct,
+/// you can annotate the field with `#[column_name = "some_column"]`. For tuple
+/// structs, all fields must have this annotation.
+///
+/// If a field is another struct which implements `QueryableByName`,
+/// instead of a column, you can annotate that struct with `#[diesel(embed)]`.
+/// Then all fields contained by that inner struct are loaded into
+/// the embedded struct.
+///
+/// To provide custom deserialization behavior for a field, you can use
+/// `#[diesel(deserialize_as = "SomeType")]`. If this attribute is present, Diesel
+/// will deserialize the corresponding field into `SomeType`, rather than the
+/// actual field type on your struct and then call `.into` to convert it to the
+/// actual field type. This can be used to add custom behavior for a
+/// single field, or use types that are otherwise unsupported by Diesel.
+///
+/// # Attributes
+///
+/// ## Type attributes
+///
+/// * `#[table_name = "some_table"]`, to specify that this type contains
+///   columns for the specified table. If no field attributes are specified
+///   the derive will use the sql type of the corresponding column.
+///
+/// ## Field attributes
+/// * `#[column_name = "some_column"]`, overrides the column name for
+///    a given field. If not set, the name of the field is used as column
+///    name. This attribute is required on tuple structs if
+///    `#[table_name = "some_table"]` is used; otherwise it's optional.
+/// * `#[sql_type = "SomeType"]`, assumes `SomeType` as the sql type of the
+///    corresponding field. This attribute has precedence over all other
+///    ways to specify the sql type.
+/// * `#[diesel(deserialize_as = "Type")]`, instead of deserializing directly
+///   into the field type, the implementation will deserialize into `Type`.
+///   Then `Type` is converted via `.into()` into the field type. By default
+///   this derive will deserialize directly into the field type
+/// * `#[diesel(embed)]`, specifies that the current field maps not to a
+///   single database column, but is a type that implements
+///   `QueryableByName` on its own
 #[proc_macro_derive(QueryableByName, attributes(table_name, column_name, sql_type, diesel))]
 pub fn derive_queryable_by_name(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, queryable_by_name::derive)
 }
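
A sketch combining the attributes described above (hypothetical `users` table; `BigInt` must be in scope for the string attribute to resolve):

```rust
#[macro_use]
extern crate diesel;

use diesel::sql_types::BigInt;

table! {
    users {
        id -> Integer,
        name -> VarChar,
    }
}

// With `#[table_name]` the SQL types of `id` and `name` are taken from the
// corresponding columns of `users`.
#[derive(QueryableByName)]
#[table_name = "users"]
struct User {
    id: i32,
    name: String,
}

// The embedded struct provides its own columns; the extra computed column
// needs an explicit `#[sql_type]`.
#[derive(QueryableByName)]
struct UserWithPostCount {
    #[diesel(embed)]
    user: User,
    #[sql_type = "BigInt"]
    post_count: i64,
}

fn main() {}
```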
 
+/// Implements necessary traits for adding a new sql type
+///
+/// This derive implements all traits necessary to define a
+/// new sql type. This is useful for adding support for unsupported
+/// or custom types on the sql side. The sql type will be usable for
+/// all backends you specified via the attributes listed below.
+///
+/// This derive will implement `NotNull`, `HasSqlType` and `SingleValue`.
+/// When using this derive,
+/// you need to specify how the type is represented on various backends.
+/// You don't need to specify every backend,
+/// only the ones supported by your type.
+///
+/// For PostgreSQL, add  `#[postgres(type_name = "pg_type_name")]`
+/// or `#[postgres(oid = "some_oid", array_oid = "some_oid")]` for
+/// builtin types.
+/// For MySQL, specify which variant of `MysqlType` should be used
+/// by adding `#[mysql_type = "Variant"]`.
+/// For SQLite, specify which variant of `SqliteType` should be used
+/// by adding `#[sqlite_type = "Variant"]`.
+///
+/// # Attributes
+///
+/// ## Type attributes
+///
+/// * `#[postgres(type_name = "TypeName")]` specifies support for
+/// a postgresql type with the name `TypeName`. Prefer this variant
+/// for types with no stable OID (i.e. everything but the builtin types)
+/// * `#[postgres(oid = 42, array_oid = 142)]`, specifies support for a
+/// postgresql type with the given `oid` and `array_oid`. This variant
+/// should only be used with types that have a stable OID.
+/// * `#[sqlite_type = "TypeName"]`, specifies support for a sqlite type
+/// with the given name. `TypeName` needs to be one of the possible values
+/// in `SqliteType`
+/// * `#[mysql_type = "TypeName"]`, specifies support for a mysql type
+/// with the given name. `TypeName` needs to be one of the possible values
+/// in `MysqlType`
 #[proc_macro_derive(SqlType, attributes(postgres, sqlite_type, mysql_type))]
 pub fn derive_sql_type(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, sql_type::derive)
 }
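
A sketch of a custom SQL type declared for all three backends, following the attribute forms listed above (the type name and variant choices are illustrative):

```rust
#[macro_use]
extern crate diesel;

// `type_name` is preferred here because a user-defined Postgres enum has no
// stable OID; the MySQL and SQLite attributes name variants of `MysqlType`
// and `SqliteType` respectively.
#[derive(SqlType)]
#[postgres(type_name = "user_role")]
#[mysql_type = "String"]
#[sqlite_type = "Text"]
struct UserRoleType;

fn main() {}
```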
 
+/// Implements `ValidGrouping`
+///
+/// This trait can be automatically derived for structs with no type parameters
+/// which are never aggregate, as well as for structs which are `NonAggregate`
+/// when all type parameters are `NonAggregate`. For example:
+///
+/// ```ignore
+/// #[derive(ValidGrouping)]
+/// struct LiteralOne;
+///
+/// #[derive(ValidGrouping)]
+/// struct Plus<Lhs, Rhs>(Lhs, Rhs);
+///
+/// // The following impl will be generated:
+///
+/// impl<GroupByClause> ValidGrouping<GroupByClause> for LiteralOne {
+///     type IsAggregate = is_aggregate::Never;
+/// }
+///
+/// impl<Lhs, Rhs, GroupByClause> ValidGrouping<GroupByClause> for Plus<Lhs, Rhs>
+/// where
+///     Lhs: ValidGrouping<GroupByClause>,
+///     Rhs: ValidGrouping<GroupByClause>,
+///     Lhs::IsAggregate: MixedAggregates<Rhs::IsAggregate>,
+/// {
+///     type IsAggregate = <Lhs::IsAggregate as MixedAggregates<Rhs::IsAggregate>>::Output;
+/// }
+/// ```
+///
+/// For types which are always considered aggregate (such as an aggregate
+/// function), annotate your struct with `#[diesel(aggregate)]` to set `IsAggregate`
+/// explicitly to `is_aggregate::Yes`.
+///
+/// # Attributes
+///
+/// ## Optional type attributes
+///
+/// * `#[diesel(aggregate)]` for cases where the type represents an aggregating
+///   SQL expression
+#[proc_macro_derive(ValidGrouping, attributes(diesel))]
+pub fn derive_valid_grouping(input: TokenStream) -> TokenStream {
+    expand_proc_macro(input, valid_grouping::derive)
+}
+
+/// Declare a sql function for use in your code.
+///
+/// Diesel only provides support for a very small number of SQL functions.
+/// This macro enables you to add additional functions from the SQL standard,
+/// as well as any custom functions your application might have.
+///
+/// The syntax for this macro is very similar to that of a normal Rust function,
+/// except the argument and return types will be the SQL types being used.
+/// Typically these types will come from [`diesel::sql_types`](../diesel/sql_types/index.html)
+///
+/// This macro will generate two items. A function with the name that you've
+/// given, and a module with a helper type representing the return type of your
+/// function. For example, this invocation:
+///
+/// ```ignore
+/// sql_function!(fn lower(x: Text) -> Text);
+/// ```
+///
+/// will generate this code:
+///
+/// ```ignore
+/// pub fn lower<X>(x: X) -> lower::HelperType<X> {
+///     ...
+/// }
+///
+/// pub(crate) mod lower {
+///     pub type HelperType<X> = ...;
+/// }
+/// ```
+///
+/// If you are using this macro for part of a library, where the function is
+/// part of your public API, it is highly recommended that you re-export this
+/// helper type with the same name as your function. This is the standard
+/// structure:
+///
+/// ```ignore
+/// pub mod functions {
+///     use super::types::*;
+///     use diesel::sql_types::*;
+///
+///     sql_function! {
+///         /// Represents the Pg `LENGTH` function used with `tsvector`s.
+///         fn length(x: TsVector) -> Integer;
+///     }
+/// }
+///
+/// pub mod helper_types {
+///     /// The return type of `length(expr)`
+///     pub type Length<Expr> = functions::length::HelperType<Expr>;
+/// }
+///
+/// pub mod dsl {
+///     pub use functions::*;
+///     pub use helper_types::*;
+/// }
+/// ```
+///
+/// Most attributes given to this macro will be put on the generated function
+/// (including doc comments).
+///
+/// # Adding Doc Comments
+///
+/// ```no_run
+/// # extern crate diesel;
+/// # use diesel::*;
+/// #
+/// # table! { crates { id -> Integer, name -> VarChar, } }
+/// #
+/// use diesel::sql_types::Text;
+///
+/// sql_function! {
+///     /// Represents the `canon_crate_name` SQL function, created in
+///     /// migration ....
+///     fn canon_crate_name(a: Text) -> Text;
+/// }
+///
+/// # fn main() {
+/// # use self::crates::dsl::*;
+/// let target_name = "diesel";
+/// crates.filter(canon_crate_name(name).eq(canon_crate_name(target_name)));
+/// // This will generate the following SQL
+/// // SELECT * FROM crates WHERE canon_crate_name(crates.name) = canon_crate_name($1)
+/// # }
+/// ```
+///
+/// # Special Attributes
+///
+/// There are a handful of special attributes that Diesel will recognize. They
+/// are:
+///
+/// - `#[aggregate]`
+///   - Indicates that this is an aggregate function, and that `NonAggregate`
+///     should not be implemented.
+/// - `#[sql_name="name"]`
+///   - The SQL to be generated is different than the Rust name of the function.
+///     This can be used to represent functions which can take many argument
+///     types, or to capitalize function names.
+///
+/// Functions can also be generic. Take the definition of `sum` for an example
+/// of all of this:
+///
+/// ```no_run
+/// # extern crate diesel;
+/// # use diesel::*;
+/// #
+/// # table! { crates { id -> Integer, name -> VarChar, } }
+/// #
+/// use diesel::sql_types::Foldable;
+///
+/// sql_function! {
+///     #[aggregate]
+///     #[sql_name = "SUM"]
+///     fn sum<ST: Foldable>(expr: ST) -> ST::Sum;
+/// }
+///
+/// # fn main() {
+/// # use self::crates::dsl::*;
+/// crates.select(sum(id));
+/// # }
+/// ```
+///
+/// # Use with SQLite
+///
+/// On most backends, the implementation of the function is defined in a
+/// migration using `CREATE FUNCTION`. On SQLite, the function is implemented in
+/// Rust instead. You must call `register_impl` or
+/// `register_nondeterministic_impl` with every connection before you can use
+/// the function.
+///
+/// These functions will only be generated if the `sqlite` feature is enabled,
+/// and the function is not generic. Generic functions and variadic functions
+/// are not supported on SQLite.
+///
+/// ```rust
+/// # extern crate diesel;
+/// # use diesel::*;
+/// #
+/// # #[cfg(feature = "sqlite")]
+/// # fn main() {
+/// #     run_test().unwrap();
+/// # }
+/// #
+/// # #[cfg(not(feature = "sqlite"))]
+/// # fn main() {
+/// # }
+/// #
+/// use diesel::sql_types::{Integer, Double};
+/// sql_function!(fn add_mul(x: Integer, y: Integer, z: Double) -> Double);
+///
+/// # #[cfg(feature = "sqlite")]
+/// # fn run_test() -> Result<(), Box<::std::error::Error>> {
+/// let connection = SqliteConnection::establish(":memory:")?;
+///
+/// add_mul::register_impl(&connection, |x: i32, y: i32, z: f64| {
+///     (x + y) as f64 * z
+/// })?;
+///
+/// let result = select(add_mul(1, 2, 1.5))
+///     .get_result::<f64>(&connection)?;
+/// assert_eq!(4.5, result);
+/// #     Ok(())
+/// # }
+/// ```
+///
+/// ## Custom Aggregate Functions
+///
+/// Custom aggregate functions can be created in SQLite by adding an `#[aggregate]`
+/// attribute inside of `sql_function`. `register_impl` needs to be called on
+/// the generated function with a type implementing the
+/// [SqliteAggregateFunction](../diesel/sqlite/trait.SqliteAggregateFunction.html)
+/// trait as a type parameter, as shown in the examples below.
+///
+/// ```rust
+/// # extern crate diesel;
+/// # use diesel::*;
+/// #
+/// # #[cfg(feature = "sqlite")]
+/// # fn main() {
+/// #   run().unwrap();
+/// # }
+/// #
+/// # #[cfg(not(feature = "sqlite"))]
+/// # fn main() {
+/// # }
+/// use diesel::sql_types::Integer;
+/// # #[cfg(feature = "sqlite")]
+/// use diesel::sqlite::SqliteAggregateFunction;
+///
+/// sql_function! {
+///     #[aggregate]
+///     fn my_sum(x: Integer) -> Integer;
+/// }
+///
+/// #[derive(Default)]
+/// struct MySum { sum: i32 }
+///
+/// # #[cfg(feature = "sqlite")]
+/// impl SqliteAggregateFunction<i32> for MySum {
+///     type Output = i32;
+///
+///     fn step(&mut self, expr: i32) {
+///         self.sum += expr;
+///     }
+///
+///     fn finalize(aggregator: Option<Self>) -> Self::Output {
+///         aggregator.map(|a| a.sum).unwrap_or_default()
+///     }
+/// }
+/// # table! {
+/// #     players {
+/// #         id -> Integer,
+/// #         score -> Integer,
+/// #     }
+/// # }
+///
+/// # #[cfg(feature = "sqlite")]
+/// fn run() -> Result<(), Box<dyn (::std::error::Error)>> {
+/// #    use self::players::dsl::*;
+///     let connection = SqliteConnection::establish(":memory:")?;
+/// #    connection.execute("create table players (id integer primary key autoincrement, score integer)").unwrap();
+/// #    connection.execute("insert into players (score) values (10), (20), (30)").unwrap();
+///
+///     my_sum::register_impl::<MySum, _>(&connection)?;
+///
+///     let total_score = players.select(my_sum(score))
+///         .get_result::<i32>(&connection)?;
+///
+///     println!("The total score of all the players is: {}", total_score);
+///
+/// #    assert_eq!(60, total_score);
+///     Ok(())
+/// }
+/// ```
+///
+/// With multiple function arguments the arguments are passed as a tuple to `SqliteAggregateFunction`
+///
+/// ```rust
+/// # extern crate diesel;
+/// # use diesel::*;
+/// #
+/// # #[cfg(feature = "sqlite")]
+/// # fn main() {
+/// #   run().unwrap();
+/// # }
+/// #
+/// # #[cfg(not(feature = "sqlite"))]
+/// # fn main() {
+/// # }
+/// use diesel::sql_types::{Float, Nullable};
+/// # #[cfg(feature = "sqlite")]
+/// use diesel::sqlite::SqliteAggregateFunction;
+///
+/// sql_function! {
+///     #[aggregate]
+///     fn range_max(x0: Float, x1: Float) -> Nullable<Float>;
+/// }
+///
+/// #[derive(Default)]
+/// struct RangeMax<T> { max_value: Option<T> }
+///
+/// # #[cfg(feature = "sqlite")]
+/// impl<T: Default + PartialOrd + Copy + Clone> SqliteAggregateFunction<(T, T)> for RangeMax<T> {
+///     type Output = Option<T>;
+///
+///     fn step(&mut self, (x0, x1): (T, T)) {
+/// #        let max = if x0 >= x1 {
+/// #            x0
+/// #        } else {
+/// #            x1
+/// #        };
+/// #
+/// #        self.max_value = match self.max_value {
+/// #            Some(current_max_value) if max > current_max_value => Some(max),
+/// #            None => Some(max),
+/// #            _ => self.max_value,
+/// #        };
+///         // Compare self.max_value to x0 and x1
+///     }
+///
+///     fn finalize(aggregator: Option<Self>) -> Self::Output {
+///         aggregator?.max_value
+///     }
+/// }
+/// # table! {
+/// #     student_avgs {
+/// #         id -> Integer,
+/// #         s1_avg -> Float,
+/// #         s2_avg -> Float,
+/// #     }
+/// # }
+///
+/// # #[cfg(feature = "sqlite")]
+/// fn run() -> Result<(), Box<dyn (::std::error::Error)>> {
+/// #    use self::student_avgs::dsl::*;
+///     let connection = SqliteConnection::establish(":memory:")?;
+/// #    connection.execute("create table student_avgs (id integer primary key autoincrement, s1_avg float, s2_avg float)").unwrap();
+/// #    connection.execute("insert into student_avgs (s1_avg, s2_avg) values (85.5, 90), (79.8, 80.1)").unwrap();
+///
+///     range_max::register_impl::<RangeMax<f32>, _, _>(&connection)?;
+///
+///     let result = student_avgs.select(range_max(s1_avg, s2_avg))
+///         .get_result::<Option<f32>>(&connection)?;
+///
+///     if let Some(max_semester_avg) = result {
+///         println!("The largest semester average is: {}", max_semester_avg);
+///     }
+///
+/// #    assert_eq!(Some(90f32), result);
+///     Ok(())
+/// }
+/// ```
 #[proc_macro]
 pub fn sql_function_proc(input: TokenStream) -> TokenStream {
     expand_proc_macro(input, sql_function::expand)
diff --git a/diesel_derives/src/meta.rs b/diesel_derives/src/meta.rs
index f3bfc1d..9d045de 100644
--- a/diesel_derives/src/meta.rs
+++ b/diesel_derives/src/meta.rs
@@ -82,7 +82,7 @@
     }
 
     pub fn ident_value(&self) -> Result<syn::Ident, Diagnostic> {
-        let maybe_attr = self.nested().ok().and_then(|mut n| n.nth(0));
+        let maybe_attr = self.nested().ok().and_then(|mut n| n.next());
         let maybe_path = maybe_attr.as_ref().and_then(|m| m.path().ok());
         match maybe_path {
             Some(x) => {
diff --git a/diesel_derives/src/non_aggregate.rs b/diesel_derives/src/non_aggregate.rs
deleted file mode 100644
index e09b882..0000000
--- a/diesel_derives/src/non_aggregate.rs
+++ /dev/null
@@ -1,30 +0,0 @@
-use proc_macro2::*;
-use syn;
-
-use util::*;
-
-pub fn derive(mut item: syn::DeriveInput) -> Result<TokenStream, Diagnostic> {
-    let type_params = item
-        .generics
-        .type_params()
-        .map(|param| param.ident.clone())
-        .collect::<Vec<_>>();
-    for type_param in type_params {
-        let where_clause = item.generics.make_where_clause();
-        where_clause
-            .predicates
-            .push(parse_quote!(#type_param: NonAggregate));
-    }
-
-    let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();
-    let struct_name = &item.ident;
-
-    Ok(wrap_in_dummy_mod(quote! {
-        use diesel::expression::NonAggregate;
-
-        impl #impl_generics NonAggregate for #struct_name #ty_generics
-        #where_clause
-        {
-        }
-    }))
-}
diff --git a/diesel_derives/src/sql_function.rs b/diesel_derives/src/sql_function.rs
index e3c5b6d..a41af4f 100644
--- a/diesel_derives/src/sql_function.rs
+++ b/diesel_derives/src/sql_function.rs
@@ -47,9 +47,11 @@
         .type_params()
         .map(|type_param| type_param.ident.clone())
         .collect::<Vec<_>>();
+
     for StrictFnArg { name, .. } in args {
         generics.params.push(parse_quote!(#name));
     }
+
     let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
     // Even if we force an empty where clause, it still won't print the where
     // token with no bounds.
@@ -66,12 +68,12 @@
     let args_iter = args.iter();
     let mut tokens = quote! {
         use diesel::{self, QueryResult};
-        use diesel::expression::{AsExpression, Expression, SelectableExpression, AppearsOnTable};
+        use diesel::expression::{AsExpression, Expression, SelectableExpression, AppearsOnTable, ValidGrouping};
         use diesel::query_builder::{QueryFragment, AstPass};
         use diesel::sql_types::*;
         use super::*;
 
-        #[derive(Debug, Clone, Copy, QueryId, DieselNumericOps)]
+        #[derive(Debug, Clone, Copy, diesel::query_builder::QueryId, diesel::sql_types::DieselNumericOps)]
         pub struct #fn_name #ty_generics {
             #(pub(in super) #args_iter,)*
             #(pub(in super) #type_args: ::std::marker::PhantomData<#type_args>,)*
@@ -123,15 +125,91 @@
         }
     };
 
-    if !is_aggregate {
+    if is_aggregate {
         tokens = quote! {
             #tokens
 
-            impl #impl_generics diesel::expression::NonAggregate
+            impl #impl_generics_internal ValidGrouping<__DieselInternal>
                 for #fn_name #ty_generics
-            #where_clause
-                #(#arg_name: diesel::expression::NonAggregate,)*
             {
+                type IsAggregate = diesel::expression::is_aggregate::Yes;
+            }
+        };
+        if cfg!(feature = "sqlite") && type_args.is_empty() {
+            tokens = quote! {
+                #tokens
+
+                use diesel::sqlite::{Sqlite, SqliteConnection};
+                use diesel::serialize::ToSql;
+                use diesel::deserialize::Queryable;
+                use diesel::sqlite::SqliteAggregateFunction;
+                use diesel::sql_types::IntoNullable;
+            };
+
+            match arg_name.len() {
+                x if x > 1 => {
+                    tokens = quote! {
+                        #tokens
+
+                        #[allow(dead_code)]
+                        /// Registers an implementation for this aggregate function on the given connection
+                        ///
+                        /// This function must be called for every `SqliteConnection` before
+                        /// this SQL function can be used on SQLite. The implementation must be
+                        /// deterministic (returns the same result given the same arguments).
+                        pub fn register_impl<A, #(#arg_name,)*>(
+                            conn: &SqliteConnection
+                        ) -> QueryResult<()>
+                            where
+                            A: SqliteAggregateFunction<(#(#arg_name,)*)> + Send + 'static,
+                            A::Output: ToSql<#return_type, Sqlite>,
+                            (#(#arg_name,)*): Queryable<(#(#arg_type,)*), Sqlite>,
+                        {
+                            conn.register_aggregate_function::<(#(#arg_type,)*), #return_type, _, _, A>(#sql_name)
+                        }
+                    };
+                }
+                x if x == 1 => {
+                    let arg_name = arg_name[0];
+                    let arg_type = arg_type[0];
+
+                    tokens = quote! {
+                        #tokens
+
+                        #[allow(dead_code)]
+                        /// Registers an implementation for this aggregate function on the given connection
+                        ///
+                        /// This function must be called for every `SqliteConnection` before
+                        /// this SQL function can be used on SQLite. The implementation must be
+                        /// deterministic (returns the same result given the same arguments).
+                        pub fn register_impl<A, #arg_name>(
+                            conn: &SqliteConnection
+                        ) -> QueryResult<()>
+                            where
+                            A: SqliteAggregateFunction<#arg_name> + Send + 'static,
+                            A::Output: ToSql<#return_type, Sqlite>,
+                            #arg_name: Queryable<#arg_type, Sqlite>,
+                            {
+                                conn.register_aggregate_function::<#arg_type, #return_type, _, _, A>(#sql_name)
+                            }
+                    };
+                }
+                _ => (),
+            }
+        }
+    } else {
+        tokens = quote! {
+            #tokens
+
+            #[derive(ValidGrouping)]
+            pub struct __Derived<#(#arg_name,)*>(#(#arg_name,)*);
+
+            impl #impl_generics_internal ValidGrouping<__DieselInternal>
+                for #fn_name #ty_generics
+            where
+                __Derived<#(#arg_name,)*>: ValidGrouping<__DieselInternal>,
+            {
+                type IsAggregate = <__Derived<#(#arg_name,)*> as ValidGrouping<__DieselInternal>>::IsAggregate;
             }
         };
 
diff --git a/diesel_derives/src/util.rs b/diesel_derives/src/util.rs
index fe4dc644..057a03b 100644
--- a/diesel_derives/src/util.rs
+++ b/diesel_derives/src/util.rs
@@ -8,6 +8,12 @@
     quote! {
         #[allow(unused_imports)]
         const _: () = {
+            // This import is not actually redundant. When using diesel_derives
+            // inside of diesel, `diesel` doesn't exist as an extern crate, and
+            // to work around that it contains a private
+            // `mod diesel { pub use super::*; }` that this import will then
+            // refer to. In all other cases, this import refers to the extern
+            // crate diesel.
             use diesel;
 
             #item
@@ -46,7 +52,7 @@
 pub fn ty_for_foreign_derive(item: &DeriveInput, flags: &MetaItem) -> Result<Type, Diagnostic> {
     if flags.has_flag("foreign_derive") {
         match item.data {
-            Data::Struct(ref body) => match body.fields.iter().nth(0) {
+            Data::Struct(ref body) => match body.fields.iter().next() {
                 Some(field) => Ok(field.ty.clone()),
                 None => Err(flags
                     .span()
diff --git a/diesel_derives/src/valid_grouping.rs b/diesel_derives/src/valid_grouping.rs
new file mode 100644
index 0000000..45ec153
--- /dev/null
+++ b/diesel_derives/src/valid_grouping.rs
@@ -0,0 +1,69 @@
+use proc_macro2::*;
+use syn;
+
+use meta::*;
+use util::*;
+
+pub fn derive(mut item: syn::DeriveInput) -> Result<TokenStream, Diagnostic> {
+    let flags =
+        MetaItem::with_name(&item.attrs, "diesel").unwrap_or_else(|| MetaItem::empty("diesel"));
+    let struct_ty = ty_for_foreign_derive(&item, &flags)?;
+    let type_params = item
+        .generics
+        .type_params()
+        .map(|param| param.ident.clone())
+        .collect::<Vec<_>>();
+    for type_param in type_params {
+        let where_clause = item.generics.make_where_clause();
+        where_clause
+            .predicates
+            .push(parse_quote!(#type_param: ValidGrouping<__GroupByClause>));
+    }
+
+    let is_aggregate = flags.has_flag("aggregate");
+
+    if is_aggregate {
+        item.generics.params.push(parse_quote!(__GroupByClause));
+        let (impl_generics, _, where_clause) = item.generics.split_for_impl();
+        Ok(wrap_in_dummy_mod(quote! {
+            use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate};
+
+            impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty
+            #where_clause
+            {
+                type IsAggregate = is_aggregate::Yes;
+            }
+        }))
+    } else {
+        let mut aggregates = item
+            .generics
+            .type_params()
+            .map(|t| parse_quote!(#t::IsAggregate))
+            .collect::<Vec<syn::Type>>()
+            .into_iter();
+        let is_aggregate = aggregates
+            .next()
+            .map(|first| {
+                let where_clause = item.generics.make_where_clause();
+                aggregates.fold(first, |left, right| {
+                    where_clause.predicates.push(parse_quote!(
+                        #left: MixedAggregates<#right>
+                    ));
+                    parse_quote!(<#left as MixedAggregates<#right>>::Output)
+                })
+            })
+            .unwrap_or_else(|| parse_quote!(is_aggregate::Never));
+        item.generics.params.push(parse_quote!(__GroupByClause));
+        let (impl_generics, _, where_clause) = item.generics.split_for_impl();
+
+        Ok(wrap_in_dummy_mod(quote! {
+            use diesel::expression::{ValidGrouping, MixedAggregates, is_aggregate};
+
+            impl #impl_generics ValidGrouping<__GroupByClause> for #struct_ty
+            #where_clause
+            {
+                type IsAggregate = #is_aggregate;
+            }
+        }))
+    }
+}
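
For reference, the fold in the non-aggregate branch chains the `MixedAggregates` bounds pairwise. Roughly, for a three-parameter struct, the generated impl looks like the hand-written sketch below (identifiers shortened; not literal macro output):

```rust
extern crate diesel;

use diesel::expression::{MixedAggregates, ValidGrouping};

struct Expr<A, B, C>(A, B, C);

// Each type parameter gets a `ValidGrouping` bound; the fold then chains the
// `IsAggregate` outputs left to right through `MixedAggregates`.
impl<A, B, C, GB> ValidGrouping<GB> for Expr<A, B, C>
where
    A: ValidGrouping<GB>,
    B: ValidGrouping<GB>,
    C: ValidGrouping<GB>,
    A::IsAggregate: MixedAggregates<B::IsAggregate>,
    <A::IsAggregate as MixedAggregates<B::IsAggregate>>::Output: MixedAggregates<C::IsAggregate>,
{
    type IsAggregate = <<A::IsAggregate as MixedAggregates<B::IsAggregate>>::Output
        as MixedAggregates<C::IsAggregate>>::Output;
}

fn main() {}
```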
diff --git a/diesel_migrations/Cargo.toml b/diesel_migrations/Cargo.toml
index 6a1c406..481217d 100644
--- a/diesel_migrations/Cargo.toml
+++ b/diesel_migrations/Cargo.toml
@@ -7,16 +7,23 @@
 documentation = "https://docs.rs/crate/diesel_migrations"
 homepage = "https://diesel.rs"
 
+[dependencies.migrations_internals]
+version = "~1.4.0"
+path = "migrations_internals"
 
-[dependencies]
-migrations_internals = "~1.4.0"
-migrations_macros = "~1.4.0"
+[dependencies.migrations_macros]
+version = "~1.4.0"
+path = "migrations_macros"
 
 [dev-dependencies]
-diesel = { version = "1.4.0", default-features = false }
 dotenv = ">=0.8, <0.11"
 cfg-if = "0.1.0"
 
+[dev-dependencies.diesel]
+version = "~2.0.0"
+path = "../diesel"
+default-features = false
+
 [features]
 default = []
 sqlite = []
diff --git a/diesel_migrations/migrations_internals/Cargo.toml b/diesel_migrations/migrations_internals/Cargo.toml
index 384fa5b..7f6986a 100644
--- a/diesel_migrations/migrations_internals/Cargo.toml
+++ b/diesel_migrations/migrations_internals/Cargo.toml
@@ -1,15 +1,19 @@
 [package]
 name = "migrations_internals"
-version = "1.4.0"
+version = "1.4.1"
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 license = "MIT OR Apache-2.0"
 description = "Internal implementation of diesels migration mechanism"
 homepage = "https://diesel.rs"
 
 [dependencies]
-diesel = { version = "~1.4.0", default-features = false }
 barrel = { version = ">= 0.5.0", optional = true, features = ["diesel"] }
 
+[dependencies.diesel]
+version = "~2.0.0"
+path = "../../diesel"
+default-features = false
+
 [dev-dependencies]
 tempfile = "3.0.0"
 
diff --git a/diesel_migrations/migrations_internals/src/connection.rs b/diesel_migrations/migrations_internals/src/connection.rs
index b02a605..0452a66 100644
--- a/diesel_migrations/migrations_internals/src/connection.rs
+++ b/diesel_migrations/migrations_internals/src/connection.rs
@@ -1,9 +1,10 @@
 use diesel::deserialize::FromSql;
 use diesel::expression::bound::Bound;
+use diesel::helper_types::{max, Limit, Select};
 use diesel::insertable::ColumnInsertValue;
 use diesel::prelude::*;
-use diesel::query_builder::{InsertStatement, ValuesClause};
-use diesel::query_dsl::methods::ExecuteDsl;
+use diesel::query_builder::{InsertStatement, QueryFragment, ValuesClause};
+use diesel::query_dsl::methods::{self, ExecuteDsl, LoadQuery};
 use diesel::sql_types::VarChar;
 use std::collections::HashSet;
 use std::iter::FromIterator;
@@ -32,6 +33,9 @@
             __diesel_schema_migrations,
         >,
     >: ExecuteDsl<T>,
+    __diesel_schema_migrations: methods::SelectDsl<version>,
+    Select<__diesel_schema_migrations, version>: LoadQuery<T, String>,
+    Limit<Select<__diesel_schema_migrations, max<version>>>: QueryFragment<T::Backend>,
 {
     fn previously_run_migration_versions(&self) -> QueryResult<HashSet<String>> {
         __diesel_schema_migrations
diff --git a/diesel_migrations/migrations_internals/src/lib.rs b/diesel_migrations/migrations_internals/src/lib.rs
index d18505b..91fc43b 100644
--- a/diesel_migrations/migrations_internals/src/lib.rs
+++ b/diesel_migrations/migrations_internals/src/lib.rs
@@ -384,8 +384,9 @@
         Ok(migration_path)
     } else {
         path.parent()
-            .map(search_for_migrations_directory)
-            .unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
+            .map(|p| search_for_migrations_directory(p))
+            .unwrap_or_else(|| Err(MigrationError::MigrationDirectoryNotFound(path.into())))
+            .map_err(|_| MigrationError::MigrationDirectoryNotFound(path.into()))
     }
 }
 
@@ -403,7 +404,9 @@
         let dir = Builder::new().prefix("diesel").tempdir().unwrap();
 
         assert_eq!(
-            Err(MigrationError::MigrationDirectoryNotFound),
+            Err(MigrationError::MigrationDirectoryNotFound(
+                dir.path().into()
+            )),
             search_for_migrations_directory(dir.path())
         );
     }
diff --git a/diesel_migrations/migrations_internals/src/migration.rs b/diesel_migrations/migrations_internals/src/migration.rs
index b2b6af6..e03dbe0 100644
--- a/diesel_migrations/migrations_internals/src/migration.rs
+++ b/diesel_migrations/migrations_internals/src/migration.rs
@@ -102,7 +102,7 @@
         .unwrap_or_else(|| panic!("Can't get file name from path `{:?}`", path))
         .to_string_lossy()
         .split('_')
-        .nth(0)
+        .next()
         .map(|s| Ok(s.replace('-', "")))
         .unwrap_or_else(|| Err(MigrationError::UnknownMigrationFormat(path.to_path_buf())))
 }
diff --git a/diesel_migrations/migrations_macros/Cargo.toml b/diesel_migrations/migrations_macros/Cargo.toml
index 7406b64..d53a3c2 100644
--- a/diesel_migrations/migrations_macros/Cargo.toml
+++ b/diesel_migrations/migrations_macros/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "migrations_macros"
-version = "1.4.1"
+version = "1.4.2"
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 license = "MIT OR Apache-2.0"
 description = "Codegeneration macros for diesels embedded migrations"
@@ -8,11 +8,14 @@
 homepage = "https://diesel.rs"
 
 [dependencies]
-migrations_internals = "~1.4.0"
 syn = { version = "1", features = ["extra-traits"] }
 quote = "1"
 proc-macro2 = "1"
 
+[dependencies.migrations_internals]
+version = "~1.4.0"
+path = "../migrations_internals"
+
 [dev-dependencies]
 tempfile = "3.0.0"
 
diff --git a/diesel_tests/Cargo.toml b/diesel_tests/Cargo.toml
index 1caaf84..03d75cf 100644
--- a/diesel_tests/Cargo.toml
+++ b/diesel_tests/Cargo.toml
@@ -9,15 +9,14 @@
 
 [build-dependencies]
 diesel = { path = "../diesel", default-features = false }
-diesel_migrations = { version = "1.4.0" }
+diesel_migrations = { path = "../diesel_migrations" }
 dotenv = ">=0.8, <0.11"
 
 [dependencies]
 assert_matches = "1.0.1"
 chrono = { version = "0.4" }
 diesel = { path = "../diesel", default-features = false, features = ["quickcheck", "chrono", "uuid", "serde_json", "network-address", "numeric", "with-deprecated"] }
-diesel_migrations = { version = "1.4.0" }
-diesel_infer_schema = { version = "1.4.0" }
+diesel_migrations = { path = "../diesel_migrations" }
 dotenv = ">=0.8, <0.11"
 quickcheck = { version = "0.4", features = ["unstable"] }
 uuid = { version = ">=0.7.0, <0.9.0" }
@@ -28,9 +27,9 @@
 [features]
 default = []
 unstable = ["diesel/unstable"]
-postgres = ["diesel/postgres", "diesel_infer_schema/postgres"]
-sqlite = ["diesel/sqlite", "diesel_infer_schema/sqlite"]
-mysql = ["diesel/mysql", "diesel_infer_schema/mysql"]
+postgres = ["diesel/postgres"]
+sqlite = ["diesel/sqlite"]
+mysql = ["diesel/mysql"]
 
 [[test]]
 name = "integration_tests"
diff --git a/diesel_tests/tests/custom_schemas.rs b/diesel_tests/tests/custom_schemas.rs
deleted file mode 100644
index 781ddfd..0000000
--- a/diesel_tests/tests/custom_schemas.rs
+++ /dev/null
@@ -1,91 +0,0 @@
-use crate::schema::connection;
-use diesel::*;
-
-mod using_infer_schema {
-    use super::*;
-    #[cfg(feature = "backend_specific_database_url")]
-    infer_schema!("dotenv:PG_DATABASE_URL", "custom_schema");
-    #[cfg(not(feature = "backend_specific_database_url"))]
-    infer_schema!("dotenv:DATABASE_URL", "custom_schema");
-    use self::custom_schema::users;
-
-    #[derive(Insertable)]
-    #[table_name = "users"]
-    struct NewUser {
-        id: i32,
-    }
-
-    #[test]
-    fn custom_schemas_are_loaded_by_infer_schema() {
-        let conn = connection();
-        insert_into(users::table)
-            .values(&NewUser { id: 1 })
-            .execute(&conn)
-            .unwrap();
-        let users = users::table.select(users::id).load(&conn);
-
-        assert_eq!(Ok(vec![1]), users);
-    }
-}
-
-mod using_infer_table_from_schema {
-    use super::*;
-    mod infer_users {
-        #[cfg(feature = "backend_specific_database_url")]
-        infer_table_from_schema!("dotenv:PG_DATABASE_URL", "custom_schema.users");
-        #[cfg(not(feature = "backend_specific_database_url"))]
-        infer_table_from_schema!("dotenv:DATABASE_URL", "custom_schema.users");
-    }
-    use self::infer_users::users;
-
-    #[derive(Insertable)]
-    #[table_name = "users"]
-    struct NewUser {
-        id: i32,
-    }
-
-    #[test]
-    fn custom_schemas_are_loaded_by_infer_table_from_schema() {
-        let conn = connection();
-        insert_into(users::table)
-            .values(&NewUser { id: 1 })
-            .execute(&conn)
-            .unwrap();
-        let users = users::table.select(users::id).load(&conn);
-
-        assert_eq!(Ok(vec![1]), users);
-    }
-}
-
-mod using_infer_table_from_schema_with_default_schema {
-    use super::*;
-    mod infer_users {
-        #[cfg(feature = "backend_specific_database_url")]
-        infer_table_from_schema!("dotenv:PG_DATABASE_URL", "users");
-        #[cfg(not(feature = "backend_specific_database_url"))]
-        infer_table_from_schema!("dotenv:DATABASE_URL", "users");
-    }
-    use self::infer_users::users;
-
-    #[derive(Insertable)]
-    #[table_name = "users"]
-    struct NewUser<'a> {
-        id: i32,
-        name: &'a str,
-    }
-
-    #[test]
-    fn custom_schemas_are_loaded_by_infer_table_from_schema() {
-        let conn = connection();
-        insert_into(users::table)
-            .values(&NewUser {
-                id: 1,
-                name: "Sean",
-            })
-            .execute(&conn)
-            .unwrap();
-        let users = users::table.select(users::id).load(&conn);
-
-        assert_eq!(Ok(vec![1]), users);
-    }
-}
diff --git a/diesel_tests/tests/errors.rs b/diesel_tests/tests/errors.rs
index 4e18baf..51a0dfc 100644
--- a/diesel_tests/tests/errors.rs
+++ b/diesel_tests/tests/errors.rs
@@ -1,5 +1,7 @@
 use crate::schema::*;
-use diesel::result::DatabaseErrorKind::{ForeignKeyViolation, UniqueViolation};
+#[cfg(not(feature = "mysql"))]
+use diesel::result::DatabaseErrorKind::CheckViolation;
+use diesel::result::DatabaseErrorKind::{ForeignKeyViolation, NotNullViolation, UniqueViolation};
 use diesel::result::Error::DatabaseError;
 use diesel::*;
 
@@ -189,3 +191,86 @@
 
     assert_matches!(result, Err(DatabaseError(ReadOnlyTransaction, _)));
 }
+
+#[test]
+fn not_null_constraints_are_detected() {
+    let connection = connection();
+
+    let failure = insert_into(users::table)
+        .values(users::columns::hair_color.eq("black"))
+        .execute(&connection);
+
+    assert_matches!(failure, Err(DatabaseError(NotNullViolation, _)));
+}
+
+#[test]
+#[cfg(feature = "postgres")]
+fn not_null_constraints_correct_column_name() {
+    let connection = connection();
+
+    let failure = insert_into(users::table)
+        .values(users::columns::hair_color.eq("black"))
+        .execute(&connection);
+
+    match failure {
+        Err(DatabaseError(NotNullViolation, e)) => {
+            assert_eq!(Some("users"), e.table_name());
+            assert_eq!(Some("name"), e.column_name());
+        }
+        _ => panic!(
+            "{:?} did not match Err(DatabaseError(NotNullViolation, e))",
+            failure
+        ),
+    };
+}
+
+#[test]
+#[cfg(not(feature = "mysql"))]
+/// MySQL < 8.0.16 doesn't enforce check constraints
+fn check_constraints_are_detected() {
+    let connection = connection();
+
+    insert_into(users::table)
+        .values(&User::new(1, "Sean"))
+        .execute(&connection)
+        .unwrap();
+
+    let failure = insert_into(pokes::table)
+        .values((
+            pokes::columns::user_id.eq(1),
+            pokes::columns::poke_count.eq(-1),
+        ))
+        .execute(&connection);
+
+    assert_matches!(failure, Err(DatabaseError(CheckViolation, _)));
+}
+
+#[test]
+#[cfg(feature = "postgres")]
+fn check_constraints_correct_constraint_name() {
+    let connection = connection();
+
+    insert_into(users::table)
+        .values(&User::new(1, "Sean"))
+        .execute(&connection)
+        .unwrap();
+
+    let failure = insert_into(pokes::table)
+        .values((
+            pokes::columns::user_id.eq(1),
+            pokes::columns::poke_count.eq(-1),
+        ))
+        .execute(&connection);
+
+    match failure {
+        Err(DatabaseError(CheckViolation, e)) => {
+            assert_eq!(Some("pokes"), e.table_name());
+            assert_eq!(None, e.column_name());
+            assert_eq!(Some("pokes_poke_count_check"), e.constraint_name());
+        }
+        _ => panic!(
+            "{:?} did not match Err(DatabaseError(CheckViolation, e))",
+            failure
+        ),
+    };
+}
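
The new tests above rely on the `NotNullViolation`/`CheckViolation` error kinds together with the `DatabaseErrorInformation` accessors exposed on `DatabaseError`. A hedged application-side sketch of the same pattern (the `describe` helper is illustrative only, not something added by this diff):

```rust
use diesel::result::{DatabaseErrorKind, Error};

// Illustrative only: mirrors the matching done in the tests above. On
// PostgreSQL the boxed error information exposes table/column/constraint
// names; other backends may return `None` for some of them.
fn describe(err: &Error) -> String {
    match err {
        Error::DatabaseError(DatabaseErrorKind::NotNullViolation, info) => format!(
            "NOT NULL violated on {:?}.{:?}",
            info.table_name(),
            info.column_name()
        ),
        Error::DatabaseError(DatabaseErrorKind::CheckViolation, info) => {
            format!("CHECK constraint {:?} violated", info.constraint_name())
        }
        other => other.to_string(),
    }
}
```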
diff --git a/diesel_tests/tests/expressions/ops.rs b/diesel_tests/tests/expressions/ops.rs
index b993e09..1d63137 100644
--- a/diesel_tests/tests/expressions/ops.rs
+++ b/diesel_tests/tests/expressions/ops.rs
@@ -199,3 +199,89 @@
     let x = select((2.into_sql::<Integer>() + 3) * 4).get_result::<i32>(&connection());
     assert_eq!(Ok(20), x);
 }
+
+#[test]
+#[cfg(feature = "mysql")]
+fn test_adding_unsigned() {
+    use crate::schema::unsigned_table::dsl::*;
+    let connection = connection();
+    connection
+        .execute("INSERT INTO unsigned_table VALUES (1,1), (2,2)")
+        .unwrap();
+
+    let expected_data = vec![2, 3];
+    let data = unsigned_table.select(value + 1).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+
+    let expected_data = vec![2, 4];
+    let data = unsigned_table.select(value + value).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+}
+
+#[test]
+#[cfg(feature = "mysql")]
+fn test_subtracting_unsigned() {
+    use crate::schema::unsigned_table::dsl::*;
+    let connection = connection();
+    connection
+        .execute("INSERT INTO unsigned_table VALUES (1,1), (2,2)")
+        .unwrap();
+
+    let expected_data = vec![0, 1];
+    let data = unsigned_table.select(value - 1).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+
+    let expected_data = vec![0, 0];
+    let data = unsigned_table.select(value - value).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+}
+
+#[test]
+#[cfg(feature = "mysql")]
+fn test_multiplying_unsigned() {
+    use crate::schema::unsigned_table::dsl::*;
+    let connection = connection();
+    connection
+        .execute("INSERT INTO unsigned_table VALUES (1,1), (2,2)")
+        .unwrap();
+
+    let expected_data = vec![1, 2];
+    let data = unsigned_table.select(value * 1).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+
+    let expected_data = vec![1, 4];
+    let data = unsigned_table.select(value * value).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+}
+
+#[test]
+#[cfg(feature = "mysql")]
+fn test_dividing_unsigned() {
+    use crate::schema::unsigned_table::dsl::*;
+    let connection = connection();
+    connection
+        .execute("INSERT INTO unsigned_table VALUES (1,1), (2,2)")
+        .unwrap();
+
+    let expected_data = vec![1, 2];
+    let data = unsigned_table.select(value / 1).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+
+    let expected_data = vec![1, 1];
+    let data = unsigned_table.select(value / value).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+}
+
+#[test]
+#[cfg(feature = "mysql")]
+fn test_multiple_unsigned() {
+    use crate::schema::unsigned_table::dsl::*;
+    let connection = connection();
+    connection
+        .execute("INSERT INTO unsigned_table VALUES (1,1), (2,2)")
+        .unwrap();
+
+    let expected_data = vec![1, 1];
+    let data = unsigned_table.select(value / id).load(&connection);
+    assert_eq!(Ok(expected_data), data);
+}
diff --git a/diesel_tests/tests/group_by.rs b/diesel_tests/tests/group_by.rs
index f4035ac..668fd14 100644
--- a/diesel_tests/tests/group_by.rs
+++ b/diesel_tests/tests/group_by.rs
@@ -8,9 +8,31 @@
 fn group_by_generates_group_by_sql() {
     let source = users::table
         .group_by(users::name)
-        .select(users::id)
+        .select(users::name)
         .filter(users::hair_color.is_null());
-    let mut expected_sql = "SELECT `users`.`id` FROM `users` \
+    let mut expected_sql = "SELECT `users`.`name` FROM `users` \
+                            WHERE `users`.`hair_color` IS NULL \
+                            GROUP BY `users`.`name` \
+                            -- binds: []"
+        .to_string();
+    if cfg!(feature = "postgres") {
+        expected_sql = expected_sql.replace('`', "\"");
+    }
+
+    assert_eq!(
+        expected_sql,
+        debug_query::<TestBackend, _>(&source).to_string()
+    );
+}
+
+#[test]
+fn group_by_mixed_aggregate_column_and_aggregate_function() {
+    use diesel::dsl::max;
+    let source = users::table
+        .group_by(users::name)
+        .select((max(users::id), users::name))
+        .filter(users::hair_color.is_null());
+    let mut expected_sql = "SELECT max(`users`.`id`), `users`.`name` FROM `users` \
                             WHERE `users`.`hair_color` IS NULL \
                             GROUP BY `users`.`name` \
                             -- binds: []"
@@ -33,9 +55,9 @@
     let source = users::table
         .into_boxed::<TestBackend>()
         .group_by(users::name)
-        .select(users::id)
+        .select(users::name)
         .filter(users::hair_color.is_null());
-    let mut expected_sql = "SELECT `users`.`id` FROM `users` \
+    let mut expected_sql = "SELECT `users`.`name` FROM `users` \
                             WHERE `users`.`hair_color` IS NULL \
                             GROUP BY `users`.`name` \
                             -- binds: []"
diff --git a/diesel_tests/tests/insert_from_select.rs b/diesel_tests/tests/insert_from_select.rs
index 1d8d672..fe5ac61 100644
--- a/diesel_tests/tests/insert_from_select.rs
+++ b/diesel_tests/tests/insert_from_select.rs
@@ -207,17 +207,19 @@
 }
 
 #[test]
-#[cfg(feature = "postgres")]
+#[cfg(any(feature = "postgres", feature = "sqlite"))]
 fn on_conflict_do_nothing_with_select() {
     use crate::schema::posts::dsl::*;
     use crate::schema::users::dsl::{id, name, users};
 
     let conn = connection_with_sean_and_tess_in_users_table();
-    sql_query("CREATE UNIQUE INDEX ON posts (title)")
+
+    sql_query("CREATE UNIQUE INDEX index_on_title ON posts (title)")
         .execute(&conn)
         .unwrap();
     let query = users
         .select((id, name.concat(" says hi")))
+        .filter(id.ge(0)) // SQLite needs a where clause
         .insert_into(posts)
         .into_columns((user_id, title))
         .on_conflict_do_nothing();
@@ -233,17 +235,19 @@
 }
 
 #[test]
-#[cfg(feature = "postgres")]
+#[cfg(any(feature = "postgres", feature = "sqlite"))]
 fn on_conflict_do_update_with_select() {
     use crate::schema::posts::dsl::*;
     use crate::schema::users::dsl::{id, name, users};
 
     let conn = connection_with_sean_and_tess_in_users_table();
-    sql_query("CREATE UNIQUE INDEX ON posts (title)")
+
+    sql_query("CREATE UNIQUE INDEX index_on_title ON posts (title)")
         .execute(&conn)
         .unwrap();
     let query = users
         .select((id, name.concat(" says hi")))
+        .filter(id.ge(0)) // SQLite needs a where clause
         .insert_into(posts)
         .into_columns((user_id, title))
         .on_conflict(title)
@@ -267,3 +271,51 @@
     ];
     assert_eq!(expected, data);
 }
+
+#[test]
+#[cfg(any(feature = "postgres", feature = "sqlite"))]
+fn on_conflict_do_update_with_boxed_select() {
+    use crate::schema::posts::dsl::*;
+    use crate::schema::users::dsl::{id, name, users};
+
+    let conn = connection_with_sean_and_tess_in_users_table();
+
+    sql_query("CREATE UNIQUE INDEX index_on_title ON posts (title)")
+        .execute(&conn)
+        .unwrap();
+
+    users
+        .select((id, name.concat(" says hi")))
+        .into_boxed()
+        .insert_into(posts)
+        .into_columns((user_id, title))
+        .on_conflict(title)
+        .do_update()
+        .set(body.eq("updated"))
+        .execute(&conn)
+        .unwrap();
+
+    insert_into(users)
+        .values(name.eq("Ruby"))
+        .execute(&conn)
+        .unwrap();
+
+    users
+        .select((id, name.concat(" says hi")))
+        .into_boxed()
+        .insert_into(posts)
+        .into_columns((user_id, title))
+        .on_conflict(title)
+        .do_update()
+        .set(body.eq("updated"))
+        .execute(&conn)
+        .unwrap();
+
+    let data = posts.select((title, body)).load(&conn).unwrap();
+    let expected = vec![
+        (String::from("Sean says hi"), Some(String::from("updated"))),
+        (String::from("Tess says hi"), Some(String::from("updated"))),
+        (String::from("Ruby says hi"), None),
+    ];
+    assert_eq!(expected, data);
+}
diff --git a/diesel_tests/tests/lib.rs b/diesel_tests/tests/lib.rs
index 177d14f..64c3712 100644
--- a/diesel_tests/tests/lib.rs
+++ b/diesel_tests/tests/lib.rs
@@ -6,9 +6,6 @@
 extern crate chrono;
 #[macro_use]
 extern crate diesel;
-#[macro_use]
-#[allow(deprecated)]
-extern crate diesel_infer_schema;
 #[cfg(feature = "sqlite")]
 #[macro_use]
 extern crate diesel_migrations;
@@ -21,8 +18,6 @@
 mod boxed_queries;
 mod connection;
 #[cfg(feature = "postgres")]
-mod custom_schemas;
-#[cfg(feature = "postgres")]
 mod custom_types;
 mod debug;
 mod delete;
@@ -38,6 +33,7 @@
 mod insert_from_select;
 mod internal_details;
 mod joins;
+mod limit_offset;
 mod macros;
 mod order;
 mod perf_details;
@@ -51,6 +47,3 @@
 mod types;
 mod types_roundtrip;
 mod update;
-
-#[cfg(rustfmt)]
-mod postgres_specific_schema;
diff --git a/diesel_tests/tests/limit_offset.rs b/diesel_tests/tests/limit_offset.rs
new file mode 100644
index 0000000..92a963c
--- /dev/null
+++ b/diesel_tests/tests/limit_offset.rs
@@ -0,0 +1,160 @@
+use super::schema::*;
+use diesel::*;
+
+#[test]
+fn limit() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess')")
+        .unwrap();
+
+    let expected_data = vec![("Sean".to_string(), None::<String>)];
+    let actual_data: Vec<_> = users
+        .select((name, hair_color))
+        .limit(1)
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+}
+
+#[cfg(any(feature = "sqlite", feature = "postgres"))]
+#[test]
+fn offset() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess')")
+        .unwrap();
+
+    let expected_data = vec![("Tess".to_string(), None::<String>)];
+    let q = users.select((name, hair_color)).offset(1);
+    let actual_data: Vec<_> = q.load(&connection).unwrap();
+    assert_eq!(expected_data, actual_data);
+}
+
+#[test]
+fn limit_offset() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess'), ('Ruby')")
+        .unwrap();
+
+    let expected_data = vec![("Ruby".to_string(), None::<String>)];
+    let q = users.select((name, hair_color)).limit(1).offset(2);
+    let actual_data: Vec<_> = q.load(&connection).unwrap();
+    assert_eq!(expected_data, actual_data);
+}
+
+#[test]
+fn boxed_limit() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess')")
+        .unwrap();
+
+    let expected_data = vec![("Sean".to_string(), None::<String>)];
+    let actual_data: Vec<_> = users
+        .into_boxed()
+        .select((name, hair_color))
+        .limit(1)
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+
+    let actual_data: Vec<_> = users
+        .select((name, hair_color))
+        .limit(1)
+        .into_boxed()
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+}
+
+#[test]
+fn boxed_offset() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess')")
+        .unwrap();
+
+    let expected_data = vec![("Tess".to_string(), None::<String>)];
+
+    let actual_data: Vec<_> = users
+        .select((name, hair_color))
+        .into_boxed()
+        .offset(1)
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+
+    #[cfg(any(feature = "postgres", feature = "sqlite"))]
+    {
+        let actual_data: Vec<_> = users
+            .select((name, hair_color))
+            .offset(1)
+            .into_boxed()
+            .load(&connection)
+            .unwrap();
+        assert_eq!(expected_data, actual_data);
+    }
+}
+
+#[test]
+fn boxed_limit_offset() {
+    use crate::schema::users::dsl::*;
+
+    let connection = connection();
+    connection
+        .execute("INSERT INTO users (name) VALUES ('Sean'), ('Tess'), ('Ruby')")
+        .unwrap();
+
+    let expected_data = vec![("Ruby".to_string(), None::<String>)];
+
+    let actual_data: Vec<_> = users
+        .into_boxed()
+        .select((name, hair_color))
+        .limit(1)
+        .offset(2)
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+
+    let actual_data: Vec<_> = users
+        .select((name, hair_color))
+        .limit(1)
+        .offset(2)
+        .into_boxed()
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+
+    let actual_data: Vec<_> = users
+        .select((name, hair_color))
+        .limit(1)
+        .into_boxed()
+        .offset(2)
+        .load(&connection)
+        .unwrap();
+    assert_eq!(expected_data, actual_data);
+
+    #[cfg(any(feature = "postgres", feature = "sqlite"))]
+    {
+        let actual_data: Vec<_> = users
+            .select((name, hair_color))
+            .offset(2)
+            .into_boxed()
+            .limit(1)
+            .load(&connection)
+            .unwrap();
+        assert_eq!(expected_data, actual_data);
+    }
+}
diff --git a/diesel_tests/tests/backend_specifics.rs b/diesel_tests/tests/schema/backend_specifics.rs
similarity index 100%
rename from diesel_tests/tests/backend_specifics.rs
rename to diesel_tests/tests/schema/backend_specifics.rs
diff --git a/diesel_tests/tests/schema/custom_schemas.rs b/diesel_tests/tests/schema/custom_schemas.rs
new file mode 100644
index 0000000..a9ee3fe
--- /dev/null
+++ b/diesel_tests/tests/schema/custom_schemas.rs
@@ -0,0 +1,23 @@
+use crate::schema::connection;
+use diesel::*;
+
+include!("pg_custom_schema.rs");
+use self::custom_schema::users;
+
+#[derive(Insertable)]
+#[table_name = "users"]
+struct NewUser {
+    id: i32,
+}
+
+#[test]
+fn custom_schemas_are_loaded_by_infer_schema() {
+    let conn = connection();
+    insert_into(users::table)
+        .values(&NewUser { id: 1 })
+        .execute(&conn)
+        .unwrap();
+    let users = users::table.select(users::id).load(&conn);
+
+    assert_eq!(Ok(vec![1]), users);
+}
diff --git a/diesel_tests/tests/schema.rs b/diesel_tests/tests/schema/mod.rs
similarity index 92%
rename from diesel_tests/tests/schema.rs
rename to diesel_tests/tests/schema/mod.rs
index 3af2644..f93bbe2 100644
--- a/diesel_tests/tests/schema.rs
+++ b/diesel_tests/tests/schema/mod.rs
@@ -1,18 +1,14 @@
 use diesel::*;
 
-#[cfg(all(feature = "postgres", feature = "backend_specific_database_url"))]
-infer_schema!("dotenv:PG_DATABASE_URL");
-#[cfg(all(feature = "sqlite", feature = "backend_specific_database_url"))]
-infer_schema!("dotenv:SQLITE_DATABASE_URL");
-#[cfg(all(feature = "mysql", feature = "backend_specific_database_url"))]
-infer_schema!("dotenv:MYSQL_DATABASE_URL");
-#[cfg(not(feature = "backend_specific_database_url"))]
-infer_schema!("dotenv:DATABASE_URL");
+#[cfg(feature = "postgres")]
+mod custom_schemas;
 
+#[cfg(feature = "postgres")]
+include!("pg_schema.rs");
 #[cfg(feature = "sqlite")]
-mod test_infer_schema_works_on_empty_database {
-    infer_schema!(":memory:");
-}
+include!("sqlite_schema.rs");
+#[cfg(feature = "mysql")]
+include!("mysql_schema.rs");
 
 #[derive(
     PartialEq, Eq, Debug, Clone, Queryable, Identifiable, Insertable, AsChangeset, QueryableByName,
diff --git a/diesel_tests/tests/schema/mysql_schema.rs b/diesel_tests/tests/schema/mysql_schema.rs
new file mode 100644
index 0000000..60a853c
--- /dev/null
+++ b/diesel_tests/tests/schema/mysql_schema.rs
@@ -0,0 +1,230 @@
+table! {
+    all_the_blobs (id) {
+        id -> Integer,
+        tiny -> Tinyblob,
+        normal -> Blob,
+        medium -> Mediumblob,
+        big -> Longblob,
+    }
+}
+
+table! {
+    comments (id) {
+        id -> Integer,
+        post_id -> Integer,
+        text -> Text,
+    }
+}
+
+table! {
+    composite_fk (id) {
+        id -> Integer,
+        post_id -> Integer,
+        user_id -> Integer,
+    }
+}
+
+table! {
+    cyclic_fk_1 (id) {
+        id -> Integer,
+        cyclic_fk_2_id -> Nullable<Integer>,
+    }
+}
+
+table! {
+    cyclic_fk_2 (id) {
+        id -> Integer,
+        cyclic_fk_1_id -> Nullable<Integer>,
+    }
+}
+
+table! {
+    fk_doesnt_reference_pk (id) {
+        id -> Integer,
+        random -> Nullable<Text>,
+    }
+}
+
+table! {
+    fk_inits (id) {
+        id -> Integer,
+    }
+}
+
+table! {
+    fk_tests (id) {
+        id -> Integer,
+        fk_id -> Integer,
+    }
+}
+
+table! {
+    followings (user_id, post_id) {
+        user_id -> Integer,
+        post_id -> Integer,
+        email_notifications -> Bool,
+    }
+}
+
+table! {
+    likes (comment_id, user_id) {
+        comment_id -> Integer,
+        user_id -> Integer,
+    }
+}
+
+table! {
+    multiple_fks_to_same_table (id) {
+        id -> Integer,
+        post_id_1 -> Nullable<Integer>,
+        post_id_2 -> Nullable<Integer>,
+    }
+}
+
+table! {
+    nullable_doubles (id) {
+        id -> Integer,
+        n -> Nullable<Double>,
+    }
+}
+
+table! {
+    nullable_table (id) {
+        id -> Integer,
+        value -> Nullable<Integer>,
+    }
+}
+
+table! {
+    numbers (n) {
+        n -> Integer,
+    }
+}
+
+table! {
+    points (x, y) {
+        x -> Integer,
+        y -> Integer,
+    }
+}
+
+table! {
+    pokes (user_id) {
+        user_id -> Integer,
+        poke_count -> Integer,
+    }
+}
+
+table! {
+    posts (id) {
+        id -> Integer,
+        user_id -> Integer,
+        title -> Text,
+        body -> Nullable<Text>,
+    }
+}
+
+table! {
+    precision_numbers (n) {
+        n -> Double,
+    }
+}
+
+table! {
+    self_referential_fk (id) {
+        id -> Integer,
+        parent_id -> Integer,
+    }
+}
+
+table! {
+    special_comments (id) {
+        id -> Integer,
+        special_post_id -> Integer,
+    }
+}
+
+table! {
+    special_posts (id) {
+        id -> Integer,
+        user_id -> Integer,
+        title -> Text,
+    }
+}
+
+table! {
+    trees (id) {
+        id -> Integer,
+        parent_id -> Nullable<Integer>,
+    }
+}
+
+table! {
+    unsigned_table (id) {
+        id -> Unsigned<Integer>,
+        value -> Unsigned<Integer>,
+    }
+}
+
+table! {
+    users (id) {
+        id -> Integer,
+        name -> Text,
+        hair_color -> Nullable<Text>,
+    }
+}
+
+table! {
+    users_with_name_pk (name) {
+        name -> Varchar,
+    }
+}
+
+table! {
+    with_keywords (fn_) {
+        #[sql_name = "fn"]
+        fn_ -> Integer,
+        #[sql_name = "let"]
+        let_ -> Integer,
+        #[sql_name = "extern"]
+        extern_ -> Integer,
+    }
+}
+
+joinable!(comments -> posts (post_id));
+joinable!(cyclic_fk_1 -> cyclic_fk_2 (cyclic_fk_2_id));
+joinable!(fk_tests -> fk_inits (fk_id));
+joinable!(followings -> posts (post_id));
+joinable!(followings -> users (user_id));
+joinable!(likes -> comments (comment_id));
+joinable!(likes -> users (user_id));
+joinable!(posts -> users (user_id));
+
+allow_tables_to_appear_in_same_query!(
+    all_the_blobs,
+    comments,
+    composite_fk,
+    cyclic_fk_1,
+    cyclic_fk_2,
+    fk_doesnt_reference_pk,
+    fk_inits,
+    fk_tests,
+    followings,
+    likes,
+    multiple_fks_to_same_table,
+    nullable_doubles,
+    nullable_table,
+    numbers,
+    points,
+    pokes,
+    posts,
+    precision_numbers,
+    self_referential_fk,
+    special_comments,
+    special_posts,
+    trees,
+    unsigned_table,
+    users,
+    users_with_name_pk,
+    with_keywords,
+);
diff --git a/diesel_tests/tests/schema/pg_custom_schema.rs b/diesel_tests/tests/schema/pg_custom_schema.rs
new file mode 100644
index 0000000..6731895
--- /dev/null
+++ b/diesel_tests/tests/schema/pg_custom_schema.rs
@@ -0,0 +1,7 @@
+pub mod custom_schema {
+    table! {
+        custom_schema.users (id) {
+            id -> Int4,
+        }
+    }
+}
diff --git a/diesel_tests/tests/schema/pg_schema.rs b/diesel_tests/tests/schema/pg_schema.rs
new file mode 100644
index 0000000..c838b56
--- /dev/null
+++ b/diesel_tests/tests/schema/pg_schema.rs
@@ -0,0 +1,224 @@
+table! {
+    all_the_ranges (int4) {
+        int4 -> Int4range,
+        int8 -> Int8range,
+        num -> Numrange,
+        ts -> Tsrange,
+        tstz -> Tstzrange,
+        date -> Daterange,
+    }
+}
+
+table! {
+    comments (id) {
+        id -> Int4,
+        post_id -> Int4,
+        text -> Text,
+    }
+}
+
+table! {
+    composite_fk (id) {
+        id -> Int4,
+        post_id -> Int4,
+        user_id -> Int4,
+    }
+}
+
+table! {
+    cyclic_fk_1 (id) {
+        id -> Int4,
+        cyclic_fk_2_id -> Nullable<Int4>,
+    }
+}
+
+table! {
+    cyclic_fk_2 (id) {
+        id -> Int4,
+        cyclic_fk_1_id -> Nullable<Int4>,
+    }
+}
+
+table! {
+    fk_doesnt_reference_pk (id) {
+        id -> Int4,
+        random -> Nullable<Text>,
+    }
+}
+
+table! {
+    fk_inits (id) {
+        id -> Int4,
+    }
+}
+
+table! {
+    fk_tests (id) {
+        id -> Int4,
+        fk_id -> Int4,
+    }
+}
+
+table! {
+    followings (user_id, post_id) {
+        user_id -> Int4,
+        post_id -> Int4,
+        email_notifications -> Bool,
+    }
+}
+
+table! {
+    likes (comment_id, user_id) {
+        comment_id -> Int4,
+        user_id -> Int4,
+    }
+}
+
+table! {
+    multiple_fks_to_same_table (id) {
+        id -> Int4,
+        post_id_1 -> Nullable<Int4>,
+        post_id_2 -> Nullable<Int4>,
+    }
+}
+
+table! {
+    nullable_doubles (id) {
+        id -> Int4,
+        n -> Nullable<Float8>,
+    }
+}
+
+table! {
+    nullable_table (id) {
+        id -> Int4,
+        value -> Nullable<Int4>,
+    }
+}
+
+table! {
+    numbers (n) {
+        n -> Int4,
+    }
+}
+
+table! {
+    points (x, y) {
+        x -> Int4,
+        y -> Int4,
+    }
+}
+
+table! {
+    pokes (user_id) {
+        user_id -> Int4,
+        poke_count -> Int4,
+    }
+}
+
+table! {
+    posts (id) {
+        id -> Int4,
+        user_id -> Int4,
+        title -> Varchar,
+        body -> Nullable<Text>,
+        tags -> Array<Text>,
+    }
+}
+
+table! {
+    precision_numbers (n) {
+        n -> Float8,
+    }
+}
+
+table! {
+    self_referential_fk (id) {
+        id -> Int4,
+        parent_id -> Int4,
+    }
+}
+
+table! {
+    special_comments (id) {
+        id -> Int4,
+        special_post_id -> Int4,
+    }
+}
+
+table! {
+    special_posts (id) {
+        id -> Int4,
+        user_id -> Int4,
+        title -> Varchar,
+    }
+}
+
+table! {
+    trees (id) {
+        id -> Int4,
+        parent_id -> Nullable<Int4>,
+    }
+}
+
+table! {
+    users (id) {
+        id -> Int4,
+        name -> Varchar,
+        hair_color -> Nullable<Varchar>,
+    }
+}
+
+table! {
+    users_with_name_pk (name) {
+        name -> Varchar,
+    }
+}
+
+table! {
+    with_keywords (fn_) {
+        #[sql_name = "fn"]
+        fn_ -> Int4,
+        #[sql_name = "let"]
+        let_ -> Int4,
+        #[sql_name = "extern"]
+        extern_ -> Int4,
+    }
+}
+
+joinable!(comments -> posts (post_id));
+joinable!(fk_tests -> fk_inits (fk_id));
+joinable!(followings -> posts (post_id));
+joinable!(followings -> users (user_id));
+joinable!(likes -> comments (comment_id));
+joinable!(likes -> users (user_id));
+joinable!(pokes -> users (user_id));
+joinable!(posts -> users (user_id));
+
+allow_tables_to_appear_in_same_query!(
+    all_the_ranges,
+    comments,
+    composite_fk,
+    cyclic_fk_1,
+    cyclic_fk_2,
+    fk_doesnt_reference_pk,
+    fk_inits,
+    fk_tests,
+    followings,
+    likes,
+    multiple_fks_to_same_table,
+    nullable_doubles,
+    nullable_table,
+    numbers,
+    points,
+    pokes,
+    posts,
+    precision_numbers,
+    self_referential_fk,
+    special_comments,
+    special_posts,
+    trees,
+    users,
+    users_with_name_pk,
+    with_keywords,
+);
diff --git a/diesel_tests/tests/postgres_specific_schema.rs b/diesel_tests/tests/schema/postgres_specific_schema.rs
similarity index 100%
rename from diesel_tests/tests/postgres_specific_schema.rs
rename to diesel_tests/tests/schema/postgres_specific_schema.rs
diff --git a/diesel_tests/tests/schema/sqlite_schema.rs b/diesel_tests/tests/schema/sqlite_schema.rs
new file mode 100644
index 0000000..99f88f5
--- /dev/null
+++ b/diesel_tests/tests/schema/sqlite_schema.rs
@@ -0,0 +1,278 @@
+table! {
+    comments (id) {
+        id -> Integer,
+        post_id -> Integer,
+        text -> Text,
+    }
+}
+
+table! {
+    composite_fk (id) {
+        id -> Nullable<Integer>,
+        post_id -> Integer,
+        user_id -> Integer,
+    }
+}
+
+table! {
+    cyclic_fk_1 (id) {
+        id -> Nullable<Integer>,
+        cyclic_fk_2_id -> Nullable<Binary>,
+    }
+}
+
+table! {
+    cyclic_fk_2 (id) {
+        id -> Nullable<Integer>,
+        cyclic_fk_1_id -> Nullable<Binary>,
+    }
+}
+
+table! {
+    fk_doesnt_reference_pk (id) {
+        id -> Nullable<Integer>,
+        random -> Nullable<Text>,
+    }
+}
+
+table! {
+    fk_inits (id) {
+        id -> Nullable<Integer>,
+    }
+}
+
+table! {
+    fk_tests (id) {
+        id -> Nullable<Integer>,
+        fk_id -> Integer,
+    }
+}
+
+table! {
+    followings (user_id, post_id) {
+        user_id -> Integer,
+        post_id -> Integer,
+        email_notifications -> Bool,
+    }
+}
+
+table! {
+    infer_all_the_bools (col1) {
+        col1 -> Bool,
+        col2 -> Bool,
+        col3 -> Bool,
+        col4 -> Bool,
+    }
+}
+
+table! {
+    infer_all_the_datetime_types (dt) {
+        dt -> Timestamp,
+        date -> Date,
+        time -> Time,
+        timestamp -> Timestamp,
+    }
+}
+
+table! {
+    infer_all_the_floats (col1) {
+        col1 -> Float,
+        col2 -> Float,
+        col3 -> Double,
+        col4 -> Double,
+        col5 -> Double,
+        col6 -> Double,
+    }
+}
+
+table! {
+    infer_all_the_ints (col1) {
+        col1 -> Integer,
+        col2 -> Integer,
+        col3 -> Integer,
+        col4 -> Integer,
+        col5 -> SmallInt,
+        col6 -> SmallInt,
+        col7 -> SmallInt,
+        col8 -> BigInt,
+        col9 -> BigInt,
+        col10 -> BigInt,
+        col11 -> SmallInt,
+        col12 -> Integer,
+        col13 -> BigInt,
+    }
+}
+
+table! {
+    infer_all_the_strings (col1) {
+        col1 -> Text,
+        col2 -> Text,
+        col3 -> Text,
+        col4 -> Text,
+        col5 -> Text,
+        col6 -> Text,
+        col7 -> Text,
+        col8 -> Text,
+        col9 -> Binary,
+        col10 -> Binary,
+    }
+}
+
+table! {
+    likes (comment_id, user_id) {
+        comment_id -> Integer,
+        user_id -> Integer,
+    }
+}
+
+table! {
+    multiple_fks_to_same_table (id) {
+        id -> Nullable<Integer>,
+        post_id_1 -> Nullable<Binary>,
+        post_id_2 -> Nullable<Binary>,
+    }
+}
+
+table! {
+    nullable_doubles (id) {
+        id -> Nullable<Integer>,
+        n -> Nullable<Double>,
+    }
+}
+
+table! {
+    nullable_table (id) {
+        id -> Integer,
+        value -> Nullable<Integer>,
+    }
+}
+
+table! {
+    numbers (n) {
+        n -> Nullable<Integer>,
+    }
+}
+
+table! {
+    points (x, y) {
+        x -> Integer,
+        y -> Integer,
+    }
+}
+
+table! {
+    pokes (user_id) {
+        user_id -> Integer,
+        poke_count -> Integer,
+    }
+}
+
+table! {
+    posts (id) {
+        id -> Integer,
+        user_id -> Integer,
+        title -> Text,
+        body -> Nullable<Text>,
+    }
+}
+
+table! {
+    precision_numbers (n) {
+        n -> Double,
+    }
+}
+
+table! {
+    self_referential_fk (id) {
+        id -> Nullable<Integer>,
+        parent_id -> Integer,
+    }
+}
+
+table! {
+    special_comments (id) {
+        id -> Nullable<Integer>,
+        special_post_id -> Integer,
+    }
+}
+
+table! {
+    special_posts (id) {
+        id -> Nullable<Integer>,
+        user_id -> Integer,
+        title -> Text,
+    }
+}
+
+table! {
+    trees (id) {
+        id -> Integer,
+        parent_id -> Nullable<Integer>,
+    }
+}
+
+table! {
+    users (id) {
+        id -> Integer,
+        name -> Text,
+        hair_color -> Nullable<Text>,
+    }
+}
+
+table! {
+    users_with_name_pk (name) {
+        name -> Nullable<Text>,
+    }
+}
+
+table! {
+    with_keywords (fn_) {
+        #[sql_name = "fn"]
+        fn_ -> Integer,
+        #[sql_name = "let"]
+        let_ -> Integer,
+        #[sql_name = "extern"]
+        extern_ -> Integer,
+    }
+}
+
+joinable!(comments -> posts (post_id));
+joinable!(fk_tests -> fk_inits (fk_id));
+joinable!(followings -> posts (post_id));
+joinable!(followings -> users (user_id));
+joinable!(likes -> comments (comment_id));
+joinable!(likes -> users (user_id));
+joinable!(pokes -> users (user_id));
+joinable!(posts -> users (user_id));
+
+allow_tables_to_appear_in_same_query!(
+    comments,
+    composite_fk,
+    cyclic_fk_1,
+    cyclic_fk_2,
+    fk_doesnt_reference_pk,
+    fk_inits,
+    fk_tests,
+    followings,
+    infer_all_the_bools,
+    infer_all_the_datetime_types,
+    infer_all_the_floats,
+    infer_all_the_ints,
+    infer_all_the_strings,
+    likes,
+    multiple_fks_to_same_table,
+    nullable_doubles,
+    nullable_table,
+    numbers,
+    points,
+    pokes,
+    posts,
+    precision_numbers,
+    self_referential_fk,
+    special_comments,
+    special_posts,
+    trees,
+    users,
+    users_with_name_pk,
+    with_keywords,
+);
diff --git a/diesel_tests/tests/select.rs b/diesel_tests/tests/select.rs
index 809ba33..65c49ce 100644
--- a/diesel_tests/tests/select.rs
+++ b/diesel_tests/tests/select.rs
@@ -497,3 +497,18 @@
 
     assert_eq!(Ok(vec![tess]), users_with_post_using_name_as_title);
 }
+
+#[test]
+fn selecting_multiple_aggregate_expressions_without_group_by() {
+    use self::users::dsl::*;
+    use diesel::dsl::{count_star, max};
+
+    let connection = connection_with_sean_and_tess_in_users_table();
+    let (count, max_name) = users
+        .select((count_star(), max(name)))
+        .get_result::<(i64, _)>(&connection)
+        .unwrap();
+
+    assert_eq!(2, count);
+    assert_eq!(Some(String::from("Tess")), max_name);
+}
diff --git a/diesel_tests/tests/types.rs b/diesel_tests/tests/types.rs
index 8358aa3..9b4303f 100644
--- a/diesel_tests/tests/types.rs
+++ b/diesel_tests/tests/types.rs
@@ -1229,14 +1229,15 @@
     select(sql::<T>(sql_str)).first(&connection).unwrap()
 }
 
-use diesel::expression::AsExpression;
+use diesel::expression::{is_aggregate, AsExpression, ValidGrouping};
 use diesel::query_builder::{QueryFragment, QueryId};
 use std::fmt::Debug;
 
 fn query_to_sql_equality<T, U>(sql_str: &str, value: U) -> bool
 where
     U: AsExpression<T> + Debug + Clone,
-    U::Expression: SelectableExpression<(), SqlType = T>,
+    U::Expression: SelectableExpression<(), SqlType = T>
+        + ValidGrouping<(), IsAggregate = is_aggregate::Never>,
     U::Expression: QueryFragment<TestBackend> + QueryId,
     T: QueryId + SingleValue,
 {
diff --git a/diesel_tests/tests/types_roundtrip.rs b/diesel_tests/tests/types_roundtrip.rs
index febd36d..99e4670 100644
--- a/diesel_tests/tests/types_roundtrip.rs
+++ b/diesel_tests/tests/types_roundtrip.rs
@@ -12,7 +12,7 @@
 pub use diesel::sql_types::HasSqlType;
 pub use diesel::*;
 
-use diesel::expression::AsExpression;
+use diesel::expression::{AsExpression, NonAggregate};
 use diesel::query_builder::{QueryFragment, QueryId};
 #[cfg(feature = "postgres")]
 use std::collections::Bound;
@@ -31,6 +31,7 @@
         + Clone
         + ::std::fmt::Debug,
     <T as AsExpression<ST>>::Expression: SelectableExpression<(), SqlType = ST>
+        + NonAggregate
         + QueryFragment<<TestConnection as Connection>::Backend>
         + QueryId,
 {
diff --git a/diesel_tests/tests/update.rs b/diesel_tests/tests/update.rs
index a8d1b74..6ae654c 100644
--- a/diesel_tests/tests/update.rs
+++ b/diesel_tests/tests/update.rs
@@ -226,7 +226,7 @@
 }
 
 #[test]
-#[cfg(feature = "postgres")]
+#[cfg(any(feature = "postgres", feature = "sqlite"))]
 fn upsert_with_no_changes_executes_do_nothing() {
     #[derive(AsChangeset)]
     #[table_name = "users"]
@@ -246,7 +246,7 @@
 }
 
 #[test]
-#[cfg(feature = "postgres")]
+#[cfg(any(feature = "postgres", feature = "sqlite"))]
 fn upsert_with_no_changes_executes_do_nothing_owned() {
     #[derive(AsChangeset)]
     #[table_name = "users"]
@@ -270,8 +270,8 @@
 fn upsert_with_sql_literal_for_target() {
     use crate::schema::users::dsl::*;
     use diesel::dsl::sql;
-    use diesel::pg::upsert::*;
     use diesel::sql_types::Text;
+    use diesel::upsert::*;
 
     let connection = connection();
     // This index needs to happen before the insert or we'll get a deadlock
diff --git a/docker-compose.yml b/docker-compose.yml
index 80bd7b1..8142748 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -13,6 +13,8 @@
   postgres:
     image: postgres
     container_name: diesel.postgres
+    environment:
+      POSTGRES_PASSWORD: "postgres"
     volumes:
       - "postgres-data:/var/lib/postgres/:delegated"
       - "./docker/postgres/init:/docker-entrypoint-initdb.d"
diff --git a/examples/mysql/all_about_inserts/Cargo.toml b/examples/mysql/all_about_inserts/Cargo.toml
index 720654a..78157b1 100644
--- a/examples/mysql/all_about_inserts/Cargo.toml
+++ b/examples/mysql/all_about_inserts/Cargo.toml
@@ -4,7 +4,7 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["mysql", "chrono"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["mysql", "chrono"] }
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
diff --git a/examples/mysql/getting_started_step_1/Cargo.toml b/examples/mysql/getting_started_step_1/Cargo.toml
index b1ac30b..bfa2e14 100644
--- a/examples/mysql/getting_started_step_1/Cargo.toml
+++ b/examples/mysql/getting_started_step_1/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["mysql"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["mysql"] }
 dotenv = "0.10"
diff --git a/examples/mysql/getting_started_step_2/Cargo.toml b/examples/mysql/getting_started_step_2/Cargo.toml
index ecb26e0..26f7e54 100644
--- a/examples/mysql/getting_started_step_2/Cargo.toml
+++ b/examples/mysql/getting_started_step_2/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["mysql"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["mysql"] }
 dotenv = "0.10"
diff --git a/examples/mysql/getting_started_step_3/Cargo.toml b/examples/mysql/getting_started_step_3/Cargo.toml
index aeab29f..960a787 100644
--- a/examples/mysql/getting_started_step_3/Cargo.toml
+++ b/examples/mysql/getting_started_step_3/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["mysql"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["mysql"] }
 dotenv = "0.10"
diff --git a/examples/mysql/test_all b/examples/mysql/test_all
index 43a3736..772e312 100755
--- a/examples/mysql/test_all
+++ b/examples/mysql/test_all
@@ -4,6 +4,9 @@
 set -a
 if [ -f ../../.env ]; then . ../../.env; fi
 DATABASE_URL=${MYSQL_EXAMPLE_DATABASE_URL}
+if [ -z "$BACKEND" ]; then
+    export BACKEND="mysql"
+fi
 set +a
 
 for dir in $(find . -maxdepth 1 -mindepth 1 -type d); do
diff --git a/examples/postgres/advanced-blog-cli/Cargo.toml b/examples/postgres/advanced-blog-cli/Cargo.toml
index 9f0b7ff..c1e57f9 100644
--- a/examples/postgres/advanced-blog-cli/Cargo.toml
+++ b/examples/postgres/advanced-blog-cli/Cargo.toml
@@ -6,7 +6,7 @@
 [dependencies]
 bcrypt = "0.1.0"
 chrono = "0.4.0"
-diesel = { version = "1.4.0", features = ["postgres", "chrono"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres", "chrono"] }
 dotenv = "0.10.0"
 structopt = "0.1.6"
 structopt-derive = "0.1.6"
@@ -14,5 +14,5 @@
 
 [dev-dependencies]
 assert_matches = "1.1"
-diesel_migrations = { version = "1.4.0", features = ["postgres"] }
+diesel_migrations = { version = "1.4.0", features = ["postgres"], path = "../../../diesel_migrations" }
 lazy_static = "1.0"
diff --git a/examples/postgres/all_about_inserts/Cargo.toml b/examples/postgres/all_about_inserts/Cargo.toml
index 75327d4..ec0ddd7 100644
--- a/examples/postgres/all_about_inserts/Cargo.toml
+++ b/examples/postgres/all_about_inserts/Cargo.toml
@@ -4,7 +4,7 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["postgres"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres"] }
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
diff --git a/examples/postgres/all_about_updates/Cargo.toml b/examples/postgres/all_about_updates/Cargo.toml
index 2e9d246..9abffd2 100644
--- a/examples/postgres/all_about_updates/Cargo.toml
+++ b/examples/postgres/all_about_updates/Cargo.toml
@@ -4,4 +4,4 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["postgres"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres"] }
diff --git a/examples/postgres/getting_started_step_1/Cargo.toml b/examples/postgres/getting_started_step_1/Cargo.toml
index c02bc5a..e6d6031 100644
--- a/examples/postgres/getting_started_step_1/Cargo.toml
+++ b/examples/postgres/getting_started_step_1/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["postgres"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres"] }
 dotenv = "0.10"
diff --git a/examples/postgres/getting_started_step_2/Cargo.toml b/examples/postgres/getting_started_step_2/Cargo.toml
index 35cfb33..c1e740b 100644
--- a/examples/postgres/getting_started_step_2/Cargo.toml
+++ b/examples/postgres/getting_started_step_2/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["postgres"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres"] }
 dotenv = "0.10"
diff --git a/examples/postgres/getting_started_step_3/Cargo.toml b/examples/postgres/getting_started_step_3/Cargo.toml
index 06bc58d..90c6103 100644
--- a/examples/postgres/getting_started_step_3/Cargo.toml
+++ b/examples/postgres/getting_started_step_3/Cargo.toml
@@ -4,5 +4,5 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["postgres"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["postgres"] }
 dotenv = "0.10"
diff --git a/examples/postgres/test_all b/examples/postgres/test_all
index a52c559..94e4dab 100755
--- a/examples/postgres/test_all
+++ b/examples/postgres/test_all
@@ -4,6 +4,9 @@
 set -a
 if [ -f ../../.env ]; then . ../../.env; fi
 DATABASE_URL=${PG_EXAMPLE_DATABASE_URL}
+if [ -z "$BACKEND" ]; then
+    export BACKEND="postgres"
+fi
 set +a
 
 for dir in $(find . -maxdepth 1 -mindepth 1 -type d); do
diff --git a/examples/sqlite/all_about_inserts/Cargo.toml b/examples/sqlite/all_about_inserts/Cargo.toml
index 5d1f2ff..36a2a94 100644
--- a/examples/sqlite/all_about_inserts/Cargo.toml
+++ b/examples/sqlite/all_about_inserts/Cargo.toml
@@ -4,7 +4,7 @@
 authors = ["Sean Griffin <sean@seantheprogrammer.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["sqlite", "chrono"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["sqlite", "chrono"] }
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
diff --git a/examples/sqlite/getting_started_step_1/Cargo.toml b/examples/sqlite/getting_started_step_1/Cargo.toml
index a262528..60e3da4 100644
--- a/examples/sqlite/getting_started_step_1/Cargo.toml
+++ b/examples/sqlite/getting_started_step_1/Cargo.toml
@@ -5,5 +5,5 @@
 authors = ["Taryn Hill <taryn@phrohdoh.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["sqlite"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["sqlite"] }
 dotenv = "0.10"
diff --git a/examples/sqlite/getting_started_step_2/Cargo.toml b/examples/sqlite/getting_started_step_2/Cargo.toml
index bdd8c3d..4dc805b 100644
--- a/examples/sqlite/getting_started_step_2/Cargo.toml
+++ b/examples/sqlite/getting_started_step_2/Cargo.toml
@@ -5,5 +5,5 @@
 authors = ["Taryn Hill <taryn@phrohdoh.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["sqlite"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["sqlite"] }
 dotenv = "0.10"
diff --git a/examples/sqlite/getting_started_step_3/Cargo.toml b/examples/sqlite/getting_started_step_3/Cargo.toml
index 9bcfd0e..dd6119aa 100644
--- a/examples/sqlite/getting_started_step_3/Cargo.toml
+++ b/examples/sqlite/getting_started_step_3/Cargo.toml
@@ -5,5 +5,5 @@
 authors = ["Taryn Hill <taryn@phrohdoh.com>"]
 
 [dependencies]
-diesel = { version = "1.4.0", features = ["sqlite"] }
+diesel = { version = "2.0.0", path = "../../../diesel", features = ["sqlite"] }
 dotenv = "0.10"
diff --git a/examples/sqlite/test_all b/examples/sqlite/test_all
index 04af2c0..f9474e5 100755
--- a/examples/sqlite/test_all
+++ b/examples/sqlite/test_all
@@ -3,6 +3,10 @@
 
 export DATABASE_URL="/tmp/test_examples.db"
 
+if [ -z "$BACKEND" ]; then
+    export BACKEND="sqlite"
+fi
+
 for dir in $(find . -maxdepth 1 -mindepth 1 -type d); do
   cd $dir
   ../../../bin/diesel database reset
diff --git a/guide_drafts/backend_installation.md b/guide_drafts/backend_installation.md
index 94e29b1..78fad4a 100644
--- a/guide_drafts/backend_installation.md
+++ b/guide_drafts/backend_installation.md
@@ -47,8 +47,8 @@
 1. Install the following to add the MySQL APT repository.
 
     ```
-    wget https://dev.mysql.com/get/mysql-apt-config_0.8.7-1_all.deb
-    sudo dpkg -i mysql-apt-config_0.8.7-1_all.deb
+    wget https://dev.mysql.com/get/mysql-apt-config_0.8.15-1_all.deb
+    sudo dpkg -i mysql-apt-config_0.8.15-1_all.deb
     ```
 
     Select `<Ok>`.
diff --git a/migrations/mysql/20170209180355_add_one_off_tables_from_integration_tests/up.sql b/migrations/mysql/20170209180355_add_one_off_tables_from_integration_tests/up.sql
index 1ae48b2..cacf5aa 100644
--- a/migrations/mysql/20170209180355_add_one_off_tables_from_integration_tests/up.sql
+++ b/migrations/mysql/20170209180355_add_one_off_tables_from_integration_tests/up.sql
@@ -1,5 +1,5 @@
 CREATE TABLE numbers (n INTEGER PRIMARY KEY);
 CREATE TABLE precision_numbers (n DOUBLE PRECISION NOT NULL PRIMARY KEY);
 CREATE TABLE nullable_doubles (id INT PRIMARY KEY AUTO_INCREMENT, n DOUBLE PRECISION);
-CREATE TABLE users_with_name_pk (name VARCHAR(255) PRIMARY KEY);
+CREATE TABLE users_with_name_pk (name VARCHAR(50) PRIMARY KEY);
 CREATE TABLE points (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY (x, y));
diff --git a/migrations/mysql/2020-01-25-033332_add unsigned table/down.sql b/migrations/mysql/2020-01-25-033332_add unsigned table/down.sql
new file mode 100644
index 0000000..8ebd48e
--- /dev/null
+++ b/migrations/mysql/2020-01-25-033332_add unsigned table/down.sql
@@ -0,0 +1 @@
+DROP TABLE unsigned_table;
\ No newline at end of file
diff --git a/migrations/mysql/2020-01-25-033332_add unsigned table/up.sql b/migrations/mysql/2020-01-25-033332_add unsigned table/up.sql
new file mode 100644
index 0000000..00f52b3
--- /dev/null
+++ b/migrations/mysql/2020-01-25-033332_add unsigned table/up.sql
@@ -0,0 +1,4 @@
+CREATE TABLE unsigned_table (
+    id INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
+    value INTEGER UNSIGNED NOT NULL
+) CHARACTER SET utf8mb4;
\ No newline at end of file
diff --git a/migrations/mysql/2020-02-18-111430_create_pokes/down.sql b/migrations/mysql/2020-02-18-111430_create_pokes/down.sql
new file mode 100644
index 0000000..cdadc29
--- /dev/null
+++ b/migrations/mysql/2020-02-18-111430_create_pokes/down.sql
@@ -0,0 +1 @@
+DROP TABLE pokes;
diff --git a/migrations/mysql/2020-02-18-111430_create_pokes/up.sql b/migrations/mysql/2020-02-18-111430_create_pokes/up.sql
new file mode 100644
index 0000000..f685fd3
--- /dev/null
+++ b/migrations/mysql/2020-02-18-111430_create_pokes/up.sql
@@ -0,0 +1,5 @@
+create table pokes (
+    user_id INTEGER PRIMARY KEY NOT NULL REFERENCES users(id),
+    poke_count INTEGER NOT NULL,
+    CONSTRAINT pokes_poke_count_check CHECK (poke_count > 0)
+);
diff --git a/migrations/postgresql/2020-02-18-111430_create_pokes/down.sql b/migrations/postgresql/2020-02-18-111430_create_pokes/down.sql
new file mode 100644
index 0000000..cdadc29
--- /dev/null
+++ b/migrations/postgresql/2020-02-18-111430_create_pokes/down.sql
@@ -0,0 +1 @@
+DROP TABLE pokes;
diff --git a/migrations/postgresql/2020-02-18-111430_create_pokes/up.sql b/migrations/postgresql/2020-02-18-111430_create_pokes/up.sql
new file mode 100644
index 0000000..f685fd3
--- /dev/null
+++ b/migrations/postgresql/2020-02-18-111430_create_pokes/up.sql
@@ -0,0 +1,5 @@
+create table pokes (
+    user_id INTEGER PRIMARY KEY NOT NULL REFERENCES users(id),
+    poke_count INTEGER NOT NULL,
+    CONSTRAINT pokes_poke_count_check CHECK (poke_count > 0)
+);
diff --git a/migrations/sqlite/2020-02-18-111430_create_pokes/down.sql b/migrations/sqlite/2020-02-18-111430_create_pokes/down.sql
new file mode 100644
index 0000000..cdadc29
--- /dev/null
+++ b/migrations/sqlite/2020-02-18-111430_create_pokes/down.sql
@@ -0,0 +1 @@
+DROP TABLE pokes;
diff --git a/migrations/sqlite/2020-02-18-111430_create_pokes/up.sql b/migrations/sqlite/2020-02-18-111430_create_pokes/up.sql
new file mode 100644
index 0000000..f685fd3
--- /dev/null
+++ b/migrations/sqlite/2020-02-18-111430_create_pokes/up.sql
@@ -0,0 +1,5 @@
+create table pokes (
+    user_id INTEGER PRIMARY KEY NOT NULL REFERENCES users(id),
+    poke_count INTEGER NOT NULL,
+    CONSTRAINT pokes_poke_count_check CHECK (poke_count > 0)
+);
diff --git a/rust-toolchain b/rust-toolchain
index bf50e91..32b7211 100644
--- a/rust-toolchain
+++ b/rust-toolchain
@@ -1 +1 @@
-1.37.0
+1.40.0