diff --git a/.circleci/config.yml b/.circleci/config.yml
index d78c56232a61bd6313ad2836c0ee1949c967e28e..0d9a0c00ccfed09526d159cc66662fe4a30e19f1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -256,6 +256,24 @@ jobs:
             lein with-profile +ci test
           no_output_timeout: 5m
 
+  be-tests-snowflake:
+    working_directory: /home/circleci/metabase/metabase/
+    docker:
+      - image: circleci/clojure:lein-2.8.1-node-browsers
+    steps:
+      - attach_workspace:
+          at: /home/circleci/
+      - restore_cache:
+          <<: *restore-be-deps-cache
+      - run:
+          name: Run backend unit tests (Snowflake)
+          environment:
+            ENGINES: h2,snowflake
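+          # if skip-driver-tests.sh exits 0 (the Snowflake tests can be skipped
+          # for this change), the `||` short-circuits and lein never runs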
+          command: >
+            /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh snowflake ||
+            lein with-profile +ci test
+          no_output_timeout: 10m
+
   be-tests-sqlserver:
     <<: *defaults
     steps:
@@ -341,11 +359,19 @@ jobs:
     docker:
       - image: circleci/clojure:lein-2.8.1-node-browsers
       - image: metabase/presto-mb-ci
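+        # JAVA_TOOL_OPTIONS is read by the JVM at startup; capping the heap
+        # presumably keeps the Presto container within CI memory limits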
+        environment:
+          JAVA_TOOL_OPTIONS: "-Xmx2g"
     steps:
       - attach_workspace:
           at: /home/circleci/
       - restore_cache:
           <<: *restore-be-deps-cache
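+      # Presto can take a while to come up inside its container; poll its HTTP
+      # port so the test step doesn't race a half-started server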
+      - run:
+          name: Wait for Presto to be ready
+          command: >
+            /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh sparksql ||
+            while ! nc -z localhost 8080; do sleep 0.1; done
+          no_output_timeout: 5m
       - run:
           name: Run backend unit tests (Presto)
           environment:
@@ -395,15 +421,22 @@ jobs:
       - restore_cache:
           <<: *restore-fe-deps-cache
       - run:
-          name: Run yarn
-          command: SAUCE_CONNECT_DOWNLOAD_ON_INSTALL=true yarn
+          name: Run yarn if yarn.lock checksum has changed
+          command: >
+            if [ ! -f yarn.lock.checksum ] || [ "$(md5sum yarn.lock)" != "$(cat yarn.lock.checksum)" ];
+              then SAUCE_CONNECT_DOWNLOAD_ON_INSTALL=true yarn;
+            fi
           no_output_timeout: 5m
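+      # record the checksum of yarn.lock after a successful install; the "Run
+      # yarn" step compares against this copy on the next build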
+      - run:
+          name: Save yarn checksum
+          command: md5sum yarn.lock > yarn.lock.checksum
       - save_cache:
           key: fe-deps-{{ checksum "yarn.lock" }}
           paths:
             - /home/circleci/.yarn
             - /home/circleci/.yarn-cache
             - /home/circleci/metabase/metabase/node_modules
+            - /home/circleci/yarn.lock.checksum
 
   fe-linter-eslint:
     <<: *defaults
@@ -478,7 +511,9 @@ jobs:
       - run:
           name: Build uberjar if needed
           command: >
-            if [ ! -f './target/uberjar/metabase.jar' ]; then ./bin/build version uberjar; fi
+            if [ ! -f './target/uberjar/metabase.jar' ];
+              then ./bin/build version uberjar;
+            fi
           no_output_timeout: 5m
       - save_cache:
           key: uberjar-{{ checksum "./backend-checksums.txt" }}
@@ -579,6 +614,9 @@ workflows:
       - be-tests-vertica:
           requires:
             - be-tests
+      - be-tests-snowflake:
+          requires:
+            - be-tests
       - fe-deps:
           requires:
             - checkout
@@ -623,6 +661,7 @@ workflows:
             - be-tests-druid
             - be-tests-redshift
             - be-tests-vertica
+            - be-tests-snowflake
             - fe-linter-eslint
             - fe-linter-prettier
             - fe-linter-flow
diff --git a/README.md b/README.md
index 292677ef4e6b1d8e71ac05ae64d6c6206aaefcdc..7a761bc9f0eae222e5b25fc69404ad4e5fd0dc24 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ Some questions come up over and over again. Check here first:
 
 # Security Disclosure
 
-Security is very important to us. If discover any issue regarding security, please disclose the information responsibly by sending an email to security@metabase.com and not by creating a GitHub issue.
+Security is very important to us. If you discover any issue regarding security, please disclose the information responsibly by sending an email to security@metabase.com and not by creating a GitHub issue.
 
 
 # Contributing
diff --git a/bin/i18n/update-translation-template b/bin/i18n/update-translation-template
index 15eba26961d183e6544630c77ba3d5933f57f39a..fd0873cc0ab6dd6575ab9c8f8fbe1df25c63912d 100755
--- a/bin/i18n/update-translation-template
+++ b/bin/i18n/update-translation-template
@@ -15,11 +15,16 @@ fi
 
 POT_NAME="locales/metabase.pot"
 POT_BACKEND_NAME="locales/metabase-backend.pot"
+# NOTE: hardcoded in .babelrc
 POT_FRONTEND_NAME="locales/metabase-frontend.pot"
+# NOTE: hardcoded in src/metabase/automagic_dashboards/rules.clj
+POT_AUTODASH_NAME="locales/metabase-automatic-dashboards.pot"
 
 mkdir -p "locales"
 
-# update frontend pot
+#######################
+# update frontend pot #
+#######################
 
 # NOTE: about twice as fast to call babel directly rather than a full webpack build
 BABEL_ENV=extract ./node_modules/.bin/babel -q -x .js,.jsx -o /dev/null frontend/src
@@ -29,7 +34,9 @@ BABEL_ENV=extract ./node_modules/.bin/babel -q -x .js,.jsx -o /dev/null frontend
 sed -i".bak" -E 's/\$\{ *([0-9]+) *\}/{\1}/g' "$POT_FRONTEND_NAME"
 rm "$POT_FRONTEND_NAME.bak"
 
-# update backend pot
+######################
+# update backend pot #
+######################
 
 # xgettext before 0.19 does not understand --add-location=file. Even CentOS
 # 7 ships with an older gettext. We will therefore generate full location
@@ -55,5 +62,14 @@ find src -name "*.clj" | xgettext                   \
 sed -i".bak" 's/charset=CHARSET/charset=UTF-8/' "$POT_BACKEND_NAME"
 rm "$POT_BACKEND_NAME.bak"
 
-# merge frontend and backend pots
-msgcat "$POT_FRONTEND_NAME" "$POT_BACKEND_NAME" > "$POT_NAME"
+########################
+# update auto dash pot #
+########################
+
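+# writes $POT_AUTODASH_NAME (the output path is hardcoded on the Clojure side)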
+lein generate-automagic-dashboards-pot
+
+##################
+# merge all pots #
+##################
+
+msgcat "$POT_FRONTEND_NAME" "$POT_BACKEND_NAME" "$POT_AUTODASH_NAME" > "$POT_NAME"
diff --git a/docs/api-documentation.md b/docs/api-documentation.md
index d9adda78e32a89e2df74f973b56ad2521dda8938..0c459fa33d57b798b5ed6742a6a7ea1ec5c861f2 100644
--- a/docs/api-documentation.md
+++ b/docs/api-documentation.md
@@ -16,7 +16,7 @@ Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it inste
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/alert/`
@@ -34,7 +34,7 @@ Fetch all questions for the given question (`Card`) id
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/alert/`
@@ -53,7 +53,7 @@ Create a new Alert.
 
 *  **`alert_above_goal`** value may be nil, or if non-nil, value must be a boolean.
 
-*  **`new-alert-request-body`** 
+*  **`new-alert-request-body`**
 
 
 ## `PUT /api/alert/:id`
@@ -62,7 +62,7 @@ Update a `Alert` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`alert_condition`** value may be nil, or if non-nil, value must be one of: `goal`, `rows`.
 
@@ -76,7 +76,7 @@ Update a `Alert` with ID.
 
 *  **`archived`** value may be nil, or if non-nil, value must be a boolean.
 
-*  **`alert-updates`** 
+*  **`alert-updates`**
 
 
 ## `PUT /api/alert/:id/unsubscribe`
@@ -85,7 +85,7 @@ Unsubscribes a user from the given alert
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/automagic-dashboards/:entity/:entity-id-or-query`
@@ -96,7 +96,7 @@ Return an automagic dashboard for entity `entity` with id `ìd`.
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`show`** invalid show value
 
@@ -111,7 +111,7 @@ Return an automagic dashboard analyzing cell in  automagic dashboard for entity
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`cell-query`** value couldn't be parsed as base64 encoded JSON
 
@@ -128,7 +128,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`cell-query`** value couldn't be parsed as base64 encoded JSON
 
@@ -136,7 +136,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent
 
 *  **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc"
 
-*  **`comparison-entity-id-or-query`** 
+*  **`comparison-entity-id-or-query`**
 
 
 ## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule`
@@ -148,7 +148,7 @@ Return an automagic dashboard analyzing cell in question  with id `id` defined b
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`cell-query`** value couldn't be parsed as base64 encoded JSON
 
@@ -169,7 +169,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`cell-query`** value couldn't be parsed as base64 encoded JSON
 
@@ -181,7 +181,7 @@ Return an automagic comparison dashboard for cell in automagic dashboard for ent
 
 *  **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc"
 
-*  **`comparison-entity-id-or-query`** 
+*  **`comparison-entity-id-or-query`**
 
 
 ## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query`
@@ -193,13 +193,13 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` compa
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`show`** invalid show value
 
 *  **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc"
 
-*  **`comparison-entity-id-or-query`** 
+*  **`comparison-entity-id-or-query`**
 
 
 ## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule`
@@ -210,7 +210,7 @@ Return an automagic dashboard for entity `entity` with id `ìd` using rule `rule
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`prefix`** invalid value for prefix
 
@@ -228,7 +228,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` using
 
 *  **`entity`** Invalid entity type
 
-*  **`entity-id-or-query`** 
+*  **`entity-id-or-query`**
 
 *  **`prefix`** invalid value for prefix
 
@@ -238,7 +238,7 @@ Return an automagic comparison dashboard for entity `entity` with id `ìd` using
 
 *  **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc"
 
-*  **`comparison-entity-id-or-query`** 
+*  **`comparison-entity-id-or-query`**
 
 
 ## `GET /api/automagic-dashboards/database/:id/candidates`
@@ -247,7 +247,7 @@ Return a list of candidates for automagic dashboards orderd by interestingness.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `DELETE /api/card/:card-id/favorite`
@@ -256,7 +256,7 @@ Unfavorite a Card.
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
 
 ## `DELETE /api/card/:card-id/public_link`
@@ -267,7 +267,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
 
 ## `DELETE /api/card/:id`
@@ -276,7 +276,7 @@ Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/card/`
@@ -298,7 +298,7 @@ Get `Card` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/card/:id/related`
@@ -307,7 +307,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/card/embeddable`
@@ -345,7 +345,7 @@ Create a new `Card`.
 
 *  **`name`** value must be a non-blank string.
 
-*  **`dataset_query`** 
+*  **`dataset_query`**
 
 *  **`display`** value must be a non-blank string.
 
@@ -356,7 +356,7 @@ Favorite a Card.
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
 
 ## `POST /api/card/:card-id/public_link`
@@ -369,7 +369,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
 
 ## `POST /api/card/:card-id/query`
@@ -378,9 +378,9 @@ Run the query associated with a Card.
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
-*  **`parameters`** 
+*  **`parameters`**
 
 *  **`ignore_cache`** value may be nil, or if non-nil, value must be a boolean.
 
@@ -392,7 +392,7 @@ Run the query associated with a Card, and return its results as a file in the sp
 
 ##### PARAMS:
 
-*  **`card-id`** 
+*  **`card-id`**
 
 *  **`export-format`** value must be one of: `csv`, `json`, `xlsx`.
 
@@ -417,7 +417,7 @@ Return related entities for an ad-hoc query.
 
 ##### PARAMS:
 
-*  **`query`** 
+*  **`query`**
 
 
 ## `PUT /api/card/:id`
@@ -442,7 +442,7 @@ Update a `Card`.
 
 *  **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
-*  **`card-updates`** 
+*  **`card-updates`**
 
 *  **`name`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -450,7 +450,7 @@ Update a `Card`.
 
 *  **`dataset_query`** value may be nil, or if non-nil, value must be a map.
 
-*  **`id`** 
+*  **`id`**
 
 *  **`display`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -474,7 +474,7 @@ Fetch a specific Collection with standard details added
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/collection/:id/items`
@@ -486,7 +486,7 @@ Fetch a specific Collection's items with the following options:
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`model`** value may be nil, or if non-nil, value must be one of: `card`, `collection`, `dashboard`, `pulse`.
 
@@ -546,7 +546,7 @@ Modify an existing Collection, including archiving or unarchiving it, or moving
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`name`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -558,7 +558,7 @@ Modify an existing Collection, including archiving or unarchiving it, or moving
 
 *  **`parent_id`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
-*  **`collection-updates`** 
+*  **`collection-updates`**
 
 
 ## `PUT /api/collection/graph`
@@ -580,7 +580,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`dashboard-id`** 
+*  **`dashboard-id`**
 
 
 ## `DELETE /api/dashboard/:id`
@@ -589,7 +589,7 @@ Delete a `Dashboard`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `DELETE /api/dashboard/:id/cards`
@@ -598,7 +598,7 @@ Remove a `DashboardCard` from a `Dashboard`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`dashcardId`** value must be a valid integer greater than zero.
 
@@ -609,7 +609,7 @@ Unfavorite a Dashboard.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/dashboard/`
@@ -631,7 +631,7 @@ Get `Dashboard` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/dashboard/:id/related`
@@ -640,7 +640,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/dashboard/:id/revisions`
@@ -649,7 +649,7 @@ Fetch `Revisions` for `Dashboard` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/dashboard/embeddable`
@@ -684,7 +684,7 @@ Create a new `Dashboard`.
 
 *  **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
-*  **`dashboard`** 
+*  **`dashboard`**
 
 
 ## `POST /api/dashboard/:dashboard-id/public_link`
@@ -697,7 +697,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`dashboard-id`** 
+*  **`dashboard-id`**
 
 
 ## `POST /api/dashboard/:id/cards`
@@ -706,15 +706,15 @@ Add a `Card` to a `Dashboard`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`cardId`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
 *  **`parameter_mappings`** value must be an array. Each value must be a map.
 
-*  **`series`** 
+*  **`series`**
 
-*  **`dashboard-card`** 
+*  **`dashboard-card`**
 
 
 ## `POST /api/dashboard/:id/favorite`
@@ -723,7 +723,7 @@ Favorite a Dashboard.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/dashboard/:id/revert`
@@ -732,7 +732,7 @@ Revert a `Dashboard` to a prior `Revision`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`revision_id`** value must be an integer greater than zero.
 
@@ -743,7 +743,7 @@ Save a denormalized description of dashboard.
 
 ##### PARAMS:
 
-*  **`dashboard`** 
+*  **`dashboard`**
 
 
 ## `POST /api/dashboard/save/collection/:parent-collection-id`
@@ -752,9 +752,9 @@ Save a denormalized description of dashboard into collection with ID `:parent-co
 
 ##### PARAMS:
 
-*  **`parent-collection-id`** 
+*  **`parent-collection-id`**
 
-*  **`dashboard`** 
+*  **`dashboard`**
 
 
 ## `PUT /api/dashboard/:id`
@@ -783,7 +783,7 @@ Update a `Dashboard`.
 
 *  **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
-*  **`dash-updates`** 
+*  **`dash-updates`**
 
 *  **`name`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -791,7 +791,7 @@ Update a `Dashboard`.
 
 *  **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map.
 
-*  **`id`** 
+*  **`id`**
 
 *  **`position`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
@@ -810,9 +810,9 @@ Update `Cards` on a `Dashboard`. Request body should have the form:
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
-*  **`cards`** 
+*  **`cards`**
 
 
 ## `DELETE /api/database/:id`
@@ -821,7 +821,7 @@ Delete a `Database`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/`
@@ -843,7 +843,7 @@ Get `Database` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/:id/autocomplete_suggestions`
@@ -857,7 +857,7 @@ Return a list of autocomplete suggestions for a given PREFIX.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`prefix`** value must be a non-blank string.
 
@@ -868,7 +868,7 @@ Get a list of all `Fields` in `Database`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/:id/idfields`
@@ -877,7 +877,7 @@ Get a list of all primary key `Fields` for `Database`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/:id/metadata`
@@ -887,7 +887,7 @@ Get metadata about a `Database`, including all of its `Tables` and `Fields`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/:id/schema/:schema`
@@ -896,9 +896,9 @@ Returns a list of tables for the given database `id` and `schema`
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
-*  **`schema`** 
+*  **`schema`**
 
 
 ## `GET /api/database/:id/schemas`
@@ -907,7 +907,7 @@ Returns a list of all the schemas found for the database `id`
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/database/:virtual-db/metadata`
@@ -945,7 +945,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/database/:id/rescan_values`
@@ -956,7 +956,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/database/:id/sync`
@@ -965,7 +965,7 @@ Update the metadata for this `Database`. This happens asynchronously.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/database/:id/sync_schema`
@@ -976,7 +976,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/database/sample_dataset`
@@ -1019,13 +1019,13 @@ You must be a superuser to do this.
 
 *  **`caveats`** value may be nil, or if non-nil, value must be a string.
 
-*  **`is_full_sync`** 
+*  **`is_full_sync`**
 
 *  **`details`** value may be nil, or if non-nil, value must be a map.
 
-*  **`id`** 
+*  **`id`**
 
-*  **`is_on_demand`** 
+*  **`is_on_demand`**
 
 
 ## `POST /api/dataset/`
@@ -1036,7 +1036,7 @@ Execute a query and retrieve the results in the usual format.
 
 *  **`database`** value must be an integer.
 
-*  **`query`** 
+*  **`query`**
 
 
 ## `POST /api/dataset/:export-format`
@@ -1056,9 +1056,9 @@ Get historical query execution duration.
 
 ##### PARAMS:
 
-*  **`database`** 
+*  **`database`**
 
-*  **`query`** 
+*  **`query`**
 
 
 ## `DELETE /api/email/`
@@ -1096,7 +1096,7 @@ Fetch a Card via a JSON Web Token signed with the `embedding-secret-key`.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 
 ## `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id`
@@ -1106,11 +1106,11 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`remapped-id`** 
+*  **`remapped-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1121,11 +1121,11 @@ Search for values of a Field that is referenced by an embedded Card.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`search-field-id`** 
+*  **`search-field-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1138,9 +1138,9 @@ Fetch FieldValues for a Field that is referenced by an embedded Card.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
 
 ## `GET /api/embed/card/:token/query`
@@ -1154,11 +1154,11 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/embed/card/:token/query/:export-format`
@@ -1167,13 +1167,13 @@ Like `GET /api/embed/card/query`, but returns the results as a file in the speci
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 *  **`export-format`** value must be one of: `csv`, `json`, `xlsx`.
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/embed/dashboard/:token`
@@ -1186,7 +1186,7 @@ Fetch a Dashboard via a JSON Web Token signed with the `embedding-secret-key`.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 
 ## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id`
@@ -1195,15 +1195,15 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`dashcard-id`** 
+*  **`dashcard-id`**
 
-*  **`card-id`** 
+*  **`card-id`**
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format`
@@ -1213,17 +1213,17 @@ Fetch the results of running a Card belonging to a Dashboard using a JSON Web To
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 *  **`export-format`** value must be one of: `csv`, `json`, `xlsx`.
 
-*  **`dashcard-id`** 
+*  **`dashcard-id`**
 
-*  **`card-id`** 
+*  **`card-id`**
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id`
@@ -1233,11 +1233,11 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`remapped-id`** 
+*  **`remapped-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1248,11 +1248,11 @@ Search for values of a Field that is referenced by a Card in an embedded Dashboa
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`search-field-id`** 
+*  **`search-field-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1265,9 +1265,9 @@ Fetch FieldValues for a Field that is used as a param in an embedded Dashboard.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
 
 ## `DELETE /api/field/:id/dimension`
@@ -1276,7 +1276,7 @@ Remove the dimension associated to field at ID
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/field/:id`
@@ -1285,7 +1285,7 @@ Get `Field` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/field/:id/related`
@@ -1294,7 +1294,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/field/:id/remapping/:remapped-id`
@@ -1303,22 +1303,22 @@ Fetch remapped Field values.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
-*  **`remapped-id`** 
+*  **`remapped-id`**
 
-*  **`value`** 
+*  **`value`**
 
 
 ## `GET /api/field/:id/search/:search-id`
 
-Search for values of a Field that match values of another Field when breaking out by the 
+Search for values of a Field that match values of another Field when breaking out by the
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
-*  **`search-id`** 
+*  **`search-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1331,7 +1331,7 @@ Get the count and distinct count of `Field` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/field/:id/values`
@@ -1341,7 +1341,7 @@ If a Field's value of `has_field_values` is `list`, return a list of all the dis
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/field/field-literal%2C:field-name%2Ctype%2F:field-type/values`
@@ -1351,7 +1351,7 @@ Implementation of the field values endpoint for fields in the Saved Questions 'v
 
 ##### PARAMS:
 
-*  **`_`** 
+*  **`_`**
 
 
 ## `POST /api/field/:id/dimension`
@@ -1360,7 +1360,7 @@ Sets the dimension for the given field at ID
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`type`** value must be one of: `external`, `internal`.
 
@@ -1378,7 +1378,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/field/:id/rescan_values`
@@ -1390,7 +1390,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/field/:id/values`
@@ -1400,7 +1400,7 @@ Update the fields values and human-readable values for a `Field` whose special t
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`value-pairs`** value must be an array. Each value must be an array.
 
@@ -1427,7 +1427,7 @@ Update `Field` with ID.
 
 *  **`fk_target_field_id`** value may be nil, or if non-nil, value must be an integer greater than zero.
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/geojson/:key`
@@ -1459,7 +1459,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`revision_message`** value must be a non-blank string.
 
@@ -1470,7 +1470,7 @@ Fetch *all* `Metrics`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/metric/:id`
@@ -1481,7 +1481,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/metric/:id/related`
@@ -1490,7 +1490,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/metric/:id/revisions`
@@ -1501,7 +1501,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/metric/`
@@ -1514,7 +1514,7 @@ You must be a superuser to do this.
 
 *  **`name`** value must be a non-blank string.
 
-*  **`description`** 
+*  **`description`**
 
 *  **`table_id`** value must be an integer greater than zero.
 
@@ -1529,7 +1529,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`revision_id`** value must be an integer greater than zero.
 
@@ -1542,7 +1542,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`definition`** value must be a map.
 
@@ -1560,7 +1560,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`important_field_ids`** value must be an array. Each value must be an integer greater than zero.
 
@@ -1572,11 +1572,11 @@ Notification about a potential schema change to one of our `Databases`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
-*  **`table_id`** 
+*  **`table_id`**
 
-*  **`table_name`** 
+*  **`table_name`**
 
 
 ## `DELETE /api/permissions/group/:group-id`
@@ -1587,7 +1587,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`group-id`** 
+*  **`group-id`**
 
 
 ## `DELETE /api/permissions/membership/:id`
@@ -1598,7 +1598,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/permissions/graph`
@@ -1623,7 +1623,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/permissions/membership`
@@ -1664,7 +1664,7 @@ You must be a superuser to do this.
 ## `PUT /api/permissions/graph`
 
 Do a batch update of Permissions by passing in a modified graph. This should return the same graph, in the same
-  format, that you got from `GET /api/permissions/graph`, with any changes made in the wherever neccesary. This
+  format, that you got from `GET /api/permissions/graph`, with any changes made wherever necessary. This
   modified graph must correspond to the `PermissionsGraph` schema. If successful, this endpoint returns the updated
   permissions graph; use this as a base for any further modifications.
 
@@ -1687,7 +1687,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`group-id`** 
+*  **`group-id`**
 
 *  **`name`** value must be a non-blank string.
 
@@ -1698,7 +1698,7 @@ Fetch a Card you're considering embedding by passing a JWT TOKEN.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 
 ## `GET /api/preview-embed/card/:token/query`
@@ -1707,20 +1707,20 @@ Fetch the query results for a Card you're considering embedding by passing a JWT
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/preview-embed/dashboard/:token`
 
-Fetch a Dashboard you're considering embedding by passing a JWT TOKEN. 
+Fetch a Dashboard you're considering embedding by passing a JWT TOKEN.
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
 
 ## `GET /api/preview-embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id`
@@ -1729,15 +1729,15 @@ Fetch the results of running a Card belonging to a Dashboard you're considering
 
 ##### PARAMS:
 
-*  **`token`** 
+*  **`token`**
 
-*  **`dashcard-id`** 
+*  **`dashcard-id`**
 
-*  **`card-id`** 
+*  **`card-id`**
 
-*  **`&`** 
+*  **`&`**
 
-*  **`query-params`** 
+*  **`query-params`**
 
 
 ## `GET /api/public/card/:uuid`
@@ -1747,7 +1747,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
 
 ## `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id`
@@ -1757,11 +1757,11 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`remapped-id`** 
+*  **`remapped-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1772,11 +1772,11 @@ Search for values of a Field that is referenced by a public Card.
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`search-field-id`** 
+*  **`search-field-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1789,9 +1789,9 @@ Fetch FieldValues for a Field that is referenced by a public Card.
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
 
 ## `GET /api/public/card/:uuid/query`
@@ -1801,7 +1801,7 @@ Fetch a publicly-accessible Card an return query results as well as `:card` info
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
 *  **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string.
 
@@ -1813,7 +1813,7 @@ Fetch a publicly-accessible Card and return query results in the specified forma
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
 *  **`export-format`** value must be one of: `csv`, `json`, `xlsx`.
 
@@ -1826,7 +1826,7 @@ Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
 
 ## `GET /api/public/dashboard/:uuid/card/:card-id`
@@ -1836,9 +1836,9 @@ Fetch the results for a Card in a publicly-accessible Dashboard. Does not requir
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`card-id`** 
+*  **`card-id`**
 
 *  **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string.
 
@@ -1850,11 +1850,11 @@ Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`remapped-id`** 
+*  **`remapped-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1865,11 +1865,11 @@ Search for values of a Field that is referenced by a Card in a public Dashboard.
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
-*  **`search-field-id`** 
+*  **`search-field-id`**
 
 *  **`value`** value must be a non-blank string.
 
@@ -1882,9 +1882,9 @@ Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard
 
 ##### PARAMS:
 
-*  **`uuid`** 
+*  **`uuid`**
 
-*  **`field-id`** 
+*  **`field-id`**
 
 
 ## `GET /api/public/oembed`
@@ -1908,7 +1908,7 @@ Delete a Pulse. (DEPRECATED -- don't delete a Pulse anymore -- archive it instea
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/pulse/`
@@ -1926,7 +1926,7 @@ Fetch `Pulse` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/pulse/form_input`
@@ -1940,7 +1940,7 @@ Get HTML rendering of a Card with `id`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/pulse/preview_card_info/:id`
@@ -1949,7 +1949,7 @@ Get JSON object containing HTML rendering of a Card with `id` and other informat
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/pulse/preview_card_png/:id`
@@ -1958,7 +1958,7 @@ Get PNG rendering of a Card with `id`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/pulse/`
@@ -2005,7 +2005,7 @@ Update a Pulse with `id`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`name`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -2019,7 +2019,7 @@ Update a Pulse with `id`.
 
 *  **`archived`** value may be nil, or if non-nil, value must be a boolean.
 
-*  **`pulse-updates`** 
+*  **`pulse-updates`**
 
 
 ## `GET /api/revision/`
@@ -2065,7 +2065,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`revision_message`** value must be a non-blank string.
 
@@ -2083,7 +2083,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/segment/:id/related`
@@ -2092,7 +2092,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/segment/:id/revisions`
@@ -2103,7 +2103,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/segment/`
@@ -2116,7 +2116,7 @@ You must be a superuser to do this.
 
 *  **`name`** value must be a non-blank string.
 
-*  **`description`** 
+*  **`description`**
 
 *  **`table_id`** value must be an integer greater than zero.
 
@@ -2131,7 +2131,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`revision_id`** value must be an integer greater than zero.
 
@@ -2144,7 +2144,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`name`** value must be a non-blank string.
 
@@ -2186,7 +2186,7 @@ Login.
 
 *  **`password`** value must be a non-blank string.
 
-*  **`remote-address`** 
+*  **`remote-address`**
 
 
 ## `POST /api/session/forgot_password`
@@ -2195,11 +2195,11 @@ Send a reset email when user has forgotten their password.
 
 ##### PARAMS:
 
-*  **`server-name`** 
+*  **`server-name`**
 
 *  **`email`** value must be a valid email address.
 
-*  **`remote-address`** 
+*  **`remote-address`**
 
 
 ## `POST /api/session/google_auth`
@@ -2210,7 +2210,7 @@ Login with Google Auth.
 
 *  **`token`** value must be a non-blank string.
 
-*  **`remote-address`** 
+*  **`remote-address`**
 
 
 ## `POST /api/session/reset_password`
@@ -2250,7 +2250,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`settings`** 
+*  **`settings`**
 
 
 ## `PUT /api/setting/:key`
@@ -2264,7 +2264,7 @@ You must be a superuser to do this.
 
 *  **`key`** value must be a non-blank string.
 
-*  **`value`** 
+*  **`value`**
 
 
 ## `GET /api/setup/admin_checklist`
@@ -2281,7 +2281,7 @@ Special endpoint for creating the first user during setup.
 
 ##### PARAMS:
 
-*  **`engine`** 
+*  **`engine`**
 
 *  **`schedules`** value may be nil, or if non-nil, value must be a valid map of schedule maps for a DB.
 
@@ -2293,17 +2293,17 @@ Special endpoint for creating the first user during setup.
 
 *  **`password`** Insufficient password strength
 
-*  **`name`** 
+*  **`name`**
 
-*  **`is_full_sync`** 
+*  **`is_full_sync`**
 
 *  **`site_name`** value must be a non-blank string.
 
 *  **`token`** Token does not match the setup token.
 
-*  **`details`** 
+*  **`details`**
 
-*  **`is_on_demand`** 
+*  **`is_on_demand`**
 
 *  **`last_name`** value must be a non-blank string.
 
@@ -2316,7 +2316,7 @@ Validate that we can connect to a database given a set of details.
 
 *  **`engine`** value must be a valid database engine.
 
-*  **`details`** 
+*  **`details`**
 
 *  **`token`** Token does not match the setup token.
 
@@ -2333,7 +2333,7 @@ You must be a superuser to do this.
 
 *  **`metabot-enabled`** value must be a boolean.
 
-*  **`slack-settings`** 
+*  **`slack-settings`**
 
 
 ## `GET /api/table/`
@@ -2347,7 +2347,7 @@ Get `Table` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/table/:id/fks`
@@ -2356,7 +2356,7 @@ Get all foreign keys whose destination is a `Field` that belongs to this `Table`
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/table/:id/query_metadata`
@@ -2369,7 +2369,7 @@ Get metadata about a `Table` useful for running queries.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`include_sensitive_fields`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false').
 
@@ -2380,7 +2380,7 @@ Return related entities.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/table/card__:id/fks`
@@ -2395,7 +2395,7 @@ Return metadata for the 'virtual' table for a Card.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/table/:id/discard_values`
@@ -2407,7 +2407,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `POST /api/table/:id/rescan_values`
@@ -2419,7 +2419,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `PUT /api/table/:id`
@@ -2428,7 +2428,7 @@ Update `Table` with ID.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`display_name`** value may be nil, or if non-nil, value must be a non-blank string.
 
@@ -2479,7 +2479,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/user/`
@@ -2499,7 +2499,7 @@ Fetch a `User`. You must be fetching yourself *or* be a superuser.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/user/current`
@@ -2521,7 +2521,7 @@ You must be a superuser to do this.
 
 *  **`email`** value must be a valid email address.
 
-*  **`password`** 
+*  **`password`**
 
 *  **`login_attributes`** value may be nil, or if non-nil, value must be a map with each value either a string or number.
 
@@ -2534,7 +2534,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `PUT /api/user/:id`
@@ -2543,7 +2543,7 @@ Update an existing, active `User`.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`email`** value may be nil, or if non-nil, value must be a valid email address.
 
@@ -2551,7 +2551,7 @@ Update an existing, active `User`.
 
 *  **`last_name`** value may be nil, or if non-nil, value must be a non-blank string.
 
-*  **`is_superuser`** 
+*  **`is_superuser`**
 
 *  **`login_attributes`** value may be nil, or if non-nil, value must be a map with each value either a string or number.
 
@@ -2562,11 +2562,11 @@ Update a user's password.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 *  **`password`** Insufficient password strength
 
-*  **`old_password`** 
+*  **`old_password`**
 
 
 ## `PUT /api/user/:id/qbnewb`
@@ -2575,7 +2575,7 @@ Indicate that a user has been informed about the vast intricacies of 'the' Query
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `PUT /api/user/:id/reactivate`
@@ -2586,7 +2586,7 @@ You must be a superuser to do this.
 
 ##### PARAMS:
 
-*  **`id`** 
+*  **`id`**
 
 
 ## `GET /api/util/logs`
@@ -2616,4 +2616,4 @@ Endpoint that checks if the supplied password meets the currently configured pas
 
 ##### PARAMS:
 
-*  **`password`** Insufficient password strength
\ No newline at end of file
+*  **`password`** Insufficient password strength
diff --git a/docs/operations-guide/running-metabase-on-elastic-beanstalk.md b/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
index 6e65b2b924418f186b6905b3c972ae9ca2f3cc5e..12bbe4be28c68bbe72406430611e61e532c5b54a 100644
--- a/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
+++ b/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
@@ -68,7 +68,7 @@ For the base configuration settings we want to make the following selections:
 
 This will run our Metabase application using  [Docker](https://www.docker.com) under the hood.
 
-If you reached this screen from the Metabase start page at [www.metabase.com/start/aws.html](www.metabase.com/start/aws.html) the application code settings will have already been set and you don't need to do anything. This will use the official Metabase Docker image which is [published on Dockerhub](https://hub.docker.com/r/metabase/metabase/).
+If you reached this screen from the Metabase start page at [www.metabase.com/start/aws.html](https://www.metabase.com/start/aws.html) the application code settings will have already been set and you don't need to do anything. This will use the official Metabase Docker image which is [published on Dockerhub](https://hub.docker.com/r/metabase/metabase/).
 
 When your environment type settings look like the above then go ahead and click `Review and launch`.
 
diff --git a/docs/users-guide/13-sql-parameters.md b/docs/users-guide/13-sql-parameters.md
index d1b90c6ea95ad74aee69ebd6c083e9203468b544..7441cda33247347db19fb113d52ad5cd30c107a7 100644
--- a/docs/users-guide/13-sql-parameters.md
+++ b/docs/users-guide/13-sql-parameters.md
@@ -64,6 +64,16 @@ Filter widgets **can't** be displayed if the variable is mapped to a field marke
 ##### Setting a default value
 If you input a default value for your field filter, this value will be selected in the filter whenever you come back to this question. If you clear out the filter, though, no value will be passed (i.e., not even the default value). The default value has no effect on the behavior of your SQL question when viewed in a dashboard.
 
+###### Default value in the query
+You can also define a default value directly in your query, which is useful for complex default values.
+
+For example, to default to the current date:
+```
+SELECT p.*
+FROM products p
+WHERE p.createdAt = [[ {{dateOfCreation}} #]]CURRENT_DATE()
+```
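+
+When `dateOfCreation` is given a value, the optional `[[ ... ]]` block is included in the query and the `#` comments out `CURRENT_DATE()`; when it's left empty, the whole block is omitted and `CURRENT_DATE()` serves as the default. (The `#` comment marker is MySQL syntax; on other databases, substitute the appropriate marker, e.g. `--`.)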
+
 ##### Connecting a SQL question to a dashboard filter
 In order for a saved SQL question to be usable with a dashboard filter, it must contain at least one field filter. The kind of dashboard filter that can be used with the SQL question depends on the field that you map to the question's field filter(s). For example, if you have a field filter called `{% raw %}{{var}}{% endraw %}` and you map it to a State field, you can map a Location dashboard filter to your SQL question. In this example, you'd create a new dashboard or go to an existing one, click the Edit button, and the SQL question that contains your State field filter, add a new dashboard filter or edit an existing Location filter, then click the dropdown on the SQL question card to see the State field filter. [Learn more about dashboard filters here](08-dashboard-filters.md).
 
diff --git a/frontend/src/metabase/components/ColumnarSelector.css b/frontend/src/metabase/components/ColumnarSelector.css
index fb4a782fc28a4bf599c0ad00997fa216463ed3f9..e11a973321920d778cf46d49305ae013e767765c 100644
--- a/frontend/src/metabase/components/ColumnarSelector.css
+++ b/frontend/src/metabase/components/ColumnarSelector.css
@@ -55,10 +55,8 @@
 }
 
 .ColumnarSelector-row--selected {
-  color: inherit !important;
+  color: var(--color-brand);
   background: white;
-  border-top: var(--border-size) var(--border-style) var(--color-border);
-  border-bottom: var(--border-size) var(--border-style) var(--color-border);
 }
 
 .ColumnarSelector-row--disabled {
diff --git a/frontend/src/metabase/components/DatabaseDetailsForm.jsx b/frontend/src/metabase/components/DatabaseDetailsForm.jsx
index 00a25753b4f54338a8eadac46a2438d30df7b2c2..2ca3b246daf696b48c1a7831cf578b9ec21e20d6 100644
--- a/frontend/src/metabase/components/DatabaseDetailsForm.jsx
+++ b/frontend/src/metabase/components/DatabaseDetailsForm.jsx
@@ -287,7 +287,7 @@ export default class DatabaseDetailsForm extends Component {
         <div className="flex align-center Form-offset">
           <div className="Grid-cell--top">
             {jt`${(
-              <a href={credentialsURL} target="_blank">
+              <a className="link" href={credentialsURL} target="_blank">
                 {t`Click here`}
               </a>
             )} to generate a Client ID and Client Secret for your project.`}
@@ -314,7 +314,7 @@ export default class DatabaseDetailsForm extends Component {
           <div className="flex align-center Form-offset">
             <div className="Grid-cell--top">
               {jt`${(
-                <a href={authURL} target="_blank">
+                <a className="link" href={authURL} target="_blank">
                   {t`Click here`}
                 </a>
               )} to get an auth code`}
diff --git a/frontend/src/metabase/components/Popover.jsx b/frontend/src/metabase/components/Popover.jsx
index 5a9ad333d2c53fc34e654ab66322f61399feaed1..37e1e857830d733ed230a2b6bbd33aaf75094578 100644
--- a/frontend/src/metabase/components/Popover.jsx
+++ b/frontend/src/metabase/components/Popover.jsx
@@ -53,6 +53,8 @@ export default class Popover extends Component {
     // by default we align the popover to the center of the target. This
     // causes the edges to be aligned
     alignHorizontalEdge: PropTypes.bool,
+    // don't wrap the popover in an OnClickOutsideWrapper
+    noOnClickOutsideWrapper: PropTypes.bool,
   };
 
   static defaultProps = {
@@ -67,6 +69,7 @@ export default class Popover extends Component {
     targetOffsetY: 5,
     sizeToFit: false,
     autoWidth: false,
+    noOnClickOutsideWrapper: false,
   };
 
   _getPopoverElement() {
@@ -119,40 +122,47 @@ export default class Popover extends Component {
     const childProps = {
       maxHeight: this._getMaxHeight(),
     };
-    return (
-      <OnClickOutsideWrapper
-        handleDismissal={this.handleDismissal}
-        dismissOnEscape={this.props.dismissOnEscape}
-        dismissOnClickOutside={this.props.dismissOnClickOutside}
+    const content = (
+      <div
+        id={this.props.id}
+        className={cx(
+          "PopoverBody",
+          {
+            "PopoverBody--withBackground": this.props.hasBackground,
+            "PopoverBody--withArrow":
+              this.props.hasArrow && this.props.hasBackground,
+            "PopoverBody--autoWidth": this.props.autoWidth,
+          },
+          // TODO kdoh 10/16/2017 we should eventually remove this
+          this.props.className,
+        )}
+        style={this.props.style}
       >
-        <div
-          id={this.props.id}
-          className={cx(
-            "PopoverBody",
-            {
-              "PopoverBody--withBackground": this.props.hasBackground,
-              "PopoverBody--withArrow":
-                this.props.hasArrow && this.props.hasBackground,
-              "PopoverBody--autoWidth": this.props.autoWidth,
-            },
-            // TODO kdoh 10/16/2017 we should eventually remove this
-            this.props.className,
-          )}
-          style={this.props.style}
-        >
-          {typeof this.props.children === "function"
-            ? this.props.children(childProps)
-            : React.Children.count(this.props.children) === 1 &&
-              // NOTE: workaround for https://github.com/facebook/react/issues/12136
-              !Array.isArray(this.props.children)
-              ? React.cloneElement(
-                  React.Children.only(this.props.children),
-                  childProps,
-                )
-              : this.props.children}
-        </div>
-      </OnClickOutsideWrapper>
+        {typeof this.props.children === "function"
+          ? this.props.children(childProps)
+          : React.Children.count(this.props.children) === 1 &&
+            // NOTE: workaround for https://github.com/facebook/react/issues/12136
+            !Array.isArray(this.props.children)
+            ? React.cloneElement(
+                React.Children.only(this.props.children),
+                childProps,
+              )
+            : this.props.children}
+      </div>
     );
+    if (this.props.noOnClickOutsideWrapper) {
+      return content;
+    } else {
+      return (
+        <OnClickOutsideWrapper
+          handleDismissal={this.handleDismissal}
+          dismissOnEscape={this.props.dismissOnEscape}
+          dismissOnClickOutside={this.props.dismissOnClickOutside}
+        >
+          {content}
+        </OnClickOutsideWrapper>
+      );
+    }
   }
 
   _setTetherOptions(tetherOptions, o) {
diff --git a/frontend/src/metabase/components/Select.jsx b/frontend/src/metabase/components/Select.jsx
index 7dacbfd38d19ce7a73896807728ab9f63295e296..f3939346193c6ef609a4194dd311ad81c0f68f71 100644
--- a/frontend/src/metabase/components/Select.jsx
+++ b/frontend/src/metabase/components/Select.jsx
@@ -57,7 +57,7 @@ class BrowserSelect extends Component {
   };
   static defaultProps = {
     className: "",
-    width: 320,
+    width: 300,
     height: 320,
     rowHeight: 40,
     multiple: false,
diff --git a/frontend/src/metabase/components/TooltipPopover.jsx b/frontend/src/metabase/components/TooltipPopover.jsx
index af6dd1ae3d5416bca53a6bb8d7b0115ede0749ba..d4da1c42cbc41b79a91b97f36d44050e045669ae 100644
--- a/frontend/src/metabase/components/TooltipPopover.jsx
+++ b/frontend/src/metabase/components/TooltipPopover.jsx
@@ -32,7 +32,14 @@ const TooltipPopover = ({ children, maxWidth, ...props }) => {
   }
 
   return (
-    <Popover className="PopoverBody--tooltip" targetOffsetY={10} {...props}>
+    <Popover
+      className="PopoverBody--tooltip"
+      targetOffsetY={10}
+      // OnClickOutsideWrapper is unnecessary and causes existing popovers not to
+      // be dismissed if a tooltip is visible, so pass noOnClickOutsideWrapper
+      noOnClickOutsideWrapper
+      {...props}
+    >
       {popoverContent}
     </Popover>
   );
diff --git a/frontend/src/metabase/dashboard/components/DashboardActions.jsx b/frontend/src/metabase/dashboard/components/DashboardActions.jsx
index f66bda30526b91ff61875505b15df5aac3a1c55f..205bb140d5877570d23d6ee4df179a499e6c5d88 100644
--- a/frontend/src/metabase/dashboard/components/DashboardActions.jsx
+++ b/frontend/src/metabase/dashboard/components/DashboardActions.jsx
@@ -21,9 +21,9 @@ export const getDashboardActions = ({
   if (!isEditing && !isEmpty) {
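+    // React list keys must live on the outermost element pushed into `buttons`
+    // (not on a nested child), hence `key` moves up to these wrapper elements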
     buttons.push(
       <RefreshWidget
+        key="refresh"
         data-metabase-event="Dashboard;Refresh Menu Open"
         className="text-brand-hover"
-        key="refresh"
         period={refreshPeriod}
         elapsed={refreshElapsed}
         onChangePeriod={onRefreshPeriodChange}
@@ -33,11 +33,13 @@ export const getDashboardActions = ({
 
   if (!isEditing && isFullscreen) {
     buttons.push(
-      <Tooltip tooltip={isNightMode ? t`Daytime mode` : t`Nighttime mode`}>
+      <Tooltip
+        key="night"
+        tooltip={isNightMode ? t`Daytime mode` : t`Nighttime mode`}
+      >
         <span data-metabase-event={"Dashboard;Night Mode;" + !isNightMode}>
           <NightModeIcon
             className="text-brand-hover cursor-pointer"
-            key="night"
             isNightMode={isNightMode}
             onClick={() => onNightModeChange(!isNightMode)}
           />
@@ -50,6 +52,7 @@ export const getDashboardActions = ({
     // option click to enter fullscreen without making the browser go fullscreen
     buttons.push(
       <Tooltip
+        key="fullscreen"
         tooltip={isFullscreen ? t`Exit fullscreen` : t`Enter fullscreen`}
       >
         <span
@@ -57,7 +60,6 @@ export const getDashboardActions = ({
         >
           <FullscreenIcon
             className="text-brand-hover cursor-pointer"
-            key="fullscreen"
             isFullscreen={isFullscreen}
             onClick={e => onFullscreenChange(!isFullscreen, !e.altKey)}
           />
diff --git a/frontend/src/metabase/dashboard/components/DashboardHeader.jsx b/frontend/src/metabase/dashboard/components/DashboardHeader.jsx
index e5506f77f20deb4b299020731537135284bd1595..3a3380e9e1bd8e1a4181fb9259e193d86e4264b1 100644
--- a/frontend/src/metabase/dashboard/components/DashboardHeader.jsx
+++ b/frontend/src/metabase/dashboard/components/DashboardHeader.jsx
@@ -210,7 +210,7 @@ export default class DashboardHeader extends Component {
     if (!isFullscreen && canEdit) {
       buttons.push(
         <ModalWithTrigger
-          key="add"
+          key="add-a-question"
           ref="addQuestionModal"
           triggerElement={
             <Tooltip tooltip={t`Add a question`}>
@@ -244,7 +244,7 @@ export default class DashboardHeader extends Component {
     if (isEditing) {
       // Parameters
       buttons.push(
-        <span>
+        <span key="add-a-filter">
           <Tooltip tooltip={t`Add a filter`}>
             <a
               key="parameters"
@@ -272,7 +272,7 @@ export default class DashboardHeader extends Component {
 
       // Add text card button
       buttons.push(
-        <Tooltip tooltip={t`Add a text box`}>
+        <Tooltip key="add-a-text-box" tooltip={t`Add a text box`}>
           <a
             data-metabase-event="Dashboard;Add Text Box"
             key="add-text"
@@ -286,7 +286,7 @@ export default class DashboardHeader extends Component {
       );
 
       buttons.push(
-        <Tooltip tooltip={t`Revision history`}>
+        <Tooltip key="revision-history" tooltip={t`Revision history`}>
           <Link
             to={location.pathname + "/history"}
             data-metabase-event={"Dashboard;Revisions"}
@@ -299,7 +299,7 @@ export default class DashboardHeader extends Component {
 
     if (!isFullscreen) {
       buttons.push(
-        <Tooltip tooltip={t`Move dashboard`}>
+        <Tooltip key="new-dashboard" tooltip={t`Move dashboard`}>
           <Link
             to={location.pathname + "/move"}
             data-metabase-event={"Dashboard;Move"}
@@ -312,7 +312,7 @@ export default class DashboardHeader extends Component {
 
     if (!isFullscreen && !isEditing && canEdit) {
       buttons.push(
-        <Tooltip tooltip={t`Edit dashboard`}>
+        <Tooltip key="edit-dashboard" tooltip={t`Edit dashboard`}>
           <a
             data-metabase-event="Dashboard;Edit"
             key="edit"
@@ -331,7 +331,9 @@ export default class DashboardHeader extends Component {
       ((isPublicLinksEnabled && (isAdmin || dashboard.public_uuid)) ||
         (isEmbeddingEnabled && isAdmin))
     ) {
-      buttons.push(<DashboardEmbedWidget dashboard={dashboard} />);
+      buttons.push(
+        <DashboardEmbedWidget key="dashboard-embed" dashboard={dashboard} />,
+      );
     }
 
     buttons.push(...getDashboardActions(this.props));
diff --git a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
index 0020002e53aa148da969aa6c1d55bbcf4d6a97b5..2a20f7169e98003ce0ce62b6d7f4bcec22a61526 100644
--- a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
+++ b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
@@ -250,7 +250,7 @@ export default class QueryVisualization extends Component {
           <VisualizationResult
             lastRunDatasetQuery={this.state.lastRunDatasetQuery}
             onUpdateWarnings={warnings => this.setState({ warnings })}
-            onOpenChartSettings={() => this.refs.settings.open()}
+            onOpenChartSettings={initial => this.refs.settings.open(initial)}
             {...this.props}
             className="spread"
           />
diff --git a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
index 0b87abba291e1435eb6e55960ba974c5e48fde8f..412f2270643d357baf6dfd19d8604bef94dfa994 100644
--- a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
+++ b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
@@ -90,8 +90,8 @@ export default class VisualizationSettings extends React.Component {
     );
   }
 
-  open = () => {
-    this.props.showChartSettings({});
+  open = initial => {
+    this.props.showChartSettings(initial || {});
   };
 
   close = () => {
@@ -123,7 +123,7 @@ export default class VisualizationSettings extends React.Component {
               ]}
               onChange={this.props.onReplaceAllVisualizationSettings}
               onClose={this.close}
-              initialWidget={chartSettings && chartSettings.widget}
+              initial={chartSettings}
             />
           </Modal>
         </div>
diff --git a/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx b/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx
index 7dc3ab8f172c64a8a415675a51197825cf23a927..4cee77c3b6bce09ea6e630545d532ae157047fe4 100644
--- a/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx
+++ b/frontend/src/metabase/query_builder/components/template_tags/TagEditorParam.jsx
@@ -197,15 +197,13 @@ export default class TagEditorParam extends Component {
             </div>
           )}
 
-        {tag.type !== "dimension" && (
-          <div className="flex align-center pb1">
-            <h5 className="text-normal mr1">{t`Required?`}</h5>
-            <Toggle
-              value={tag.required}
-              onChange={value => this.setRequired(value)}
-            />
-          </div>
-        )}
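+        {/* the Required toggle now applies to every tag type, including dimension (field filter) tags */}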
+        <div className="flex align-center pb1">
+          <h5 className="text-normal mr1">{t`Required?`}</h5>
+          <Toggle
+            value={tag.required}
+            onChange={value => this.setRequired(value)}
+          />
+        </div>
 
         {((tag.type !== "dimension" && tag.required) ||
           (tag.type === "dimension" || tag["widget-type"])) && (
diff --git a/frontend/src/metabase/visualizations/components/ChartSettings.jsx b/frontend/src/metabase/visualizations/components/ChartSettings.jsx
index 6c64ddaa2877641830342cf1849a1c0b28dce05a..821b4d7f321bd67182862a7315c4398cc1a3e6fd 100644
--- a/frontend/src/metabase/visualizations/components/ChartSettings.jsx
+++ b/frontend/src/metabase/visualizations/components/ChartSettings.jsx
@@ -19,14 +19,15 @@ import {
 } from "metabase/visualizations";
 import { updateSettings } from "metabase/visualizations/lib/settings";
 
-const DEFAULT_TAB_PRIORITY = ["Display"];
+// section names are localized
+const DEFAULT_TAB_PRIORITY = [t`Display`];
 
 class ChartSettings extends Component {
   constructor(props) {
     super(props);
     this.state = {
-      currentTab: null,
-      showWidget: props.initialWidget,
+      currentSection: (props.initial && props.initial.section) || null,
+      currentWidget: (props.initial && props.initial.widget) || null,
       ...this._getState(
         props.series,
         props.series[0].card.visualization_settings,
@@ -56,8 +57,18 @@ class ChartSettings extends Component {
     };
   }
 
-  handleSelectTab = tab => {
-    this.setState({ currentTab: tab, showWidget: null });
+  handleShowSection = section => {
+    this.setState({ currentSection: section, currentWidget: null });
+  };
+
+  // allows a widget to temporarily replace itself with a different widget
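+  // (the widget object needs an `id` matching one returned by getSettingsWidgetsForSeries; its `props` are merged in render)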
+  handleShowWidget = widget => {
+    this.setState({ currentWidget: widget });
+  };
+
+  // go back to previously selected section
+  handleEndShowWidget = () => {
+    this.setState({ currentWidget: null });
   };
 
   handleResetSettings = () => {
@@ -79,21 +90,13 @@ class ChartSettings extends Component {
     this.props.onClose();
   };
 
-  // allows a widget to temporarily replace itself with a different widget
-  handleShowWidget = widget => {
-    this.setState({ showWidget: widget });
-  };
-  handleEndShowWidget = () => {
-    this.setState({ showWidget: null });
-  };
-
   render() {
     const { isDashboard, question, addField } = this.props;
-    const { rawSeries, transformedSeries, showWidget } = this.state;
+    const { rawSeries, transformedSeries, currentWidget } = this.state;
 
     const widgetsById = {};
 
-    const tabs = {};
+    const sections = {};
     for (const widget of getSettingsWidgetsForSeries(
       transformedSeries,
       this.handleChangeSettings,
@@ -101,38 +104,38 @@ class ChartSettings extends Component {
     )) {
       widgetsById[widget.id] = widget;
       if (widget.widget && !widget.hidden) {
-        tabs[widget.section] = tabs[widget.section] || [];
-        tabs[widget.section].push(widget);
+        sections[widget.section] = sections[widget.section] || [];
+        sections[widget.section].push(widget);
       }
     }
 
     // Move settings from the "undefined" section into the first tab
-    if (tabs["undefined"] && Object.values(tabs).length > 1) {
-      let extra = tabs["undefined"];
-      delete tabs["undefined"];
-      Object.values(tabs)[0].unshift(...extra);
+    if (sections["undefined"] && Object.values(sections).length > 1) {
+      let extra = sections["undefined"];
+      delete sections["undefined"];
+      Object.values(sections)[0].unshift(...extra);
     }
 
-    const tabNames = Object.keys(tabs);
-    const currentTab =
-      this.state.currentTab ||
-      _.find(DEFAULT_TAB_PRIORITY, name => name in tabs) ||
-      tabNames[0];
+    const sectionNames = Object.keys(sections);
+    const currentSection =
+      this.state.currentSection ||
+      _.find(DEFAULT_TAB_PRIORITY, name => name in sections) ||
+      sectionNames[0];
 
     let widgets;
-    let widget = showWidget && widgetsById[showWidget.id];
+    let widget = currentWidget && widgetsById[currentWidget.id];
     if (widget) {
       widget = {
         ...widget,
         hidden: false,
         props: {
           ...(widget.props || {}),
-          ...(showWidget.props || {}),
+          ...(currentWidget.props || {}),
         },
       };
       widgets = [widget];
     } else {
-      widgets = tabs[currentTab];
+      widgets = sections[currentSection];
     }
 
     const extraWidgetProps = {
@@ -145,12 +148,12 @@ class ChartSettings extends Component {
 
     return (
       <div className="flex flex-column spread">
-        {tabNames.length > 1 && (
+        {sectionNames.length > 1 && (
           <div className="border-bottom flex flex-no-shrink pl4">
             <Radio
-              value={currentTab}
-              onChange={this.handleSelectTab}
-              options={tabNames}
+              value={currentSection}
+              onChange={this.handleShowSection}
+              options={sectionNames}
               optionNameFn={v => v}
               optionValueFn={v => v}
               underlined
diff --git a/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx b/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
index 4ae705928611c8917465b12e49f1901f935e3deb..dfa529e0707e87cf85e549a28709681beab67b81 100644
--- a/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
+++ b/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
@@ -99,7 +99,7 @@ export default class LineAreaBarChart extends Component {
     if (dimensions.length < 1 || metrics.length < 1) {
       throw new ChartSettingsError(
         t`Which fields do you want to use for the X and Y axes?`,
-        t`Data`,
+        { section: t`Data` },
         t`Choose fields`,
       );
     }
diff --git a/frontend/src/metabase/visualizations/components/Visualization.jsx b/frontend/src/metabase/visualizations/components/Visualization.jsx
index 9002d937b0dcfacbe0dae2022ae1fb7097dc8260..0a156a60f301ba240496ced51bf36d25cd5fe039 100644
--- a/frontend/src/metabase/visualizations/components/Visualization.jsx
+++ b/frontend/src/metabase/visualizations/components/Visualization.jsx
@@ -85,7 +85,7 @@ type Props = {
 
   // misc
   onUpdateWarnings: (string[]) => void,
-  onOpenChartSettings: () => void,
+  onOpenChartSettings: ({ section?: ?string, widget?: ?any }) => void,
 
   // number of grid cells wide and tall
   gridSize?: { width: number, height: number },
@@ -367,7 +367,7 @@ export default class Visualization extends Component {
                 <div className="mt2">
                   <button
                     className="Button Button--primary Button--medium"
-                    onClick={this.props.onOpenChartSettings}
+                    onClick={() => this.props.onOpenChartSettings(e.initial)}
                   >
                     {e.buttonText}
                   </button>
diff --git a/frontend/src/metabase/visualizations/lib/errors.js b/frontend/src/metabase/visualizations/lib/errors.js
index d826d117c3382d21bb7bb778e1ff6a89a334d32c..2d69ae85f2c454928c8fb8634ad2ab2b8b8cee4e 100644
--- a/frontend/src/metabase/visualizations/lib/errors.js
+++ b/frontend/src/metabase/visualizations/lib/errors.js
@@ -3,6 +3,8 @@
 import { t, ngettext, msgid } from "c-3po";
 // NOTE: extending Error with Babel requires babel-plugin-transform-builtin-extend
 
+type ChartSettingsInitial = { section?: ?string, widget?: ?any };
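+// initial state for the chart settings UI: which section (tab) to open, and optionally a specific widget inside it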
+
 export class MinColumnsError extends Error {
   constructor(minColumns: number, actualColumns: number) {
     super(
@@ -42,11 +44,16 @@ export class NoBreakoutError extends Error {
 }
 
 export class ChartSettingsError extends Error {
-  section: ?string;
+  initial: ?ChartSettingsInitial;
   buttonText: ?string;
-  constructor(message: string, section?: string, buttonText?: string) {
+
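+  // e.g. new ChartSettingsError(t`Which fields do you want to use?`, { section: t`Data` }, t`Choose fields`)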
+  constructor(
+    message: string,
+    initial?: ChartSettingsInitial,
+    buttonText?: string,
+  ) {
     super(message || t`Please configure this chart in the chart settings`);
-    this.section = section;
+    this.initial = initial;
     this.buttonText = buttonText || t`Edit Settings`;
   }
 }
diff --git a/frontend/src/metabase/visualizations/lib/settings/column.js b/frontend/src/metabase/visualizations/lib/settings/column.js
index 6e9ece18390757a735ce5a86e16c3bb72a76a1d1..b264604312fa1e07cc808119dbe9370c28c15991 100644
--- a/frontend/src/metabase/visualizations/lib/settings/column.js
+++ b/frontend/src/metabase/visualizations/lib/settings/column.js
@@ -414,17 +414,6 @@ export const NUMBER_COLUMN_SETTINGS = {
       "currency_header_only",
     ],
   },
-  _column_title_full: {
-    getValue: (column: Column, settings: ColumnSettings) => {
-      let columnTitle = settings["column_title"] || formatColumn(column);
-      const headerUnit = settings["_header_unit"];
-      if (headerUnit) {
-        columnTitle += ` (${headerUnit})`;
-      }
-      return columnTitle;
-    },
-    readDependencies: ["column_title", "_header_unit"],
-  },
 };
 
 const COMMON_COLUMN_SETTINGS = {
@@ -438,6 +427,17 @@ const COMMON_COLUMN_SETTINGS = {
   column: {
     getValue: column => column,
   },
+  _column_title_full: {
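+    // derived setting: the column title plus an optional unit suffix,
+    // e.g. title "Total" with _header_unit "USD" yields "Total (USD)" (illustrative values)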
+    getValue: (column: Column, settings: ColumnSettings) => {
+      let columnTitle = settings["column_title"] || formatColumn(column);
+      const headerUnit = settings["_header_unit"];
+      if (headerUnit) {
+        columnTitle += ` (${headerUnit})`;
+      }
+      return columnTitle;
+    },
+    readDependencies: ["column_title", "_header_unit"],
+  },
 };
 
 export function getSettingDefintionsForColumn(series: Series, column: Column) {
diff --git a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
index e7ae26b78d45584feb4ae657e9c3508931847eb0..fd3cf68357a999cd2057da2dcf46a9e55d129902 100644
--- a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
@@ -56,7 +56,7 @@ export default class Funnel extends Component {
     if (!settings["funnel.dimension"] || !settings["funnel.metric"]) {
       throw new ChartSettingsError(
         t`Which fields do you want to use?`,
-        t`Data`,
+        { section: t`Data` },
         t`Choose fields`,
       );
     }
diff --git a/frontend/src/metabase/visualizations/visualizations/Map.jsx b/frontend/src/metabase/visualizations/visualizations/Map.jsx
index 11860a58b8abca680a6dc62711b89545235a27c1..039468968fd073c2878a87796f298d0c9ec41b4f 100644
--- a/frontend/src/metabase/visualizations/visualizations/Map.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/Map.jsx
@@ -223,17 +223,19 @@ export default class Map extends Component {
       ) {
         throw new ChartSettingsError(
           t`Please select longitude and latitude columns in the chart settings.`,
-          "Data",
+          { section: t`Data` },
         );
       }
     } else if (settings["map.type"] === "region") {
       if (!settings["map.region"]) {
-        throw new ChartSettingsError(t`Please select a region map.`, "Data");
+        throw new ChartSettingsError(t`Please select a region map.`, {
+          section: t`Data`,
+        });
       }
       if (!settings["map.dimension"] || !settings["map.metric"]) {
         throw new ChartSettingsError(
           t`Please select region and metric columns in the chart settings.`,
-          "Data",
+          { section: t`Data` },
         );
       }
     }
diff --git a/frontend/src/metabase/visualizations/visualizations/PieChart.jsx b/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
index d9ad29d7978c0676c606fbc17ad897795c9ff608..b6d64b727eda02c807d3ca01d9e31081a272a30d 100644
--- a/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
@@ -50,10 +50,9 @@ export default class PieChart extends Component {
 
   static checkRenderable([{ data: { cols, rows } }], settings) {
     if (!settings["pie.dimension"] || !settings["pie.metric"]) {
-      throw new ChartSettingsError(
-        t`Which columns do you want to use?`,
-        t`Data`,
-      );
+      throw new ChartSettingsError(t`Which columns do you want to use?`, {
+        section: t`Data`,
+      });
     }
   }
 
@@ -91,10 +90,13 @@ export default class PieChart extends Component {
       title: t`Colors`,
       widget: "colors",
       getDefault: (series, settings) =>
-        getColorsForValues(settings["pie._dimensionValues"]),
+        settings["pie._dimensionValues"]
+          ? getColorsForValues(settings["pie._dimensionValues"])
+          : [],
       getProps: (series, settings) => ({
-        seriesTitles: settings["pie._dimensionValues"],
+        seriesTitles: settings["pie._dimensionValues"] || [],
       }),
+      getDisabled: (series, settings) => !settings["pie._dimensionValues"],
       readDependencies: ["pie._dimensionValues"],
     },
     // this setting recomputes color assignment using pie.colors as the existing
@@ -122,7 +124,10 @@ export default class PieChart extends Component {
     "pie._dimensionValues": {
       getValue: ([{ data: { rows } }], settings) => {
         const dimensionIndex = settings["pie._dimensionIndex"];
-        return rows.map(row => row[dimensionIndex]);
+        return dimensionIndex >= 0
+          ? // cast to string because getColorsForValues expects strings
+            rows.map(row => String(row[dimensionIndex]))
+          : null;
       },
       readDependencies: ["pie._dimensionIndex"],
     },
diff --git a/frontend/test/services/__snapshots__/MetabaseApi.integ.spec.js.snap b/frontend/test/services/__snapshots__/MetabaseApi.integ.spec.js.snap
index ab4f1810c12a99fefc47f815d3b2bab2a85984be..50962449edd1c91a6181b325a3e0620123973ddc 100644
--- a/frontend/test/services/__snapshots__/MetabaseApi.integ.spec.js.snap
+++ b/frontend/test/services/__snapshots__/MetabaseApi.integ.spec.js.snap
@@ -425,12 +425,13 @@ Object {
       "display_name": "Created At",
       "fingerprint": Object {
         "global": Object {
-          "distinct-count": 1387,
+          "distinct-count": 10001,
+          "nil%": 0,
         },
         "type": Object {
           "type/DateTime": Object {
-            "earliest": "2016-04-30T07:00:00.000Z",
-            "latest": "2020-04-19T07:00:00.000Z",
+            "earliest": "2016-05-01T01:56:13.352Z",
+            "latest": "2020-04-19T21:07:15.657Z",
           },
         },
       },
@@ -1575,6 +1576,7 @@ Object {
       "fingerprint": Object {
         "global": Object {
           "distinct-count": 2308,
+          "nil%": 0,
         },
         "type": Object {
           "type/DateTime": Object {
@@ -1796,12 +1798,13 @@ Object {
       "display_name": "Created At",
       "fingerprint": Object {
         "global": Object {
-          "distinct-count": 992,
+          "distinct-count": 2500,
+          "nil%": 0,
         },
         "type": Object {
           "type/DateTime": Object {
-            "earliest": "2016-04-19T07:00:00.000Z",
-            "latest": "2019-04-19T07:00:00.000Z",
+            "earliest": "2016-04-20T04:35:18.752Z",
+            "latest": "2019-04-19T21:06:27.300Z",
           },
         },
       },
@@ -2783,12 +2786,13 @@ Object {
       "display_name": "Created At",
       "fingerprint": Object {
         "global": Object {
-          "distinct-count": 186,
+          "distinct-count": 200,
+          "nil%": 0,
         },
         "type": Object {
           "type/DateTime": Object {
-            "earliest": "2016-04-26T07:00:00.000Z",
-            "latest": "2019-04-15T07:00:00.000Z",
+            "earliest": "2016-04-27T02:29:55.147Z",
+            "latest": "2019-04-15T20:34:19.931Z",
           },
         },
       },
@@ -3630,12 +3634,13 @@ Object {
       "display_name": "Created At",
       "fingerprint": Object {
         "global": Object {
-          "distinct-count": 720,
+          "distinct-count": 1112,
+          "nil%": 0,
         },
         "type": Object {
           "type/DateTime": Object {
-            "earliest": "2016-06-03T07:00:00.000Z",
-            "latest": "2020-04-19T07:00:00.000Z",
+            "earliest": "2016-06-03T07:37:05.818Z",
+            "latest": "2020-04-19T21:15:25.677Z",
           },
         },
       },
diff --git a/frontend/test/visualizations/components/LineAreaBarChart.unit.spec.js b/frontend/test/visualizations/components/LineAreaBarChart.unit.spec.js
index d0a763602a58eab5886090adf69ee5c98c329c1c..0e12654d61a4ee93e77c50099374a53c3cfa723f 100644
--- a/frontend/test/visualizations/components/LineAreaBarChart.unit.spec.js
+++ b/frontend/test/visualizations/components/LineAreaBarChart.unit.spec.js
@@ -89,7 +89,6 @@ const millisecondCard = {
         name: "timestamp",
         source: "breakout",
         remapped_from: null,
-        extra_info: {},
         fk_field_id: null,
         remapped_to: null,
         id: 8159,
@@ -105,7 +104,6 @@ const millisecondCard = {
         name: "count",
         source: "aggregation",
         remapped_from: null,
-        extra_info: {},
         remapped_to: null,
         id: null,
         target: null,
@@ -267,7 +265,6 @@ const dateTimeCard = {
         name: "CREATED_AT",
         source: "breakout",
         remapped_from: null,
-        extra_info: {},
         fk_field_id: null,
         remapped_to: null,
         id: 1,
@@ -283,7 +280,6 @@ const dateTimeCard = {
         name: "sum",
         source: "aggregation",
         remapped_from: null,
-        extra_info: {},
         remapped_to: null,
         id: null,
         target: null,
@@ -395,7 +391,6 @@ const numberCard = {
         name: "RATING",
         source: "breakout",
         remapped_from: null,
-        extra_info: {},
         fk_field_id: null,
         remapped_to: null,
         id: 33,
@@ -411,7 +406,6 @@ const numberCard = {
         name: "count",
         source: "aggregation",
         remapped_from: null,
-        extra_info: {},
         remapped_to: null,
         id: null,
         target: null,
diff --git a/project.clj b/project.clj
index 454c25021e4123996b01d5778314403736ff66a1..ccb9a195b8b847b3c42f1968e034ead3942eb1e2 100644
--- a/project.clj
+++ b/project.clj
@@ -10,7 +10,8 @@
             "test" ["with-profile" "+expectations" "expectations"]
             "generate-sample-dataset" ["with-profile" "+generate-sample-dataset" "run"]
             "profile" ["with-profile" "+profile" "run" "profile"]
-            "h2" ["with-profile" "+h2-shell" "run" "-url" "jdbc:h2:./metabase.db" "-user" "" "-password" "" "-driver" "org.h2.Driver"]}
+            "h2" ["with-profile" "+h2-shell" "run" "-url" "jdbc:h2:./metabase.db" "-user" "" "-password" "" "-driver" "org.h2.Driver"]
+            "generate-automagic-dashboards-pot" ["with-profile" "+generate-automagic-dashboards-pot" "run"]}
   :dependencies [[org.clojure/clojure "1.9.0"]
                  [org.clojure/core.async "0.3.442"]
                  [org.clojure/core.match "0.3.0-alpha4"]              ; optimized pattern matching library for Clojure
@@ -28,6 +29,7 @@
                  [amalloy/ring-gzip-middleware "0.1.3"]               ; Ring middleware to GZIP responses if client can handle it
                  [aleph "0.4.5-alpha2"                                ; Async HTTP library; WebSockets
                   :exclusions [org.clojure/tools.logging]]
+                 [bigml/histogram "4.1.3"]                            ; Histogram data structure
                 [buddy/buddy-core "1.2.0"]                           ; various cryptographic functions
                  [buddy/buddy-sign "1.5.0"]                           ; JSON Web Tokens; High-Level message signing library
                  [cheshire "5.7.0"]                                   ; fast JSON encoding (used by Ring JSON middleware)
@@ -80,11 +82,12 @@
                                com.sun.jmx/jmxri]]
                  [medley "0.8.4"]                                     ; lightweight lib of useful functions
                  [metabase/throttle "1.0.1"]                          ; Tools for throttling access to API endpoints and other code pathways
-                 [mysql/mysql-connector-java "5.1.45"]                ;  !!! Don't upgrade to 6.0+ yet -- that's Java 8 only !!!
+                 [mysql/mysql-connector-java "5.1.45"]                ; !!! Don't upgrade to 6.0+ yet -- that's Java 8 only !!!
                  [jdistlib "0.5.1"                                    ; Distribution statistic tests
                   :exclusions [com.github.wendykierp/JTransforms]]
                  [net.sf.cssbox/cssbox "4.12"                         ; HTML / CSS rendering
                   :exclusions [org.slf4j/slf4j-api]]
+                 [net.snowflake/snowflake-jdbc "3.6.13"]              ; Snowflake JDBC Client Library
                  [org.clojars.pntblnk/clj-ldap "0.0.12"]              ; LDAP client
                  [org.liquibase/liquibase-core "3.6.2"                ; migration management (Java lib)
                   :exclusions [ch.qos.logback/logback-classic]]
@@ -122,8 +125,9 @@
          :reload-paths ["src"]}
   :eastwood {:exclude-namespaces
              [:test-paths
-              metabase.driver.generic-sql                             ; SQLDriver causes Eastwood to fail. Skip this ns until issue is fixed: https://github.com/jonase/eastwood/issues/191
-              metabase.query-processor.middleware.binning]            ; Similarly Eastwood gets confused because this namespace relies on defrecord :load-ns options which it seems to ignore :(
+              ;; SQLDriver causes Eastwood to fail. Skip this ns until issue is fixed: https://github.com/jonase/eastwood/issues/191
+              metabase.driver.generic-sql]
+             :config-files ["./test_resources/eastwood-config.clj"]
              :add-linters [:unused-private-vars
                            :unused-namespaces
                            ;; These linters are pretty useful but give a few false positives and can't be selectively disabled (yet)
@@ -136,7 +140,8 @@
   :docstring-checker {:include [#"^metabase"]
                       :exclude [#"test"
                                 #"^metabase\.http-client$"]}
-  :profiles {:dev {:dependencies [[expectations "2.2.0-beta2"]        ; unit tests
+  :profiles {:dev {:dependencies [[clj-http-fake "1.0.3"]             ; Library to mock clj-http responses
+                                  [expectations "2.2.0-beta2"]        ; unit tests
                                   [ring/ring-mock "0.3.0"]]           ; Library to create mock Ring requests for unit tests
                    :plugins [[docstring-checker "1.0.2"]              ; Check that all public vars have docstrings. Run with 'lein docstring-checker'
                              [jonase/eastwood "0.3.1"
@@ -175,4 +180,5 @@
              :profile {:jvm-opts ["-XX:+CITime"                       ; print time spent in JIT compiler
                                   "-XX:+PrintGC"]}                    ; print a message when garbage collection takes place
              ;; get the H2 shell with 'lein h2'
-             :h2-shell {:main org.h2.tools.Shell}})
+             :h2-shell {:main org.h2.tools.Shell}
+             :generate-automagic-dashboards-pot {:main metabase.automagic-dashboards.rules}})
diff --git a/resources/automagic_dashboards/table/TransactionTable/BySource.yaml b/resources/automagic_dashboards/table/TransactionTable/BySource.yaml
index 109cec4bd2cfb88aa15bd5dd8ad73885eb0c9d13..bcd900c2468cc3434a39755398f3a04b2fc127fe 100644
--- a/resources/automagic_dashboards/table/TransactionTable/BySource.yaml
+++ b/resources/automagic_dashboards/table/TransactionTable/BySource.yaml
@@ -171,7 +171,7 @@ cards:
     height: 8
 - AvgQuantityBySource:
     group: Financial
-    title: Average qunatity per source
+    title: Average quantity per source
     visualization: table
     dimensions: SourceLarge
     metrics: AvgQuantity
diff --git a/resources/automagic_dashboards/table/example/indepth.yaml b/resources/automagic_dashboards/table/example/indepth.yaml
index 974b72df9cbea4e0374e1c8eb86cb11bdbdd4cf5..9621b66258af5d0c6754f09d4ea1fa31c151cc4e 100644
--- a/resources/automagic_dashboards/table/example/indepth.yaml
+++ b/resources/automagic_dashboards/table/example/indepth.yaml
@@ -1,4 +1,4 @@
-title: Indepth example
+title: In-depth example
 applies_to: GenericTable
 metrics:
   - Count: ["count"]
diff --git a/src/metabase/api/database.clj b/src/metabase/api/database.clj
index 6464903f84b22f708b52a78eb3ea51017e9fe0c4..3d12b8dd78f7a4495c352a434dee360adeb36175 100644
--- a/src/metabase/api/database.clj
+++ b/src/metabase/api/database.clj
@@ -97,7 +97,7 @@
    use queries with those aggregations as source queries. This function determines whether CARD is using one
    of those queries so we can filter it out in Clojure-land."
   [{{{aggregations :aggregation} :query} :dataset_query}]
-  (seq (mbql.u/clause-instances #{:cum-count :cum-sum} aggregations)))
+  (mbql.u/match aggregations #{:cum-count :cum-sum}))
 
 (defn- source-query-cards
   "Fetch the Cards that can be used as source queries (e.g. presented as virtual tables)."
diff --git a/src/metabase/api/permissions.clj b/src/metabase/api/permissions.clj
index ea52c7da685fe3e1fa60ea0db998eff394ff6c5b..2c0ffcca67d795dc6692f146bf36568df3e6c96e 100644
--- a/src/metabase/api/permissions.clj
+++ b/src/metabase/api/permissions.clj
@@ -67,7 +67,7 @@
 
 (api/defendpoint PUT "/graph"
   "Do a batch update of Permissions by passing in a modified graph. This should return the same graph, in the same
-  format, that you got from `GET /api/permissions/graph`, with any changes made in the wherever neccesary. This
+  format, that you got from `GET /api/permissions/graph`, with any changes made wherever necessary. This
   modified graph must correspond to the `PermissionsGraph` schema. If successful, this endpoint returns the updated
   permissions graph; use this as a base for any further modifications.
 
diff --git a/src/metabase/api/public.clj b/src/metabase/api/public.clj
index 862273c777e9f6a3fc91c20594d6e621c57394d8..04f2f14b00ed4c0f48abb43b8a52d61ab444482a 100644
--- a/src/metabase/api/public.clj
+++ b/src/metabase/api/public.clj
@@ -1,7 +1,6 @@
 (ns metabase.api.public
   "Metabase API endpoints for viewing publicly-accessible Cards and Dashboards."
   (:require [cheshire.core :as json]
-            [clojure.walk :as walk]
             [compojure.core :refer [GET]]
             [medley.core :as m]
             [metabase
@@ -14,6 +13,9 @@
              [dashboard :as dashboard-api]
              [dataset :as dataset-api]
              [field :as field-api]]
+            [metabase.mbql
+             [normalize :as normalize]
+             [util :as mbql.u]]
             [metabase.models
              [card :as card :refer [Card]]
              [dashboard :refer [Dashboard]]
@@ -29,8 +31,7 @@
             [schema.core :as s]
             [toucan
              [db :as db]
-             [hydrate :refer [hydrate]]]
-            [metabase.mbql.normalize :as normalize]))
+             [hydrate :refer [hydrate]]]))
 
 (def ^:private ^:const ^Integer default-embed-max-height 800)
 (def ^:private ^:const ^Integer default-embed-max-width 1024)
@@ -263,18 +264,10 @@
 
 ;;; -------------------------------------------------- Field Values --------------------------------------------------
 
-;; TODO - this is a stupid, inefficient way of doing things. Figure out a better way to do it. :(
 (defn- query->referenced-field-ids
   "Get the IDs of all Fields referenced by an MBQL `query` (not including any parameters)."
   [query]
-  (let [field-ids (atom [])]
-    (walk/postwalk
-     (fn [x]
-       (if (instance? metabase.query_processor.interface.Field x)
-         (swap! field-ids conj (:field-id x))
-         x))
-     (qp/expand query))
-    @field-ids))
+  (mbql.u/match (:query query) [:field-id id] id))
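+;; e.g. for {:query {:filter [:= [:field-id 100] "Widget"]}} this returns (100) (illustrative query)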
 
 (defn- card->referenced-field-ids
   "Return a set of all Field IDs referenced by `card`, in both the MBQL query itself and in its parameters ('template
diff --git a/src/metabase/api/session.clj b/src/metabase/api/session.clj
index 2d3bfd9097c5d6596806cc98e895517aa44624a8..ecc1829f2f37f29c6b523bff6c193f65646f58fe 100644
--- a/src/metabase/api/session.clj
+++ b/src/metabase/api/session.clj
@@ -65,7 +65,7 @@
       (catch com.unboundid.util.LDAPSDKException e
         (log/error
          (u/format-color 'red
-             (trs "Problem connecting to LDAP server, will fallback to local authentication {0}" (.getMessage e))))))))
+             (trs "Problem connecting to LDAP server, will fall back to local authentication: {0}" (.getMessage e))))))))
 
 (defn- email-login
   "Find a matching `User` if one exists and return a new Session for them, or `nil` if they couldn't be authenticated."
diff --git a/src/metabase/api/table.clj b/src/metabase/api/table.clj
index 9c073d5f1487f68068b3db29f0b15104fd988dd4..7c292d6c24a7672a2f6b99393e2a402258c8b6d7 100644
--- a/src/metabase/api/table.clj
+++ b/src/metabase/api/table.clj
@@ -1,25 +1,27 @@
 (ns metabase.api.table
   "/api/table endpoints."
   (:require [clojure.tools.logging :as log]
-            [compojure.core :refer [GET PUT POST]]
+            [compojure.core :refer [GET POST PUT]]
             [medley.core :as m]
             [metabase
              [driver :as driver]
+             [related :as related]
              [sync :as sync]
              [util :as u]]
             [metabase.api.common :as api]
+            [metabase.mbql.util :as mbql.u]
             [metabase.models
              [card :refer [Card]]
-             [database :as database :refer [Database]]
-             [field :refer [Field with-normal-values]]
-             [field-values :refer [FieldValues] :as fv]
+             [database :as database]
+             [field :refer [Field]]
+             [field-values :as fv :refer [FieldValues]]
              [interface :as mi]
              [table :as table :refer [Table]]]
-            [metabase.related :as related]
             [metabase.sync.field-values :as sync-field-values]
-            [metabase.util.schema :as su]
+            [metabase.util
+             [i18n :refer [trs tru]]
+             [schema :as su]]
             [schema.core :as s]
-            [metabase.util.i18n :refer [trs tru]]
             [toucan
              [db :as db]
              [hydrate :refer [hydrate]]]))
@@ -169,9 +171,8 @@
 
 (defn- supports-date-binning?
   "Time fields don't support binning, returns true if it's a DateTime field and not a time field"
-  [{:keys [base_type special_type]}]
-  (and (or (isa? base_type :type/DateTime)
-           (isa? special_type :type/DateTime))
+  [{:keys [base_type], :as field}]
+  (and (mbql.u/datetime-field? field)
        (not (isa? base_type :type/Time))))
 
 (defn- assoc-field-dimension-options [driver {:keys [base_type special_type fingerprint] :as field}]
diff --git a/src/metabase/automagic_dashboards/comparison.clj b/src/metabase/automagic_dashboards/comparison.clj
index 46e91afedc7c6c728140c187e164e8d52ced8eef..4181c9d9dd12384801016351c3b8de5ed037b052 100644
--- a/src/metabase/automagic_dashboards/comparison.clj
+++ b/src/metabase/automagic_dashboards/comparison.clj
@@ -12,7 +12,7 @@
             [metabase.mbql.normalize :as normalize]
             [metabase.models.table :refer [Table]]
             [metabase.query-processor.util :as qp.util]
-            [puppetlabs.i18n.core :as i18n :refer [tru]]))
+            [metabase.util.i18n :refer [tru]]))
 
 (def ^:private ^{:arglists '([root])} comparison-name
   (comp capitalize-first (some-fn :comparison-name :full-name)))
diff --git a/src/metabase/automagic_dashboards/core.clj b/src/metabase/automagic_dashboards/core.clj
index d0173fcc9ed3a79c96fcb79992fc8ab86595c50d..f1cd5af9410c63fcbe072edbfd94422698f6ab8e 100644
--- a/src/metabase/automagic_dashboards/core.clj
+++ b/src/metabase/automagic_dashboards/core.clj
@@ -35,11 +35,10 @@
              [query :refer [Query]]
              [segment :refer [Segment]]
              [table :refer [Table]]]
-            [metabase.query-processor.middleware.expand-macros :as qp.macros]
             [metabase.query-processor.util :as qp.util]
             [metabase.sync.analyze.classify :as classify]
             [metabase.util.date :as date]
-            [puppetlabs.i18n.core :as i18n :refer [trs tru]]
+            [metabase.util.i18n :refer [trs tru] :as ui18n]
             [ring.util.codec :as codec]
             [schema.core :as s]
             [toucan.db :as db])
@@ -53,17 +52,20 @@
 (defn ->field
   "Return `Field` instance for a given ID or name in the context of root."
   [root id-or-name]
-  (if (->> root :source (instance? (type Table)))
-    (Field id-or-name)
-    (when-let [field (->> root
-                          :source
-                          :result_metadata
-                          (m/find-first (comp #{id-or-name} :name)))]
-      (-> field
-          (update :base_type keyword)
-          (update :special_type keyword)
-          field/map->FieldInstance
-          (classify/run-classifiers {})))))
+  (let [id-or-name (if (sequential? id-or-name)
+                     (filters/field-reference->id id-or-name)
+                     id-or-name)]
+    (if (->> root :source (instance? (type Table)))
+      (Field id-or-name)
+      (when-let [field (->> root
+                            :source
+                            :result_metadata
+                            (m/find-first (comp #{id-or-name} :name)))]
+        (-> field
+            (update :base_type keyword)
+            (update :special_type keyword)
+            field/map->FieldInstance
+            (classify/run-classifiers {}))))))
 
 (def ^{:arglists '([root])} source-name
   "Return the (display) name of the soruce of a given root object."
@@ -82,7 +84,7 @@
 
 (def ^:private ^{:arglists '([metric])} saved-metric?
   (every-pred (partial mbql.u/is-clause? :metric)
-              (complement qp.macros/ga-metric-or-segment?)))
+              (complement mbql.u/ga-metric-or-segment?)))
 
 (def ^:private ^{:arglists '([metric])} custom-expression?
   (partial mbql.u/is-clause? :named))
@@ -94,10 +96,10 @@
   "Return the name of the metric or name by describing it."
   [[op & args :as metric]]
   (cond
-    (qp.macros/ga-metric-or-segment? metric) (-> args first str (subs 3) str/capitalize)
-    (adhoc-metric? metric)                   (-> op qp.util/normalize-token op->name)
-    (saved-metric? metric)                   (-> args first Metric :name)
-    :else                                    (second args)))
+    (mbql.u/ga-metric-or-segment? metric) (-> args first str (subs 3) str/capitalize)
+    (adhoc-metric? metric)                (-> op qp.util/normalize-token op->name)
+    (saved-metric? metric)                (-> args first Metric :name)
+    :else                                 (second args)))
 
 (defn metric-op
   "Return the name op of the metric"
@@ -121,7 +123,6 @@
      (if (adhoc-metric? metric)
        (tru "{0} of {1}" (metric-name metric) (or (some->> metric
                                                            second
-                                                           filters/field-reference->id
                                                            (->field root)
                                                            :display_name)
                                                   (source-name root)))
@@ -134,8 +135,7 @@
         dimensions   (->> (get-in question [:dataset_query :query :breakout])
                           (mapcat filters/collect-field-references)
                           (map (comp :display_name
-                                     (partial ->field root)
-                                     filters/field-reference->id))
+                                     (partial ->field root)))
                           join-enumeration)]
     (if dimensions
       (tru "{0} by {1}" aggregations dimensions)
@@ -417,9 +417,9 @@
                  (fn [[_ identifier attribute]]
                    (let [entity    (bindings identifier)
                          attribute (some-> attribute qp.util/normalize-token)]
-                     (or (and (ifn? entity) (entity attribute))
-                         (root attribute)
-                         (->reference template-type entity)))))))
+                     (str (or (and (ifn? entity) (entity attribute))
+                              (root attribute)
+                              (->reference template-type entity))))))))
 
 (defn- field-candidates
   [context {:keys [field_type links_to named max_cardinality] :as constraints}]
@@ -564,7 +564,8 @@
 (defn capitalize-first
   "Capitalize only the first letter in a given string."
   [s]
-  (str (str/upper-case (subs s 0 1)) (subs s 1)))
+  (let [s (str s)]
+    (str (str/upper-case (subs s 0 1)) (subs s 1))))
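+;; e.g. (capitalize-first "day of week") => "Day of week"; the (str s) coercion also
+;; makes this safe for non-string values such as localized strings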
 
 (defn- instantiate-metadata
   [x context bindings]
@@ -982,15 +983,15 @@
                                            ;; (no chunking).
                                            first))]
     (let [show (or show max-cards)]
-      (log/infof (trs "Applying heuristic %s to %s.") (:rule rule) full-name)
-      (log/infof (trs "Dimensions bindings:\n%s")
-                 (->> context
-                      :dimensions
-                      (m/map-vals #(update % :matches (partial map :name)))
-                      u/pprint-to-str))
-      (log/infof (trs "Using definitions:\nMetrics:\n%s\nFilters:\n%s")
-                 (-> context :metrics u/pprint-to-str)
-                 (-> context :filters u/pprint-to-str))
+      (log/infof (str (trs "Applying heuristic {0} to {1}." (:rule rule) full-name)))
+      (log/infof (str (trs "Dimensions bindings:\n{0}"
+                           (->> context
+                                :dimensions
+                                (m/map-vals #(update % :matches (partial map :name)))
+                                u/pprint-to-str))))
+      (log/infof (str (trs "Using definitions:\nMetrics:\n{0}\nFilters:\n{1}"
+                           (->> context :metrics (m/map-vals :metric) u/pprint-to-str)
+                           (-> context :filters u/pprint-to-str))))
       (-> dashboard
           (populate/create-dashboard show)
           (assoc :related           (related context rule)
@@ -999,7 +1000,7 @@
                                       (format "%s#show=all" (:url root)))
                  :transient_filters (:query-filter context)
                  :param_fields      (->> context :query-filter (filter-referenced-fields root)))))
-    (throw (ex-info (trs "Can''t create dashboard for {0}" full-name)
+    (throw (ui18n/ex-info (trs "Can''t create dashboard for {0}" full-name)
              {:root            root
               :available-rules (map :rule (or (some-> rule rules/get-rule vector)
                                               (rules/get-rules rules-prefix)))}))))
@@ -1040,7 +1041,6 @@
 (defn- collect-breakout-fields
   [root question]
   (map (comp (partial ->field root)
-             filters/field-reference->id
              first
              filters/collect-field-references)
        (get-in question [:dataset_query :query :breakout])))
@@ -1096,7 +1096,6 @@
   (cond-> (->> field-reference
                filters/collect-field-references
                first
-               filters/field-reference->id
                (->field root))
     (-> field-reference first qp.util/normalize-token (= :datetime-field))
     (assoc :unit (-> field-reference last qp.util/normalize-token))))
@@ -1107,14 +1106,14 @@
   humanize-filter-value (fn [_ [op & args]]
                           (qp.util/normalize-token op)))
 
-(def ^:private unit-name (comp {:minute-of-hour  "minute"
-                                :hour-of-day     "hour"
-                                :day-of-week     "day of week"
-                                :day-of-month    "day of month"
-                                :day-of-year     "day of year"
-                                :week-of-year    "week"
-                                :month-of-year   "month"
-                                :quarter-of-year "quarter"}
+(def ^:private unit-name (comp {:minute-of-hour  (tru "minute")
+                                :hour-of-day     (tru "hour")
+                                :day-of-week     (tru "day of week")
+                                :day-of-month    (tru "day of month")
+                                :day-of-year     (tru "day of year")
+                                :week-of-year    (tru "week")
+                                :month-of-year   (tru "month")
+                                :quarter-of-year (tru "quarter")}
                                qp.util/normalize-token))
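+;; e.g. (unit-name :day-of-week) => the localized string "day of week"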
 
 (defn- field-name
@@ -1122,7 +1121,7 @@
    (->> field-reference (field-reference->field root) field-name))
   ([{:keys [display_name unit] :as field}]
    (cond->> display_name
-     (and (filters/periodic-datetime? field) unit) (format "%s of %s" (unit-name unit)))))
+     (and (filters/periodic-datetime? field) unit) (tru "{0} of {1}" (unit-name unit)))))
 
 (defmethod humanize-filter-value :=
   [root [_ field-reference value]]
diff --git a/src/metabase/automagic_dashboards/rules.clj b/src/metabase/automagic_dashboards/rules.clj
index 22b7d3987fc59f598e5d4ee830b731c8edf9f705..2f7d009f9053c693cbd11fa9f87ce1c20f903d1f 100644
--- a/src/metabase/automagic_dashboards/rules.clj
+++ b/src/metabase/automagic_dashboards/rules.clj
@@ -7,11 +7,12 @@
             [metabase.query-processor.util :as qp.util]
             [metabase.util :as u]
             [metabase.util
-             [i18n :refer [trs]]
+             [i18n :refer [trs tru LocalizedString]]
              [schema :as su]]
             [schema
              [coerce :as sc]
              [core :as s]]
+            [schema.spec.core :as spec]
             [yaml.core :as yaml])
   (:import [java.nio.file Files FileSystem FileSystems Path]))
 
@@ -28,7 +29,7 @@
 
 (def ^:private Metric {Identifier {(s/required-key :metric) MBQL
                                    (s/required-key :score)  Score
-                                   (s/optional-key :name)   s/Str}})
+                                   (s/optional-key :name)   LocalizedString}})
 
 (def ^:private Filter {Identifier {(s/required-key :filter) MBQL
                                    (s/required-key :score)  Score}})
@@ -87,28 +88,28 @@
 (def ^:private CardDimension {Identifier {(s/optional-key :aggregation) s/Str}})
 
 (def ^:private Card
-  {Identifier {(s/required-key :title)         s/Str
+  {Identifier {(s/required-key :title)         LocalizedString
                (s/required-key :score)         Score
                (s/optional-key :visualization) Visualization
-               (s/optional-key :text)          s/Str
+               (s/optional-key :text)          LocalizedString
                (s/optional-key :dimensions)    [CardDimension]
                (s/optional-key :filters)       [s/Str]
                (s/optional-key :metrics)       [s/Str]
                (s/optional-key :limit)         su/IntGreaterThanZero
                (s/optional-key :order_by)      [OrderByPair]
-               (s/optional-key :description)   s/Str
+               (s/optional-key :description)   LocalizedString
                (s/optional-key :query)         s/Str
                (s/optional-key :width)         Width
                (s/optional-key :height)        Height
                (s/optional-key :group)         s/Str
-               (s/optional-key :y_label)       s/Str
-               (s/optional-key :x_label)       s/Str
-               (s/optional-key :series_labels) [s/Str]}})
+               (s/optional-key :y_label)       LocalizedString
+               (s/optional-key :x_label)       LocalizedString
+               (s/optional-key :series_labels) [LocalizedString]}})
 
 (def ^:private Groups
-  {Identifier {(s/required-key :title)            s/Str
-               (s/optional-key :comparison_title) s/Str
-               (s/optional-key :description)      s/Str}})
+  {Identifier {(s/required-key :title)            LocalizedString
+               (s/optional-key :comparison_title) LocalizedString
+               (s/optional-key :description)      LocalizedString}})
 
 (def ^{:arglists '([definition])} identifier
   "Return `key` in `{key {}}`."
@@ -141,8 +142,7 @@
                    (dimension-form? subform) [(second subform)]
                    (string? subform)         (->> subform
                                                   (re-seq #"\[\[(\w+)\]\]")
-                                                  (map second))
-                   :else                     nil)))
+                                                  (map second)))))
        distinct))
 
 (defn- valid-metrics-references?
@@ -189,15 +189,14 @@
 (def Rule
   "Rules defining an automagic dashboard."
   (constrained-all
-   {(s/required-key :title)             s/Str
+   {(s/required-key :title)             LocalizedString
     (s/required-key :rule)              s/Str
     (s/required-key :specificity)       s/Int
     (s/optional-key :cards)             [Card]
     (s/optional-key :dimensions)        [Dimension]
     (s/optional-key :applies_to)        AppliesTo
-    (s/optional-key :transient_title)   s/Str
-    (s/optional-key :short_title)       s/Str
-    (s/optional-key :description)       s/Str
+    (s/optional-key :transient_title)   LocalizedString
+    (s/optional-key :description)       LocalizedString
     (s/optional-key :metrics)           [Metric]
     (s/optional-key :filters)           [Filter]
     (s/optional-key :groups)            Groups
@@ -274,7 +273,8 @@
                           [(->entity table-type) (->type field-type)]
                           [(if (-> table-type ->entity table-type?)
                              (->entity table-type)
-                             (->type table-type))])))}))
+                             (->type table-type))])))
+    LocalizedString #(tru %)}))
 
 (def ^:private rules-dir "automagic_dashboards/")
 
@@ -360,3 +360,48 @@
   "Get rule at path `path`."
   [path]
   (get-in @rules (concat path [::leaf])))
+
+(defn- extract-localized-strings
+  [[path rule]]
+  (let [strings (atom [])]
+    ((spec/run-checker
+      (fn [s params]
+        (let [walk (spec/checker (s/spec s) params)]
+          (fn [x]
+            (when (= LocalizedString s)
+              (swap! strings conj x))
+            (walk x))))
+      false
+      Rule)
+     rule)
+    (map vector (distinct @strings) (repeat path))))
+
+(defn- make-pot
+  [strings]
+  (->> strings
+       (group-by first)
+       (mapcat (fn [[s ctxs]]
+                 (concat (for [[_ ctx] ctxs]
+                           (format "#: resources/%s%s.yaml" rules-dir (str/join "/" ctx)))
+                         [(format "msgid \"%s\"\nmsgstr \"\"\n" s)])))
+       (str/join "\n")))
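+;; Illustrative output entry (paths and msgids depend on the loaded rules):
+;;
+;;   #: resources/automagic_dashboards/table/GenericTable.yaml
+;;   msgid "Summary"
+;;   msgstr ""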
+
+(defn- all-rules
+  ([]
+   (all-rules [] @rules))
+  ([path rules]
+   (when (map? rules)
+     (mapcat (fn [[k v]]
+               (if (= k ::leaf)
+                 [[path v]]
+                 (all-rules (conj path k) v)))
+             rules))))
+
+(defn -main
+  "Entry point for lein task `generate-automagic-dashboards-pot`"
+  [& _]
+  (->> (all-rules)
+       (mapcat extract-localized-strings)
+       make-pot
+       (spit "locales/metabase-automatic-dashboards.pot"))
+  (System/exit 0))
diff --git a/src/metabase/db.clj b/src/metabase/db.clj
index 9a552f63289b4097e4d9f72f73b4469aedd104d6..37726e46b1de359f00573098e9d558156cf8509a 100644
--- a/src/metabase/db.clj
+++ b/src/metabase/db.clj
@@ -129,7 +129,7 @@
 
   MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run
   one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and
-  filter out blank / comment lines. Even though this is not neccesary for H2 or Postgres go ahead and do it anyway
+  filter out blank / comment lines. Even though this is not necessary for H2 or Postgres go ahead and do it anyway
   because it keeps the code simple and doesn't make a significant performance difference."
   [^Liquibase liquibase]
   (for [line  (s/split-lines (migrations-sql liquibase))
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index dfda34729905c35a8ef38afebb5292783c11e51e..6d1c2e3cafdb4c63110c2e8538edfda36e92eadc 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -22,6 +22,7 @@
             [metabase.models
              [database :refer [Database]]
              [setting :refer [defsetting]]]
+            [metabase.query-processor.store :as qp.store]
             [metabase.sync.interface :as si]
             [metabase.util
              [date :as du]
@@ -397,13 +398,16 @@
   parses the date returned, preserving its timezone"
   [native-query date-formatters]
   (fn [driver database]
+    {:pre [(map? database)]}
     (let [settings (when-let [report-tz (report-timezone-if-supported driver)]
                      {:settings {:report-timezone report-tz}})
           time-str (try
-                     (->> (merge settings {:database database, :native {:query native-query}})
-                          (execute-query driver)
-                          :rows
-                          ffirst)
+                     (qp.store/with-store
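+                       ;; seed the QP store with the Database; execute-query implementations may look it up there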
+                       (qp.store/store-database! database)
+                       (->> (merge settings {:database database, :native {:query native-query}})
+                            (execute-query driver)
+                            :rows
+                            ffirst))
                      (catch Exception e
                        (throw
                         (Exception.
@@ -414,8 +418,9 @@
         (catch Exception e
           (throw
            (Exception.
-            (tru "Unable to parse date string ''{0}'' for database engine ''{1}''"
-                    time-str (-> database :engine name)) e)))))))
+            (str
+             (tru "Unable to parse date string ''{0}'' for database engine ''{1}''"
+                  time-str (-> database :engine name))) e)))))))
 
 (defn class->base-type
   "Return the `Field.base_type` that corresponds to a given class returned by the DB.
diff --git a/src/metabase/driver/bigquery.clj b/src/metabase/driver/bigquery.clj
index 2aac92b914f695c4550826bca99125d1dc9b3a0d..993331dfd02d7e3f586e4aae2faee56f92df4b8b 100644
--- a/src/metabase/driver/bigquery.clj
+++ b/src/metabase/driver/bigquery.clj
@@ -5,9 +5,7 @@
              [format :as tformat]]
             [clojure
              [set :as set]
-             [string :as str]
-             [walk :as walk]]
-            [clojure.tools.logging :as log]
+             [string :as str]]
             [honeysql
              [core :as hsql]
              [format :as hformat]
@@ -21,14 +19,20 @@
              [google :as google]]
             [metabase.driver.generic-sql.query-processor :as sqlqp]
             [metabase.driver.generic-sql.util.unprepare :as unprepare]
+            [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models.table :as table]
             [metabase.query-processor
-             [annotate :as annotate]
              [store :as qp.store]
              [util :as qputil]]
+            [metabase.query-processor.middleware.annotate :as annotate]
             [metabase.util
              [date :as du]
              [honeysql-extensions :as hx]
-             [i18n :refer [tru]]]
+             [i18n :refer [tru]]
+             [schema :as su]]
+            [schema.core :as s]
             [toucan.db :as db])
   (:import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
            com.google.api.client.http.HttpRequestInitializer
@@ -37,8 +41,7 @@
             TableList$Tables TableReference TableRow TableSchema]
            honeysql.format.ToSql
            java.sql.Time
-           [java.util Collections Date]
-           [metabase.query_processor.interface AggregationWithField AggregationWithoutField Expression Field TimeValue]))
+           [java.util Collections Date]))
 
 (defrecord BigQueryDriver []
   :load-ns true
@@ -62,7 +65,7 @@
   you will want to avoid this function for SQL queries."
   []
   {:pre [(map? sqlqp/*query*)], :post [(valid-bigquery-identifier? %)]}
-  (get-in sqlqp/*query* [:database :details :dataset-id]))
+  (:dataset-id sqlqp/*query*))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -241,8 +244,10 @@
                                       :let [parser-fn (type->parser (.getType field))]]
                                   (parser-fn *bigquery-timezone*)))
            columns             (for [column (table-schema->metabase-field-info schema)]
-                                 (set/rename-keys column {:base-type :base_type}))]
-       {:columns (map :name columns)
+                                 (-> column
+                                     (set/rename-keys {:base-type :base_type})
+                                     (dissoc :database-type)))]
+       {:columns (map (comp u/keyword->qualified-name :name) columns)
         :cols    columns
         :rows    (for [^TableRow row (.getRows response)]
                    (for [[^TableCell cell, parser] (partition 2 (interleave (.getF row) parsers))]
@@ -254,6 +259,7 @@
                          (parser v)))))}))))
 
 (defn- process-native* [database query-string]
+  {:pre [(map? database) (map? (:details database))]}
   ;; automatically retry the query if it times out or otherwise fails. This is on top of the auto-retry added by
   ;; `execute` so operations going through `process-native*` may be retried up to 3 times.
   (u/auto-retry 1
@@ -307,6 +313,8 @@
 ;; This record type used for BigQuery table and field identifiers, since BigQuery has some stupid rules about how to
 ;; quote them (tables are like `dataset.table` and fields are like `dataset.table`.`field`)
 ;; This implements HoneySql's ToSql protocol, so we can just output this directly in most of our QP code below
+;;
+;; TODO - this is totally unnecessary now, we can just override `->honeysql` for `Field` and `Table` instead. FIXME!
 (defrecord ^:private BigQueryIdentifier [dataset-name ; optional; will use (dataset-name-for-current-query) otherwise
                                          table-name
                                          field-name]
@@ -331,7 +339,6 @@
 
 (defn- honeysql-form->sql ^String [honeysql-form]
   {:pre [(map? honeysql-form)]}
-  ;; replace identifiers like `shakespeare`.`word` with ones like `shakespeare.word` since that's what BigQuery expects
   (let [[sql & args] (sql/honeysql-form->sql+args bq-driver honeysql-form)]
     (when (seq args)
       (throw (Exception. (str (tru "BigQuery statements can't be parameterized!")))))
@@ -343,8 +350,7 @@
   ;; we can go ahead and strip off the table name from the alias since we don't want it to show up in the result
   ;; column names
   (let [demangle-name #(str/replace % (re-pattern (str \^ table-name "___")) "")
-        columns       (for [column columns]
-                        (keyword (demangle-name column)))
+        columns       (map demangle-name columns)
         rows          (for [row rows]
                         (zipmap columns row))
         columns       (vec (keys (first rows)))]
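To make the demangling concrete, a standalone sketch with an illustrative table name:

```clojure
(require '[clojure.string :as str])

(let [table-name    "venues"
      demangle-name #(str/replace % (re-pattern (str \^ table-name "___")) "")]
  (map demangle-name ["venues___id" "venues___name" "count"]))
;; => ("id" "name" "count") ; only the leading "<table>___" prefix is stripped
```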
@@ -360,34 +366,19 @@
                          (str/replace #"(^\d)" "_$1"))]
     (subs replaced-str 0 (min 128 (count replaced-str)))))
 
-(defn- agg-or-exp? [x]
-  (or (instance? Expression x)
-      (instance? AggregationWithField x)
-      (instance? AggregationWithoutField x)))
+(s/defn ^:private bq-aggregate-name :- su/NonBlankString
+  "Return an approriate name for an `ag-clause`."
+  [ag-clause :- mbql.s/Aggregation]
+  (-> ag-clause annotate/aggregation-name format-custom-field-name))
 
-(defn- bg-aggregate-name [aggregate]
-  (-> aggregate annotate/aggregation-name format-custom-field-name))
-
-(defn- pre-alias-aggregations
+(s/defn ^:private pre-alias-aggregations
   "Expressions are not allowed in the order by clauses of a BQ query. To sort by a custom expression, that custom
   expression must be aliased from the order by. This code will find the aggregations and give them a name if they
   don't already have one. This name can then be used in the order by if one is present."
-  [query]
-  (let [aliases (atom {})]
-    (walk/postwalk (fn [maybe-agg]
-                     (if-let [exp-name (and (agg-or-exp? maybe-agg)
-                                            (bg-aggregate-name maybe-agg))]
-                       (if-let [usage-count (get @aliases exp-name)]
-                         (let [new-custom-name (str exp-name "_" (inc usage-count))]
-                           (swap! aliases assoc
-                                  exp-name (inc usage-count)
-                                  new-custom-name 1)
-                           (assoc maybe-agg :custom-name new-custom-name))
-                         (do
-                           (swap! aliases assoc exp-name 1)
-                           (assoc maybe-agg :custom-name exp-name)))
-                       maybe-agg))
-                   query)))
+  [{{aggregations :aggregation} :query, :as outer-query}]
+  (if-not (seq aggregations)
+    outer-query
+    (update-in outer-query [:query :aggregation] (partial mbql.u/pre-alias-and-uniquify-aggregations bq-aggregate-name))))
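A sketch of what this is expected to produce, assuming `mbql.u/pre-alias-and-uniquify-aggregations` wraps each aggregation in `:named` and uniquifies repeated names:

```clojure
(mbql.u/pre-alias-and-uniquify-aggregations bq-aggregate-name
  [[:sum [:field-id 1]] [:sum [:field-id 1]]])
;; => [[:named [:sum [:field-id 1]] "sum"]
;;     [:named [:sum [:field-id 1]] "sum_2"]]
```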
 
 ;; These provide implementations of `->honeysql` that prevent HoneySQL from converting forms to prepared statement
 ;; parameters (`?` symbols)
@@ -404,67 +395,79 @@
   [_ date]
   (hsql/call :timestamp (hx/literal (du/date->iso-8601 date))))
 
-(defmethod sqlqp/->honeysql [BigQueryDriver TimeValue]
-  [driver {:keys [value]}]
+(defmethod sqlqp/->honeysql [BigQueryDriver :time]
+  [driver [_ value unit]]
   (->> value
        (unparse-bigquery-time *bigquery-timezone*)
        (sqlqp/->honeysql driver)
+       (sql/date driver unit)
        hx/->time))
 
-(defmethod sqlqp/->honeysql [BigQueryDriver Field]
-  [_ {:keys [table-name field-name special-type] :as field}]
-  (let [field (map->BigQueryIdentifier {:table-name table-name, :field-name field-name})]
+(defmethod sqlqp/->honeysql [Object :datetime-field]
+  [driver [_ field unit]]
+  (sql/date driver unit (sqlqp/->honeysql driver field)))
+
+(defmethod sqlqp/->honeysql [BigQueryDriver :field-id]
+  [_ [_ field-id]]
+  (let [{field-name :name, special-type :special_type, table-id :table_id} (qp.store/field field-id)
+        {table-name :name}                                                 (qp.store/table table-id)
+        field                                                              (map->BigQueryIdentifier
+                                                                            {:table-name table-name
+                                                                             :field-name field-name})]
     (cond
       (isa? special-type :type/UNIXTimestampSeconds)      (unix-timestamp->timestamp field :seconds)
       (isa? special-type :type/UNIXTimestampMilliseconds) (unix-timestamp->timestamp field :milliseconds)
       :else                                               field)))
 
+(defn- ag-ref->alias [[_ index]]
+  (let [{{aggregations :aggregation} :query} sqlqp/*query*
+        [ag-type :as ag]                     (nth aggregations index)]
+    (mbql.u/match-one ag
+      [:distinct _]              "count"
+      [:expression operator & _] operator
+      [:named _ ag-name]         ag-name
+      [ag-type & _]              ag-type)))
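For example, with illustrative aggregations bound in `sqlqp/*query*`:

```clojure
(binding [sqlqp/*query* {:query {:aggregation [[:named [:sum [:field-id 1]] "total"]]}}]
  (ag-ref->alias [:aggregation 0]))
;; => "total"
```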
+
 (defn- field->alias
   "Generate an appropriate alias for a `field`. This will normally be something like `tableName___fieldName` (done this
   way because BigQuery will not let us include symbols in identifiers, so we can't make our alias be
   `tableName.fieldName`, like we do for other drivers)."
-  [driver {:keys [^String field-name, ^String table-name, ^Integer index, field], :as this}]
-  {:pre [(map? this) (or field
-                         index
-                         (and (seq field-name) (seq table-name))
-                         (log/error "Don't know how to alias: " this))]}
-  (cond
-    field (recur driver field) ; type/DateTime
-    index (let [{{aggregations :aggregation} :query} sqlqp/*query*
-                {ag-type :aggregation-type :as agg}  (nth aggregations index)]
-            (cond
-              (= ag-type :distinct)
-              "count"
-
-              (instance? Expression agg)
-              (:custom-name agg)
-
-              :else
-              (name ag-type)))
-
-    :else (str table-name "___" field-name)))
+  [driver {field-name :name, table-id :table_id, :as field}]
+  (let [{table-name :name} (qp.store/table table-id)]
+    (str table-name "___" field-name)))
 
 (defn- field->identifier
   "Generate appropriate identifier for a Field for SQL parameters. (NOTE: THIS IS ONLY USED FOR SQL PARAMETERS!)"
-  ;; TODO - Making 2 DB calls for each field to fetch its dataset is inefficient and makes me cry, but this method is
+  ;; TODO - Making a DB call for each field to fetch its Table is inefficient and makes me cry, but this method is
   ;; currently only used for SQL params so it's not a huge deal at this point
+  ;;
+  ;; TODO - we should make sure these are in the QP store somewhere and then could at least batch the calls
   [{table-id :table_id, :as field}]
-  (let [{table-name :name, database-id :db_id} (db/select-one ['Table :name :db_id], :id (u/get-id table-id))
-        details                                (db/select-one-field :details 'Database, :id (u/get-id database-id))]
+  (let [table-name (db/select-one-field :name table/Table :id (u/get-id table-id))
+        details    (:details (qp.store/database))]
     (map->BigQueryIdentifier {:dataset-name (:dataset-id details), :table-name table-name, :field-name (:name field)})))
 
-(defn- field->breakout-identifier [driver field]
-  (hsql/raw (str \` (field->alias driver field) \`)))
+(defn- field-clause->field [field-clause]
+  (when field-clause
+    (let [id-or-name (mbql.u/field-clause->id-or-literal field-clause)]
+      (when (integer? id-or-name)
+        (qp.store/field id-or-name)))))
 
-(defn- apply-breakout [driver honeysql-form {breakout-fields :breakout, fields-fields :fields}]
+(defn- field->breakout-identifier [driver field-clause]
+  (let [alias (if (mbql.u/is-clause? :aggregation field-clause)
+                (ag-ref->alias field-clause)
+                (field->alias driver (field-clause->field field-clause)))]
+    (hsql/raw (str \` alias \`))))
+
+(defn- apply-breakout [driver honeysql-form {breakout-field-clauses :breakout, fields-field-clauses :fields}]
   (-> honeysql-form
       ;; Group by all the breakout fields
-      ((partial apply h/group)  (map #(field->breakout-identifier driver %) breakout-fields))
+      ((partial apply h/group) (map #(field->breakout-identifier driver %) breakout-field-clauses))
       ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it
       ;; twice, or HoneySQL will barf
-      ((partial apply h/merge-select) (for [field breakout-fields
-                                            :when (not (contains? (set fields-fields) field))]
-                                        (sqlqp/as driver (sqlqp/->honeysql driver field) field)))))
+      ((partial apply h/merge-select) (for [field-clause breakout-field-clauses
+                                            :when        (not (contains? (set fields-field-clauses) field-clause))]
+                                        (sqlqp/as driver (sqlqp/->honeysql driver field-clause) field-clause)))))
 
 (defn apply-source-table
   "Copy of the Generic SQL implementation of `apply-source-table` that prepends the current dataset ID to the table
@@ -477,24 +480,26 @@
   "Copy of the Generic SQL implementation of `apply-join-tables`, but prepends the current dataset ID to join-alias."
   [honeysql-form {join-tables :join-tables, source-table-id :source-table}]
   (let [{source-table-name :name} (qp.store/table source-table-id)]
-    (loop [honeysql-form honeysql-form, [{:keys [table-name pk-field source-field join-alias]} & more] join-tables]
-      (let [honeysql-form
+    (loop [honeysql-form honeysql-form, [{:keys [table-id pk-field-id fk-field-id join-alias]} & more] join-tables]
+      (let [{table-name :name} (qp.store/table table-id)
+            source-field       (qp.store/field fk-field-id)
+            pk-field           (qp.store/field pk-field-id)
+
+            honeysql-form
             (h/merge-left-join honeysql-form
               [(map->BigQueryIdentifier {:table-name table-name})
                (map->BigQueryIdentifier {:table-name join-alias})]
               [:=
-               (map->BigQueryIdentifier {:table-name source-table-name, :field-name (:field-name source-field)})
-               (map->BigQueryIdentifier {:table-name join-alias, :field-name (:field-name pk-field)})])]
+               (map->BigQueryIdentifier {:table-name source-table-name, :field-name (:name source-field)})
+               (map->BigQueryIdentifier {:table-name join-alias, :field-name (:name pk-field)})])]
         (if (seq more)
           (recur honeysql-form more)
           honeysql-form)))))
 
-(defn- apply-order-by [driver honeysql-form {subclauses :order-by}]
-  (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses]
-    (let [honeysql-form (h/merge-order-by honeysql-form [(field->breakout-identifier driver field)
-                                                         (case direction
-                                                           :ascending  :asc
-                                                           :descending :desc)])]
+(defn- apply-order-by [driver honeysql-form {subclauses :order-by, :as query}]
+  (loop [honeysql-form honeysql-form, [[direction field-clause] & more] subclauses]
+    (let [honeysql-form (h/merge-order-by honeysql-form [(field->breakout-identifier driver field-clause)
+                                                         direction])]
       (if (seq more)
         (recur honeysql-form more)
         honeysql-form))))
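Note that order-by subclauses now carry the direction first and already use `:asc`/`:desc`, so they can be passed to `h/merge-order-by` as-is. The destructuring in isolation:

```clojure
(let [[[direction field-clause]] [[:asc [:field-id 42]] [:desc [:aggregation 0]]]]
  [direction field-clause])
;; => [:asc [:field-id 42]]
```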
@@ -516,16 +521,18 @@
      *  Runs our custom `honeysql-form->sql` method
      *  Includes `table-name` in the resulting map (we don't remember why we do this; perhaps it is needed to run the
         query)"
-  [{{{:keys [dataset-id]} :details, :as database} :database
-    {source-table-id :source-table}               :query
-    :as                                           outer-query}]
-  {:pre [(map? database) (seq dataset-id)]}
-  (let [aliased-query      (pre-alias-aggregations outer-query)
+  [{database-id                     :database
+    {source-table-id :source-table} :query
+    :as                             outer-query}]
+  {:pre [(integer? database-id)]}
+  (let [dataset-id         (-> (qp.store/database) :details :dataset-id)
+        aliased-query      (pre-alias-aggregations outer-query)
         {table-name :name} (qp.store/table source-table-id)]
-    (binding [sqlqp/*query* aliased-query]
+    (assert (seq dataset-id))
+    (binding [sqlqp/*query* (assoc aliased-query :dataset-id dataset-id)]
       {:query      (->> aliased-query
-                       (sqlqp/build-honeysql-form bq-driver)
-                       honeysql-form->sql)
+                        (sqlqp/build-honeysql-form bq-driver)
+                        honeysql-form->sql)
        :table-name table-name
        :mbql?      true})))
 
@@ -535,18 +542,16 @@
     (time/time-zone-for-id (.getID jvm-tz))
     time/utc))
 
-(defn- execute-query [{database                                               :database
-                       {sql :query, params :params, :keys [table-name mbql?]} :native
+(defn- execute-query [{{sql :query, params :params, :keys [table-name mbql?]} :native
                        :as                                                    outer-query}]
-  (binding [*bigquery-timezone* (effective-query-timezone database)]
-    (let [sql     (str "-- " (qputil/query->remark outer-query) "\n" (if (seq params)
-                                                                       (unprepare/unprepare (cons sql params))
-                                                                       sql))
-          results (process-native* database sql)
-          results (if mbql?
-                    (post-process-mbql table-name results)
-                    (update results :columns (partial map keyword)))]
-      (assoc results :annotate? mbql?))))
+  (let [database (qp.store/database)]
+    (binding [*bigquery-timezone* (effective-query-timezone database)]
+      (let [sql     (str "-- " (qputil/query->remark outer-query) "\n" (if (seq params)
+                                                                         (unprepare/unprepare (cons sql params))
+                                                                         sql))
+            results (process-native* database sql)]
+        (cond->> results
+          mbql? (post-process-mbql table-name))))))
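`cond->>` threads the results through `post-process-mbql` only when `mbql?` is truthy; native results pass through untouched. The macro in isolation:

```clojure
(cond->> [1 2 3] false (map inc)) ;; => [1 2 3]
(cond->> [1 2 3] true  (map inc)) ;; => (2 3 4)
```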
 
 
 ;; BigQuery doesn't return a timezone with its time strings, as they're always UTC; JodaTime parsing also defaults to UTC
diff --git a/src/metabase/driver/druid.clj b/src/metabase/driver/druid.clj
index 11018b9cb091567aaf6e06bf62d2d4e194142eac..ae8a08ee149761b554c471b3242e2ff722df9259 100644
--- a/src/metabase/driver/druid.clj
+++ b/src/metabase/driver/druid.clj
@@ -46,6 +46,7 @@
 ;;; ### Misc. Driver Fns
 
 (defn- can-connect? [details]
+  {:pre [(map? details)]}
   (ssh/with-ssh-tunnel [details-with-tunnel details]
     (= 200 (:status (http/get (details->url details-with-tunnel "/status"))))))
 
@@ -53,7 +54,7 @@
 ;;; ### Query Processing
 
 (defn- do-query [details query]
-  {:pre [(map? query)]}
+  {:pre [(map? details) (map? query)]}
   (ssh/with-ssh-tunnel [details-with-tunnel details]
     (try
       (POST (details->url details-with-tunnel "/druid/v2"), :body query)
@@ -70,6 +71,7 @@
           (throw (Exception. message e)))))))
 
 (defn- do-query-with-cancellation [details query]
+  {:pre [(map? details) (map? query)]}
   (let [query-id  (get-in query [:context :queryId])
         query-fut (future (do-query details query))]
     (try
@@ -96,6 +98,7 @@
 ;;; ### Sync
 
 (defn- do-segment-metadata-query [details datasource]
+  {:pre [(map? details)]}
   (do-query details {"queryType"     "segmentMetadata"
                      "dataSource"    datasource
                      "intervals"     ["1999-01-01/2114-01-01"]
diff --git a/src/metabase/driver/druid/query_processor.clj b/src/metabase/driver/druid/query_processor.clj
index 8bf38aa489e5d5ef5264611d5e1b98d2578981b7..c045fc2e8be36d3b324eeff0e0b0163072c3743b 100644
--- a/src/metabase/driver/druid/query_processor.clj
+++ b/src/metabase/driver/druid/query_processor.clj
@@ -9,15 +9,16 @@
             [clojure.string :as str]
             [clojure.tools.logging :as log]
             [metabase.driver.druid.js :as js]
+            [metabase.mbql.util :as mbql.u]
             [metabase.query-processor
-             [annotate :as annotate]
-             [store :as qp.store]
-             [interface :as i]]
+             [interface :as i]
+             [store :as qp.store]]
+            [metabase.query-processor.middleware.annotate :as annotate]
             [metabase.util :as u]
-            [metabase.util.date :as du])
+            [metabase.util
+             [date :as du]
+             [i18n :as ui18n :refer [tru]]])
   (:import java.util.TimeZone
-           [metabase.query_processor.interface AgFieldRef DateTimeField DateTimeValue Expression Field
-            RelativeDateTimeValue Value]
            org.joda.time.DateTimeZone))
 
 (def ^:private ^:const topN-max-results
@@ -51,38 +52,65 @@
 
 (defn- query-type-dispatch-fn [query-type & _] query-type)
 
-(defprotocol ^:private IRValue
-  (^:private ->rvalue [this]))
-
-(extend-protocol IRValue
-  nil                   (->rvalue [_] nil)
-  Object                (->rvalue [this] this)
-  AgFieldRef            (->rvalue [{index :index}] (let [ag      (nth (:aggregation *query*) index)
-                                                         ag-type (or (:aggregation-type ag)
-                                                                     (throw (Exception. "Unknown aggregation type!")))]
-                                                     (if (= ag-type :distinct)
-                                                       :distinct___count
-                                                       ag-type)))
-  Field                 (->rvalue [this] (:field-name this))
-  DateTimeField         (->rvalue [this] (->rvalue (:field this)))
-  Value                 (->rvalue [this] (:value this))
-  DateTimeValue         (->rvalue [{{unit :unit} :field, value :value}] (du/date->iso-8601 (du/date-trunc unit value (get-timezone-id))))
-  RelativeDateTimeValue (->rvalue [{:keys [unit amount]}] (du/date->iso-8601 (du/date-trunc unit (du/relative-date unit amount) (get-timezone-id)))))
-
-(defprotocol ^:private IDimensionOrMetric
-  (^:private dimension-or-metric? [this]
-   "Is this `Field`/`DateTimeField` a `:dimension` or `:metric`?"))
-
-(extend-protocol IDimensionOrMetric
-  Field         (dimension-or-metric? [{:keys [base-type]}]
-                  (cond
-                    (isa? base-type :type/Text)             :dimension
-                    (isa? base-type :type/Float)            :metric
-                    (isa? base-type :type/Integer)          :metric
-                    (isa? base-type :type/DruidHyperUnique) :metric))
-
-  DateTimeField (dimension-or-metric? [this]
-                  (dimension-or-metric? (:field this))))
+(defmulti ^:private ->rvalue mbql.u/dispatch-by-clause-name-or-class)
+
+(defmethod ->rvalue nil [_]
+  nil)
+
+(defmethod ->rvalue Object [this]
+  this)
+
+(defmethod ->rvalue :aggregation [[_ index]]
+  (let [ag      (nth (:aggregation *query*) index)
+        ag-type (first ag)]
+    (cond
+
+      (= [:count] ag)
+      :count
+
+      (= ag-type :distinct)
+      :distinct___count
+
+      ag-type
+      ag-type
+
+      :else
+      (throw (Exception. "Unknown aggregation type!")))))
+
+(defmethod ->rvalue :field-id [[_ field-id]]
+  (:name (qp.store/field field-id)))
+
+(defmethod ->rvalue :datetime-field [[_ field]]
+  (->rvalue field))
+
+(defmethod ->rvalue :absolute-datetime [[_ timestamp unit]]
+  (du/date->iso-8601 (du/date-trunc unit timestamp (get-timezone-id))))
+
+;; TODO - not 100% sure how to handle times here, just treating it exactly like a date will have to do for now
+(defmethod ->rvalue :time [[_ time unit]]
+  (du/date->iso-8601 (du/date-trunc unit time (get-timezone-id))))
+
+(defmethod ->rvalue :relative-datetime [[_ amount unit]]
+  (du/date->iso-8601 (du/date-trunc unit (du/relative-date unit amount) (get-timezone-id))))
+
+(defmethod ->rvalue :value [[_ value]]
+  (->rvalue value))
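`dispatch-by-clause-name-or-class` dispatches on the clause keyword for MBQL vectors and on the class otherwise, so the old protocol's per-type behavior carries over. A few illustrative calls (the `:field-id` case additionally needs the Field in the QP store):

```clojure
(->rvalue nil)         ;; => nil
(->rvalue "a-literal") ;; => "a-literal" (Object passthrough)
(->rvalue [:relative-datetime -1 :day])
;; => an ISO-8601 string for yesterday, truncated to :day
```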
+
+
+(defmulti ^:private ^{:doc "Is this field clause a `:dimension` or `:metric`?"}
+  dimension-or-metric?
+  mbql.u/dispatch-by-clause-name-or-class)
+
+(defmethod dimension-or-metric? :field-id [[_ field-id]]
+  (let [{base-type :base_type} (qp.store/field field-id)]
+    (cond
+      (isa? base-type :type/Text)             :dimension
+      (isa? base-type :type/Float)            :metric
+      (isa? base-type :type/Integer)          :metric
+      (isa? base-type :type/DruidHyperUnique) :metric)))
+
+(defmethod dimension-or-metric? :datetime-field [[_ field]]
+  (dimension-or-metric? field))
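For instance, assuming (hypothetically) that field 10 is stored with `:base_type :type/Text` and field 11 with `:type/Float`:

```clojure
(dimension-or-metric? [:field-id 10])                        ;; => :dimension
(dimension-or-metric? [:field-id 11])                        ;; => :metric
(dimension-or-metric? [:datetime-field [:field-id 10] :day]) ;; => :dimension (recurses)
```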
 
 
 (def ^:private ^:const query-type->default-query
@@ -101,29 +129,28 @@
 
 
 
-;;; ### handle-source-table
+;;; ---------------------------------------------- handle-source-table -----------------------------------------------
 
 (defn- handle-source-table [_ {source-table-id :source-table} query-context]
   (let [{source-table-name :name} (qp.store/table source-table-id)]
     (assoc-in query-context [:query :dataSource] source-table-name)))
 
 
-;;; ### handle-aggregation
+;;; ----------------------------------------------- handle-aggregation -----------------------------------------------
 
 (declare filter:not filter:nil?)
 
-(defn- field? [arg]
-  (or (instance? Field arg)
-      (instance? DateTimeField arg)))
+(def ^:private ^{:arglists '([clause])} field?
+  (partial mbql.u/is-clause? #{:field-id :datetime-field}))
 
-(defn- expression->field-names [{:keys [args]}]
+(defn- expression->field-names [[_ & args]]
   {:post [(every? (some-fn keyword? string?) %)]}
   (flatten (for [arg   args
                  :when (or (field? arg)
-                           (instance? Expression arg))]
+                           (mbql.u/is-clause? #{:+ :- :/ :*} arg))]
              (cond
-               (instance? Expression arg) (expression->field-names arg)
-               (field? arg)               (->rvalue arg)))))
+               (mbql.u/is-clause? #{:+ :- :/ :*} arg) (expression->field-names arg)
+               (field? arg)                           (->rvalue arg)))))
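e.g., assuming the QP store maps field 1 to "price" and field 2 to "rating" (hypothetical names):

```clojure
(expression->field-names [:+ [:field-id 1] 2 [:* [:field-id 2] 3]])
;; => ("price" "rating") ; constants are skipped, nested arithmetic is walked
```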
 
 (defn- expression-arg->js [arg default-value]
   (if-not (field? arg)
@@ -131,7 +158,7 @@
     (js/or (js/parse-float (->rvalue arg))
            default-value)))
 
-(defn- expression->js [{:keys [operator args]} default-value]
+(defn- expression->js [[operator & args] default-value]
   (apply (case operator
            :+ js/+
            :- js/-
@@ -140,7 +167,7 @@
          (for [arg args]
            (expression-arg->js arg default-value))))
 
-(defn- ag:doubleSum:expression [{operator :operator,  :as expression} output-name]
+(defn- ag:doubleSum:expression [[operator :as expression] output-name]
   (let [field-names (expression->field-names expression)]
     {:type        :javascript
      :name        output-name
@@ -152,18 +179,18 @@
      :fnCombine   (js/function [:x :y]
                     (js/return (js/+ :x :y)))}))
 
-(defn- ag:doubleSum [field output-name]
-  (if (instance? Expression field)
-    (ag:doubleSum:expression field output-name)
+(defn- ag:doubleSum [field-clause output-name]
+  (if (mbql.u/is-clause? #{:+ :- :/ :*} field-clause)
+    (ag:doubleSum:expression field-clause output-name)
     ;; metrics can use the built-in :doubleSum aggregator, but for dimensions we have to roll something that does the
     ;; same thing in JS
-    (case (dimension-or-metric? field)
+    (case (dimension-or-metric? field-clause)
       :metric    {:type      :doubleSum
                   :name      output-name
-                  :fieldName (->rvalue field)}
+                  :fieldName (->rvalue field-clause)}
       :dimension {:type        :javascript
                   :name        output-name
-                  :fieldNames  [(->rvalue field)]
+                  :fieldNames  [(->rvalue field-clause)]
                   :fnReset     "function() { return 0 ; }"
                   :fnAggregate "function(current, x) { return current + (parseFloat(x) || 0); }"
                   :fnCombine   "function(x, y) { return x + y; }"})))
@@ -180,16 +207,16 @@
      :fnCombine   (js/function [:x :y]
                     (js/return (js/fn-call :Math.min :x :y)))}))
 
-(defn- ag:doubleMin [field output-name]
-  (if (instance? Expression field)
-    (ag:doubleMin:expression field output-name)
-    (case (dimension-or-metric? field)
+(defn- ag:doubleMin [field-clause output-name]
+  (if (mbql.u/is-clause? #{:+ :- :/ :*} field-clause)
+    (ag:doubleMin:expression field-clause output-name)
+    (case (dimension-or-metric? field-clause)
       :metric    {:type      :doubleMin
                   :name      output-name
-                  :fieldName (->rvalue field)}
+                  :fieldName (->rvalue field-clause)}
       :dimension {:type        :javascript
                   :name        output-name
-                  :fieldNames  [(->rvalue field)]
+                  :fieldNames  [(->rvalue field-clause)]
                   :fnReset     "function() { return Number.MAX_VALUE ; }"
                   :fnAggregate "function(current, x) { return Math.min(current, (parseFloat(x) || Number.MAX_VALUE)); }"
                   :fnCombine   "function(x, y) { return Math.min(x, y); }"})))
@@ -207,7 +234,7 @@
                     (js/return (js/fn-call :Math.max :x :y)))}))
 
 (defn- ag:doubleMax [field output-name]
-  (if (instance? Expression field)
+  (if (mbql.u/is-clause? #{:+ :- :/ :*} field)
     (ag:doubleMax:expression field output-name)
     (case (dimension-or-metric? field)
       :metric    {:type      :doubleMax
@@ -245,7 +272,7 @@
 
       [:count  nil] [[(or output-name-kwd :count)] {:aggregations [(ag:count (or output-name :count))]}]
 
-      [:count    _] [[(or output-name-kwd :count)] {:aggregations [(ag:count ag-field (or output-name :count))]}]
+      [:count    _] [[(or output-name-kwd :count)] {:aggregations [(ag:count ag-field (or (name output-name) :count))]}]
 
       [:avg      _] (let [count-name (name (gensym "___count_"))
                           sum-name   (name (gensym "___sum_"))]
@@ -259,15 +286,15 @@
                                                      {:type :fieldAccess, :fieldName count-name}]}]}])
       [:distinct _] [[(or output-name-kwd :distinct___count)]
                      {:aggregations [(ag:distinct ag-field (or output-name :distinct___count))]}]
-      [:sum      _] [[(or output-name-kwd :sum)] {:aggregations [(ag:doubleSum ag-field (or output-name :sum))]}]
+      [:sum      _] [[(or output-name-kwd :sum)] {:aggregations [(ag:doubleSum ag-field (or (name output-name) :sum))]}]
       [:min      _] [[(or output-name-kwd :min)] {:aggregations [(ag:doubleMin ag-field (or output-name :min))]}]
       [:max      _] [[(or output-name-kwd :max)] {:aggregations [(ag:doubleMax ag-field (or output-name :max))]}])))
 
-(defn- handle-aggregation
-  [query-type
-   {ag-type :aggregation-type, ag-field :field, output-name :output-name, custom-name :custom-name, :as ag}
-   query-context]
-  (let [output-name (or custom-name output-name)]
+(defn- handle-aggregation [query-type ag-clause query-context]
+  (let [output-name        (annotate/aggregation-name ag-clause)
+        [ag-type ag-field] (mbql.u/match-one ag-clause
+                             [:named ag _] (recur ag)
+                             [_ _]         &match)]
     (if-not (isa? query-type ::ag-query)
       query-context
       (let [[projections ag-clauses] (create-aggregation-clause output-name ag-type ag-field)]
@@ -275,44 +302,67 @@
             (update :projections #(vec (concat % projections)))
             (update :query #(merge-with concat % ag-clauses)))))))
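The `match-one` form above peels off a `:named` wrapper to find the underlying aggregation, while `output-name` keeps the wrapper's name. In isolation:

```clojure
(mbql.u/match-one [:named [:sum [:field-id 1]] "total"]
  [:named ag _] (recur ag)
  [_ _]         &match)
;; => [:sum [:field-id 1]] ; output-name above would be "total"
```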
 
-(defn- add-expression-aggregation-output-names [args]
-  (for [arg args]
-    (cond
-      (number? arg)
-      arg
-
-      (:aggregation-type arg)
-      (assoc arg :output-name (or (:output-name arg)
-                                  (name (gensym (str "___" (name (:aggregation-type arg)) "_")))))
-
-      (instance? Expression arg)
-      (update arg :args add-expression-aggregation-output-names))))
-
-(defn- expression-post-aggregation [{:keys [operator args], :as expression}]
-  {:type   :arithmetic
-   :name   (annotate/aggregation-name expression)
-   :fn     operator
-   :fields (for [arg args]
-             (cond
-               (number? arg)              {:type :constant, :name (str arg), :value arg}
-               (:output-name arg)         {:type :fieldAccess, :fieldName (:output-name arg)}
-               (instance? Expression arg) (expression-post-aggregation arg)))})
+(defn- add-expression-aggregation-output-names [[operator & args :as expression]]
+  (if (mbql.u/is-clause? :named expression)
+    [:named (add-expression-aggregation-output-names (second expression)) (last expression)]
+    (apply
+     vector
+     operator
+     (for [arg args]
+       (cond
+         (number? arg)
+         arg
+
+         (mbql.u/is-clause? :named arg)
+         arg
+
+         (mbql.u/is-clause? #{:count :avg :distinct :stddev :sum :min :max} arg)
+         [:named arg (name (gensym (str "___" (name (first arg)) "_")))]
+
+         (mbql.u/is-clause? #{:+ :- :/ :*} arg)
+         (add-expression-aggregation-output-names arg))))))
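A sketch of the renaming, with an illustrative gensym suffix:

```clojure
(add-expression-aggregation-output-names [:+ [:sum [:field-id 1]] 2])
;; => [:+ [:named [:sum [:field-id 1]] "___sum_12345"] 2]
;; numbers pass through, and already-:named args are left alone
```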
+
+(defn- expression-post-aggregation [[operator & args, :as expression]]
+  (if (mbql.u/is-clause? :named expression)
+    ;; If it's a named expression, we want to preserve the included name, so recurse, but merge in the name
+    (merge (expression-post-aggregation (second expression))
+           {:name (annotate/aggregation-name expression {:top-level? true})})
+    {:type   :arithmetic
+     :name   (annotate/aggregation-name expression {:top-level? true})
+     :fn     operator
+     :fields (for [arg args]
+               (cond
+                 (number? arg)
+                 {:type :constant, :name (str arg), :value arg}
+
+                 (mbql.u/is-clause? :named arg)
+                 {:type :fieldAccess, :fieldName (last arg)}
+
+                 (mbql.u/is-clause? #{:+ :- :/ :*} arg)
+                 (expression-post-aggregation arg)))}))
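Continuing that sketch, the post-aggregation built for the expression would look roughly like this (the `:name` value comes from `annotate/aggregation-name`):

```clojure
(expression-post-aggregation [:+ [:named [:sum [:field-id 1]] "___sum_12345"] 2])
;; => {:type   :arithmetic
;;     :name   "..." ; whatever annotate/aggregation-name returns for the expression
;;     :fn     :+
;;     :fields [{:type :fieldAccess, :fieldName "___sum_12345"}
;;              {:type :constant, :name "2", :value 2}]}
```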
 
 (declare handle-aggregations)
 
 (defn- expression->actual-ags
   "Return a flattened list of actual aggregations that are needed for EXPRESSION."
-  [expression]
-  (apply concat (for [arg   (:args expression)
+  [[_ & args]]
+  (apply concat (for [arg   args
                       :when (not (number? arg))]
-                  (if (instance? Expression arg)
+                  (if (mbql.u/is-clause? #{:+ :- :/ :*} arg)
                     (expression->actual-ags arg)
                     [arg]))))
 
-(defn- handle-expression-aggregation [query-type {:keys [operator args], :as expression} query-context]
+(defn- unwrap-name [x]
+  (if (mbql.u/is-clause? :named x)
+    (second x)
+    x))
+
+(defn- handle-expression-aggregation [query-type [operator & args, :as expression] query-context]
   ;; filter out constants from the args list
-  (let [expression    (update expression :args add-expression-aggregation-output-names)
-        ags           (expression->actual-ags expression)
+  (let [expression    (add-expression-aggregation-output-names expression)
+        ;; The QP will automatically add a generated name to the expression; if one is there, unwrap it before
+        ;; looking for the underlying aggregations
+        ags           (expression->actual-ags (unwrap-name expression))
         query-context (handle-aggregations query-type {:aggregation ags} query-context)
         post-agg      (expression-post-aggregation expression)]
     (-> query-context
@@ -321,19 +371,26 @@
 
 (defn- handle-aggregations [query-type {aggregations :aggregation} query-context]
   (loop [[ag & more] aggregations, query query-context]
-    (if (instance? Expression ag)
-      (handle-expression-aggregation query-type ag query-context)
-      (let [query (handle-aggregation query-type ag query)]
-        (if-not (seq more)
-          query
-          (recur more query))))))
+    (cond
+      (and (mbql.u/is-clause? :named ag)
+           (mbql.u/is-clause? #{:+ :- :/ :*} (second ag)))
+      (handle-expression-aggregation query-type ag query)
+
+      (mbql.u/is-clause? #{:+ :- :/ :*} ag)
+      (handle-expression-aggregation query-type ag query)
 
+      (not ag)
+      query
 
-;;; ### handle-breakout
+      :else
+      (recur more (handle-aggregation query-type ag query)))))
 
-(defprotocol ^:private IDimension
-  (^:private ->dimension-rvalue [this]
-   "Format `Field` for use in a `:dimension` or `:dimensions` clause."))
+
+;;; ------------------------------------------------ handle-breakout -------------------------------------------------
+
+(defmulti ^:private ^{:doc "Format `Field` for use in a `:dimension` or `:dimensions` clause."}
+  ->dimension-rvalue
+  mbql.u/dispatch-by-clause-name-or-class)
 
 (defn- extract:timeFormat
   "Create a time format extraction. Returns a string. See
@@ -425,19 +482,22 @@
     :quarter-of-year
     :year})
 
-(extend-protocol IDimension
-  nil    (->dimension-rvalue [this] (->rvalue this))
-  Object (->dimension-rvalue [this] (->rvalue this))
-  ;; :timestamp is a special case, and we need to do an 'extraction' against the secret special value :__time to get
-  ;; at it
-  DateTimeField
-  (->dimension-rvalue [{:keys [unit]}]
-    {:type         :extraction
-     :dimension    :__time
-     :outputName   (if (contains? units-that-need-post-processing-int-parsing unit)
-                     :timestamp___int
-                     :timestamp)
-     :extractionFn (unit->extraction-fn unit)}))
+(defmethod ->dimension-rvalue nil [_] (->rvalue nil))
+
+(defmethod ->dimension-rvalue Object [this] (->rvalue this))
+
+(defmethod ->dimension-rvalue :field-id [this] (->rvalue this))
+
+(defmethod ->dimension-rvalue :datetime-field [[_ _ unit]]
+  {:type         :extraction
+   :dimension    :__time
+   ;; :timestamp is a special case, and we need to do an 'extraction' against the secret special value :__time to get
+   ;; at it
+   :outputName   (if (contains? units-that-need-post-processing-int-parsing unit)
+                   :timestamp___int
+                   :timestamp)
+   :extractionFn (unit->extraction-fn unit)})
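So a datetime breakout becomes an extraction against Druid's magic `__time` column; roughly:

```clojure
(->dimension-rvalue [:datetime-field [:field-id 1] :day])
;; => {:type         :extraction
;;     :dimension    :__time
;;     :outputName   :timestamp ; or :timestamp___int for units that need int parsing
;;     :extractionFn ...}       ; whatever unit->extraction-fn returns for :day
```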
+
 
 (defmulti ^:private handle-breakout query-type-dispatch-fn)
 
@@ -447,13 +507,20 @@
 (defmethod handle-breakout ::grouped-timeseries [_ {[breakout-field] :breakout} query-context]
   (assoc-in query-context [:query :granularity] (unit->granularity (:unit breakout-field))))
 
+(defn- field-clause->name [field-clause]
+  (when field-clause
+    (let [id (mbql.u/field-clause->id-or-literal field-clause)]
+      (if (integer? id)
+        (:name (qp.store/field id))
+        id))))
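e.g., assuming field 10 is named "category" in the QP store, and that `mbql.u/field-clause->id-or-literal` returns the integer ID for `:field-id` clauses and the literal name for `:field-literal` clauses:

```clojure
(field-clause->name [:field-id 10])                         ;; => "category"
(field-clause->name [:field-literal "count" :type/Integer]) ;; => "count"
(field-clause->name nil)                                    ;; => nil
```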
+
 (defmethod handle-breakout ::topN [_ {[breakout-field] :breakout} query-context]
   (let [dim-rvalue (->dimension-rvalue breakout-field)]
     (-> query-context
         (update :projections conj (keyword (if (and (map? dim-rvalue)
                                                     (contains? dim-rvalue :outputName))
                                              (:outputName dim-rvalue)
-                                             (name breakout-field))))
+                                             (field-clause->name breakout-field))))
         (assoc-in [:query :dimension] dim-rvalue))))
 
 (defmethod handle-breakout ::groupBy [_ {breakout-fields :breakout} query-context]
@@ -463,12 +530,12 @@
                                          (keyword (if (and (map? dim-rvalue)
                                                            (contains? dim-rvalue :outputName))
                                                     (:outputName dim-rvalue)
-                                                    (name breakout-field)))))
+                                                    (field-clause->name breakout-field)))))
                                      breakout-fields))
       (assoc-in [:query :dimensions] (mapv ->dimension-rvalue breakout-fields))))
 
 
-;;; ### handle-filter. See http://druid.io/docs/latest/querying/filters.html
+;;; ---------------------- handle-filter. See http://druid.io/docs/latest/querying/filters.html ----------------------
 
 (defn- filter:and [filters]
   {:type   :and
@@ -483,16 +550,16 @@
 (defn- filter:= [field value]
   {:type      :selector
    :dimension (->rvalue field)
-   :value     value})
+   :value     (->rvalue value)})
 
-(defn- filter:nil? [field]
-  (if (instance? Expression field)
-    (filter:and (for [arg   (:args field)
+(defn- filter:nil? [clause-or-field]
+  (if (mbql.u/is-clause? #{:+ :- :/ :*} clause-or-field)
+    (filter:and (for [arg   (rest clause-or-field)
                       :when (field? arg)]
                   (filter:nil? arg)))
-    (filter:= field (case (dimension-or-metric? field)
-                      :dimension nil
-                      :metric    0))))
+    (filter:= clause-or-field (case (dimension-or-metric? clause-or-field)
+                                :dimension nil
+                                :metric    0))))
 
 (defn- filter:like
   "Build a `like` filter clause, which is almost just like a SQL `LIKE` clause."
@@ -516,7 +583,7 @@
   (str/replace s #"([%_\\])" "\\\\$1"))
 
 (defn- filter:bound
-  "Numeric `bound` filter, for finding values of `field` that are less than some value, greater than some value, or
+  "Numeric `bound` filter, for finding values of `field` that are less than some value-or-field, greater than some value-or-field, or
   both. Defaults to being `inclusive` (e.g. `<=` instead of `<`) but specify option `inclusive?` to change this."
   [field & {:keys [lower upper inclusive?]
             :or   {inclusive? true}}]
@@ -528,69 +595,80 @@
    :lowerStrict (not inclusive?)
    :upperStrict (not inclusive?)})
 
-(defn- check-filter-fields [filter-type & fields]
-  (doseq [field fields]
-    (when (= (dimension-or-metric? field) :metric)
-      (throw
-       (IllegalArgumentException.
-        (u/format-color 'red "WARNING: Filtering only works on dimensions! '%s' is a metric. Ignoring %s filter."
-          (->rvalue field) filter-type))))))
-
-(defn- parse-filter-subclause:filter [{:keys [filter-type field value case-sensitive?] :as filter}]
-  {:pre [filter]}
-  ;; We'll handle :timestamp separately. It needs to go in :intervals instead
-  (when-not (instance? DateTimeField field)
-    (try (when field
-           (check-filter-fields filter-type field))
-         (let [value (->rvalue value)]
-           (case filter-type
-             :inside
-             (let [lat       (:lat filter)
-                   lon       (:lon filter)
-                   lat-field (:field lat)
-                   lon-field (:field lon)]
-               (check-filter-fields :inside lat-field lon-field)
-               (filter:and
-                [(filter:bound lat-field, :lower (:min lat), :upper (:max lat))
-                 (filter:bound lon-field, :lower (:min lon), :upper (:max lon))]))
-
-             :between
-             (let [{:keys [min-val max-val]} filter]
-               (filter:bound field, :lower min-val, :upper max-val))
-
-             :is-null
-             (filter:nil? field)
-
-             :not-null
-             (filter:not (filter:nil? field))
-
-             :contains
-             {:type      :search
-              :dimension (->rvalue field)
-              :query     {:type          :contains
-                          :value         value
-                          :caseSensitive case-sensitive?}}
-
-             :starts-with (filter:like field (str (escape-like-filter-pattern value) \%) case-sensitive?)
-             :ends-with   (filter:like field (str \% (escape-like-filter-pattern value)) case-sensitive?)
-
-             :=  (filter:= field value)
-             :!= (filter:not (filter:= field value))
-             :<  (filter:bound field, :upper value, :inclusive? false)
-             :>  (filter:bound field, :lower value, :inclusive? false)
-             :<= (filter:bound field, :upper value)
-             :>= (filter:bound field, :lower value)))
-         (catch Throwable e
-           (log/warn (.getMessage e))))))
-
-(defn- parse-filter-clause:filter [{:keys [compound-type subclauses subclause], :as clause}]
-  {:pre [clause]}
-  (case compound-type
-    :and {:type :and, :fields (filterv identity (map parse-filter-clause:filter subclauses))}
-    :or  {:type :or,  :fields (filterv identity (map parse-filter-clause:filter subclauses))}
-    :not (when-let [subclause (parse-filter-clause:filter subclause)]
-           (filter:not subclause))
-    nil  (parse-filter-subclause:filter clause)))
+(defn- filter-fields-are-dimensions? [fields]
+  (reduce
+   #(and %1 %2)
+   true
+   (for [field fields]
+     (or
+      (not= (dimension-or-metric? field) :metric)
+      (log/warn
+       (u/format-color 'red
+           (tru "WARNING: Filtering only works on dimensions! ''{0}'' is a metric. Ignoring filter."
+                (->rvalue field))))))))
+
+(defmulti ^:private parse-filter
+  ;; dispatch function first checks to make sure this is a valid filter clause, then dispatches off of the clause name
+  ;; if it is.
+  (fn [[clause-name & args, :as filter-clause]]
+    (let [fields (filter (partial mbql.u/is-clause? #{:field-id :datetime-field}) args)]
+      (when (and
+             ;; make sure all Field args are dimensions
+             (filter-fields-are-dimensions? fields)
+             ;; and make sure none of the Fields are datetime Fields
+             ;; We'll handle :timestamp separately. It needs to go in :intervals instead
+             (not-any? (partial mbql.u/is-clause? :datetime-field) fields))
+        clause-name))))
+
+(defmethod parse-filter nil [_] nil)
+
+(defmethod parse-filter :between [[_ field min-val max-val]]
+  (filter:bound field, :lower min-val, :upper max-val))
+
+(defmethod parse-filter :contains [[_ field string-or-field options]]
+  {:type      :search
+   :dimension (->rvalue field)
+   :query     {:type          :contains
+               :value         (->rvalue string-or-field)
+               :caseSensitive (get options :case-sensitive true)}})
+
+(defmethod parse-filter :starts-with [[_ field string-or-field options]]
+  (filter:like field
+               (str (escape-like-filter-pattern (->rvalue string-or-field)) \%)
+               (get options :case-sensitive true)))
+
+(defmethod parse-filter :ends-with [[_ field string-or-field options]]
+  (filter:like field
+               (str \% (escape-like-filter-pattern (->rvalue string-or-field)))
+               (get options :case-sensitive true)))
+
+(defmethod parse-filter := [[_ field value-or-field]]
+  (filter:= field value-or-field))
+
+(defmethod parse-filter :!= [[_ field value-or-field]]
+  (filter:not (filter:= field value-or-field)))
+
+(defmethod parse-filter :< [[_ field value-or-field]]
+  (filter:bound field, :upper value-or-field, :inclusive? false))
+
+(defmethod parse-filter :> [[_ field value-or-field]]
+  (filter:bound field, :lower value-or-field, :inclusive? false))
+
+(defmethod parse-filter :<= [[_ field value-or-field]]
+  (filter:bound field, :upper value-or-field))
+
+(defmethod parse-filter :>= [[_ field value-or-field]]
+  (filter:bound field, :lower value-or-field))
+
+(defmethod parse-filter :and [[_ & args]]
+  {:type :and, :fields (filterv identity (map parse-filter args))})
+
+(defmethod parse-filter :or [[_ & args]]
+  {:type :or, :fields (filterv identity (map parse-filter args))})
+
+(defmethod parse-filter :not [[_ subclause]]
+  (when-let [subclause (parse-filter subclause)]
+    (filter:not subclause)))
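Putting the dispatch guard and the methods together, a hedged sketch: assume field 1 is a Text dimension named "name" and field 2 is a Float metric, so the `:>` subclause dispatches to the nil method (after logging a warning) and is dropped from the compound filter:

```clojure
(parse-filter [:and
               [:= [:field-id 1] "Bird Shop"]
               [:> [:field-id 2] 10]])
;; => {:type   :and
;;     :fields [{:type :selector, :dimension "name", :value "Bird Shop"}]}
```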
 
 
 (defn- make-intervals
@@ -603,34 +681,26 @@
                (when (seq more)
                  (apply make-intervals more)))))
 
-
-(defn- parse-filter-subclause:intervals [{:keys [filter-type field value] :as filter}]
-  (when (instance? DateTimeField field)
+(defn- parse-filter-subclause:intervals [[filter-type field value maybe-max-value]]
+  (when (mbql.u/is-clause? :datetime-field field)
     (case filter-type
       ;; BETWEEN "2015-12-09", "2015-12-11" -> ["2015-12-09/2015-12-12"], because BETWEEN is inclusive
-      :between  (let [{:keys [min-val max-val]} filter]
-                  (make-intervals min-val (i/add-date-time-units max-val 1)))
+      :between (make-intervals value (mbql.u/add-datetime-units maybe-max-value 1))
       ;; =  "2015-12-11" -> ["2015-12-11/2015-12-12"]
-      :=        (make-intervals value (i/add-date-time-units value 1))
+      :=       (make-intervals value (mbql.u/add-datetime-units value 1))
       ;; != "2015-12-11" -> ["-5000/2015-12-11", "2015-12-12/5000"]
-      :!=       (make-intervals nil value, (i/add-date-time-units value 1) nil)
+      :!=      (make-intervals nil value, (mbql.u/add-datetime-units value 1) nil)
       ;; >  "2015-12-11" -> ["2015-12-12/5000"]
-      :>        (make-intervals (i/add-date-time-units value 1) nil)
+      :>       (make-intervals (mbql.u/add-datetime-units value 1) nil)
       ;; >= "2015-12-11" -> ["2015-12-11/5000"]
-      :>=       (make-intervals value nil)
+      :>=      (make-intervals value nil)
       ;; <  "2015-12-11" -> ["-5000/2015-12-11"]
-      :<        (make-intervals nil value)
+      :<       (make-intervals nil value)
       ;; <= "2015-12-11" -> ["-5000/2015-12-12"]
-      :<=       (make-intervals nil (i/add-date-time-units value 1))
-      ;; This is technically allowed by the QL here but doesn't make sense since every Druid event has a timestamp.
-      ;; Just ignore it
-      :is-null  (log/warn (u/format-color 'red (str "WARNING: timestamps can never be nil. Ignoring IS_NULL filter "
-                                                    "for timestamp.")))
-      ;; :timestamp is always non-nil so nothing to do here
-      :not-null nil)))
-
-(defn- parse-filter-clause:intervals [{:keys [compound-type subclauses], :as clause}]
-  (if-not compound-type
+      :<=      (make-intervals nil (mbql.u/add-datetime-units value 1)))))
+
+(defn- parse-filter-clause:intervals [[compound-type & subclauses, :as clause]]
+  (if-not (#{:and :or :not} compound-type)
     (parse-filter-subclause:intervals clause)
     (let [subclauses (filterv identity (mapcat parse-filter-clause:intervals subclauses))]
       (when (seq subclauses)
@@ -638,75 +708,91 @@
           ;; A date can't be in more than one interval, so ANDing them together doesn't really make sense. In this
           ;; situation, just ignore all intervals after the first
           :and (do (when (> (count subclauses) 1)
-                     (log/warn (u/format-color 'red (str "WARNING: A date can't belong to multiple discrete "
-                                                         "intervals, so ANDing them together doesn't make sense.\n"
-                                                         "Ignoring these intervals: %s") (rest subclauses))))
+                     (log/warn
+                      (u/format-color 'red
+                          (str
+                           (tru "WARNING: A date can't belong to multiple discrete intervals, so ANDing them together doesn't make sense.")
+                           "\n"
+                           (tru "Ignoring these intervals: {0}" (rest subclauses)))))))
                    [(first subclauses)])
           ;; Ok to specify multiple intervals for OR
           :or  subclauses
           ;; We should never get to this point since all non-string negations should get automatically rewritten
           ;; by the query expander.
-          :not (log/warn (u/format-color 'red "WARNING: Don't know how to negate: %s" clause)))))))
+          :not (log/warn (u/format-color 'red (tru "WARNING: Don't know how to negate: {0}" clause))))))))
 
 
 (defn- handle-filter [_ {filter-clause :filter} query-context]
   (if-not filter-clause
     query-context
-    (let [filter    (parse-filter-clause:filter    filter-clause)
+    (let [filter    (parse-filter    filter-clause)
           intervals (parse-filter-clause:intervals filter-clause)]
       (cond-> query-context
         (seq filter)    (assoc-in [:query :filter] filter)
         (seq intervals) (assoc-in [:query :intervals] intervals)))))
 
 
-;;; ### handle-order-by
+;;; ------------------------------------------------ handle-order-by -------------------------------------------------
 
 (defmulti ^:private handle-order-by query-type-dispatch-fn)
 
 (defmethod handle-order-by ::query [_ _ query-context]
-  (log/warn (u/format-color 'red (str "Sorting with Druid is only allowed in queries that have one or more breakout "
-                                      "columns. Ignoring :order-by clause.")))
+  (log/warn
+   (u/format-color 'red
+       (tru "Sorting with Druid is only allowed in queries that have one or more breakout columns. Ignoring :order-by clause.")))
   query-context)
 
 
 (defmethod handle-order-by ::topN
   [_
-   {[{ag-type :aggregation-type}] :aggregation, [breakout-field] :breakout, [{:keys [field direction]}] :order-by}
+   {[[ag-type]] :aggregation, [breakout-field] :breakout, [[direction field]] :order-by}
    query-context]
   (let [field             (->rvalue field)
         breakout-field    (->rvalue breakout-field)
         sort-by-breakout? (= field breakout-field)
         ag-field          (if (= ag-type :distinct) :distinct___count ag-type)]
     (assoc-in query-context [:query :metric] (match [sort-by-breakout? direction]
-                                               [true  :ascending]  {:type :alphaNumeric}
-                                               [true  :descending] {:type :inverted, :metric {:type :alphaNumeric}}
-                                               [false :ascending]  {:type :inverted, :metric ag-field}
-                                               [false :descending] ag-field))))
+                                               [true  :asc]  {:type :alphaNumeric}
+                                               [true  :desc] {:type :inverted, :metric {:type :alphaNumeric}}
+                                               [false :asc]  {:type :inverted, :metric ag-field}
+                                               [false :desc] ag-field))))
 
 (defmethod handle-order-by ::groupBy [_ {:keys [order-by]} query-context]
-  (assoc-in query-context [:query :limitSpec :columns] (vec (for [{:keys [field direction]} order-by]
+  (assoc-in query-context [:query :limitSpec :columns] (vec (for [[direction field] order-by]
                                                               {:dimension (->rvalue field)
-                                                               :direction direction}))))
+                                                               :direction (case direction
+                                                                            :desc :descending
+                                                                            :asc  :ascending)}))))
+
+(defn- datetime-field?
+  "Similar to `mbql.u/datetime-field?`, but works both on `:datetime-field` clauses and on `:field-id` clauses whose
+  underlying Field is a datetime."
+  [field]
+  (when field
+    (or (mbql.u/is-clause? :datetime-field field)
+        (mbql.u/datetime-field? (qp.store/field (second field))))))
 
 ;; Handle order by timestamp field
 (defn- handle-order-by-timestamp [field direction query-context]
-  (assoc-in query-context [:query :descending] (and (instance? DateTimeField field)
-                                                    (= direction :descending))))
+  (assoc-in query-context [:query :descending] (and (datetime-field? field)
+                                                    (= direction :desc))))
 
-(defmethod handle-order-by ::grouped-timeseries [_ {[{field :field, direction :direction}] :order-by} query-context]
+(defmethod handle-order-by ::grouped-timeseries [_ {[[direction field]] :order-by} query-context]
   (handle-order-by-timestamp field direction query-context))
 
-(defmethod handle-order-by ::select [_ {[{field :field, direction :direction}] :order-by} query-context]
+(defmethod handle-order-by ::select [_ {[[direction field]] :order-by} query-context]
   (handle-order-by-timestamp field direction query-context))
 
-;;; ### handle-fields
+
+;;; ------------------------------------------------- handle-fields --------------------------------------------------
 
 (defmulti ^:private handle-fields query-type-dispatch-fn)
 
 (defmethod handle-fields ::query [_ {fields :fields} query-context]
   (when fields
-    (log/warn (u/format-color 'red (str "WARNING: It only makes sense to specify :fields for a bare rows query. "
-                                        "Ignoring the clause."))))
+    (log/warn
+     (u/format-color 'red
+         ;; TODO - this is not really true, is it?
+         (tru "WARNING: It only makes sense to specify :fields for a query with no aggregation. Ignoring the clause."))))
   query-context)
 
 (defmethod handle-fields ::select [_ {fields :fields} query-context]
@@ -724,17 +810,20 @@
             (assoc-in [:query :dimensions] (or (seq dimensions) [:___dummy]))
             (assoc-in [:query :metrics]    (or (seq metrics)    [:___dummy])))
 
-        (instance? DateTimeField field)
+        (datetime-field? field)
         (recur dimensions metrics projections more)
 
         (= (dimension-or-metric? field) :dimension)
-        (recur (conj dimensions (->rvalue field)) metrics (conj projections (keyword (name field))) more)
+        (recur (conj dimensions (->rvalue field)) metrics (conj projections (keyword (field-clause->name field))) more)
 
         (= (dimension-or-metric? field) :metric)
-        (recur dimensions (conj metrics (->rvalue field)) (conj projections (keyword (name field))) more)))))
+        (recur dimensions (conj metrics (->rvalue field)) (conj projections (keyword (field-clause->name field))) more)
+
+        :else
+        (throw (Exception. "bad field"))))))
 
 
-;;; ### handle-limit
+;;; -------------------------------------------------- handle-limit --------------------------------------------------
 
 (defmulti ^:private handle-limit query-type-dispatch-fn)
 
@@ -745,8 +834,9 @@
 
 (defmethod handle-limit ::timeseries [_ {limit :limit} query-context]
   (when limit
-    (log/warn (u/format-color 'red (str "WARNING: Druid doenst allow limitSpec in timeseries queries. Ignoring the "
-                                        "LIMIT clause."))))
+    (log/warn
+     (u/format-color 'red
+         (tru "WARNING: Druid does not allow limitSpec in time series queries. Ignoring the LIMIT clause."))))
   query-context)
 
 (defmethod handle-limit ::topN [_ {limit :limit} query-context]
@@ -762,7 +852,9 @@
         (assoc-in [:query :limitSpec :limit] limit))))
 
 
-;;; ### handle-page TODO - no real way to implement this DB side, probably have to do Clojure-side w/ `take`/`drop`
+;;; -------------------------------------------------- handle-page ---------------------------------------------------
+
+;; TODO - no real way to implement this DB side, probably have to do Clojure-side w/ `take`/`drop`
 
 (defmulti ^:private handle-page query-type-dispatch-fn)
 
@@ -779,13 +871,13 @@
 
 (defn- druid-query-type
   "What type of Druid query type should we perform?"
-  [{breakout-fields :breakout, [{ag-type :aggregation-type}] :aggregation, limit :limit}]
+  [{breakout-fields :breakout, [[ag-type]] :aggregation, limit :limit}]
   (let [breakouts (condp = (count breakout-fields)
                     0 :none
                     1 :one
                       :many)
         agg?      (boolean ag-type)
-        ts?       (and (instance? DateTimeField (first breakout-fields))            ; Checks whether the query is a timeseries
+        ts?       (and (mbql.u/is-clause? :datetime-field (first breakout-fields))  ; Checks whether the query is a timeseries
                        (contains? timeseries-units (:unit (first breakout-fields))) ; (excludes x-of-y type breakouts)
                        (nil? limit))]                                               ; (excludes queries with LIMIT)
     (match [breakouts agg? ts?]
@@ -801,10 +893,10 @@
   (let [query-type (druid-query-type query)]
     (reduce (fn [query-context f]
               (f query-type query query-context))
-            {:projections [] :query (query-type->default-query query-type) :query-type query-type :mbql? true}
+            {:projections [], :query (query-type->default-query query-type), :query-type query-type, :mbql? true}
             [handle-source-table
-             handle-aggregations
              handle-breakout
+             handle-aggregations
              handle-filter
              handle-order-by
              handle-fields
@@ -812,7 +904,7 @@
              handle-page])))
 
 
-;;;  ### post-processing
+;;; ------------------------------------------------ post-processing -------------------------------------------------
 
 (defmulti ^:private post-process query-type-dispatch-fn)
 
@@ -849,10 +941,10 @@
          (map (comp update-ts-fn :event))
          (post-process-map projections))))
 
-(defmethod post-process ::total   [_ projections _ results]
+(defmethod post-process ::total [_ projections _ results]
   (post-process-map projections (map :result results)))
 
-(defmethod post-process ::topN    [_ projections {:keys [middleware]} results]
+(defmethod post-process ::topN [_ projections {:keys [middleware]} results]
   (post-process-map projections
                     (let [results (-> results first :result)]
                       (if (:format-rows? middleware true)
@@ -886,14 +978,9 @@
 (defn- remove-bonus-keys
   "Remove keys that start with `___` from the results -- they were temporary, and we don't want to return them."
   [columns]
-  (let [keys-to-remove (for [k     columns
-                             :when (re-find #"^___" (name k))]
-                         k)]
-    (if-not (seq keys-to-remove)
-      columns
-      (filterv (complement (set keys-to-remove)) columns))))
+  (vec (remove #(re-find #"^___" (name %)) columns)))
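+;; Quick sanity check of the simplification above, with hypothetical column keys -- only keys that
+;; *start* with `___` are dropped:
+;;
+;;    (remove-bonus-keys [:id :___count :timestamp___int])
+;;    ;; -> [:id :timestamp___int]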
 
-;;; ### MBQL Processor
+;;; ------------------------------------------------- MBQL Processor -------------------------------------------------
 
 (defn mbql->native
   "Transpile an MBQL (inner) query into a native form suitable for a Druid DB."
@@ -940,12 +1027,12 @@
 (defn execute-query
   "Execute a query for a Druid DB."
   [do-query
-   {database                                     :database
+   {database-id                                  :database
     {:keys [query query-type mbql? projections]} :native
     middleware                                   :middleware
     :as                                          query-context}]
-  {:pre [database query]}
-  (let [details       (:details database)
+  {:pre [query]}
+  (let [details       (:details (qp.store/database))
         query         (if (string? query)
                         (json/parse-string query keyword)
                         query)
@@ -965,8 +1052,9 @@
     ;; rename any occurrences of `:timestamp___int` to `:timestamp` in the results so the user doesn't know about our
     ;; behind-the-scenes conversion and apply any other post-processing on the value such as parsing some units to int
     ;; and rounding up approximate cardinality values.
-    {:columns   (vec (replace {:timestamp___int :timestamp :distinct___count :count} columns))
-     :rows      (for [row (:results post-proc-map)]
-                  (for [getter getters]
-                    (getter row)))
-     :annotate? mbql?}))
+    {:columns (->> columns
+                   (replace {:timestamp___int :timestamp :distinct___count :count})
+                   (map u/keyword->qualified-name))
+     :rows    (for [row (:results post-proc-map)]
+                (for [getter getters]
+                  (getter row)))}))
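+;; Illustrative sketch of the column renaming above, with made-up keys: `replace` swaps the internal
+;; names, then `u/keyword->qualified-name` stringifies them for the client:
+;;
+;;    (->> [:timestamp___int :venue_price]
+;;         (replace {:timestamp___int :timestamp, :distinct___count :count})
+;;         (map u/keyword->qualified-name))
+;;    ;; -> ("timestamp" "venue_price")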
diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj
index 360d97218510fd43615d6ec32302277742958ffd..b24f39ee0599e3ac9de5824fcb920ef446b71bc3 100644
--- a/src/metabase/driver/generic_sql.clj
+++ b/src/metabase/driver/generic_sql.clj
@@ -4,30 +4,28 @@
              [set :as set]
              [string :as str]]
             [clojure.java.jdbc :as jdbc]
-            [clojure.math.numeric-tower :as math]
             [clojure.tools.logging :as log]
             [honeysql
              [core :as hsql]
              [format :as hformat]]
             [metabase
-             [db :as db]
+             [db :as mdb]
              [driver :as driver]
              [util :as u]]
             [metabase.models
-             [field :as field]
-             [table :as table]]
-            metabase.query-processor.interface
+             [database :refer [Database]]
+             [field :as field]]
             [metabase.util
              [honeysql-extensions :as hx]
              [ssh :as ssh]]
-            [schema.core :as s])
+            [schema.core :as s]
+            [toucan.db :as db])
   (:import [clojure.lang Keyword PersistentVector]
            com.mchange.v2.c3p0.ComboPooledDataSource
            honeysql.types.SqlCall
            [java.sql DatabaseMetaData ResultSet]
            [java.util Date Map]
-           metabase.models.field.FieldInstance
-           [metabase.query_processor.interface Field Value]))
+           metabase.models.field.FieldInstance))
 
 (defprotocol ISQLDriver
   "Methods SQL-based drivers should implement in order to use `IDriverSQLDefaultsMixin`.
@@ -86,9 +84,9 @@
      dataset name as well. (At the time of this writing, this is only used by the SQL parameters implementation; in
      the future it will probably be used in more places as well.)")
 
-  (field->alias ^String [this, ^Field field]
+  (field->alias ^String [this, ^FieldInstance field]
     "*OPTIONAL*. Return the alias that should be used to for FIELD, i.e. in an `AS` clause. The default implementation
-     calls `name`, which returns the *unqualified* name of `Field`.
+     calls `:name`, which returns the *unqualified* name of `Field`.
 
      Return `nil` to prevent FIELD from being aliased.")
 
@@ -97,7 +95,7 @@
      building a SQL statement. Defaults to `:ansi`, but other valid options are `:mysql`, `:sqlserver`, `:oracle`, and
      `:h2` (added in `metabase.util.honeysql-extensions`; like `:ansi`, but uppercases the result).
 
-        (hsql/format ... :quoting (quote-style driver))")
+        (hsql/format ... :quoting (quote-style driver), :allow-dashed-names? true)")
 
   (set-timezone-sql ^String [this]
     "*OPTIONAL*. This should be a format string containing a SQL statement to be used to set the timezone for the
@@ -105,10 +103,6 @@
 
        \"SET @@session.timezone = %s;\"")
 
-  (stddev-fn ^clojure.lang.Keyword [this]
-    "*OPTIONAL*. Keyword name of the SQL function that should be used to do a standard deviation aggregation. Defaults
-     to `:STDDEV`.")
-
   (string-length-fn ^clojure.lang.Keyword [this, ^Keyword field-key]
     "Return a HoneySQL form appropriate for getting the length of a `Field` identified by fully-qualified FIELD-KEY.
      An implementation should return something like:
@@ -133,16 +127,17 @@
 
 (defn- create-connection-pool
   "Create a new C3P0 `ComboPooledDataSource` for connecting to the given DATABASE."
-  [{:keys [id engine details]}]
+  [{:keys [id engine details], :as database}]
+  {:pre [(map? database)]}
   (log/debug (u/format-color 'cyan "Creating new connection pool for database %d ..." id))
   (let [details-with-tunnel (ssh/include-ssh-tunnel details) ;; If the tunnel is disabled this is returned unchanged
-        spec (connection-details->spec (driver/engine->driver engine) details-with-tunnel)]
-    (assoc (db/connection-pool (assoc spec
-                                 :minimum-pool-size           1
-                                 ;; prevent broken connections closed by dbs by testing them every 3 mins
-                                 :idle-connection-test-period (* 3 60)
-                                 ;; prevent overly large pools by condensing them when connections are idle for 15m+
-                                 :excess-timeout              (* 15 60)))
+        spec                (connection-details->spec (driver/engine->driver engine) details-with-tunnel)]
+    (assoc (mdb/connection-pool (assoc spec
+                                  :minimum-pool-size           1
+                                  ;; prevent broken connections closed by dbs by testing them every 3 mins
+                                  :idle-connection-test-period (* 3 60)
+                                  ;; prevent overly large pools by condensing them when connections are idle for 15m+
+                                  :excess-timeout              (* 15 60)))
       :ssh-tunnel (:tunnel-connection details-with-tunnel))))
 
 (defn- notify-database-updated
@@ -161,18 +156,20 @@
 (defn db->pooled-connection-spec
   "Return a JDBC connection spec that includes a cp30 `ComboPooledDataSource`.
    Theses connection pools are cached so we don't create multiple ones to the same DB."
-  [{:keys [id], :as database}]
-  (if (contains? @database-id->connection-pool id)
+  [database-or-id]
+  (if (contains? @database-id->connection-pool (u/get-id database-or-id))
     ;; we have an existing pool for this database, so use it
-    (get @database-id->connection-pool id)
+    (get @database-id->connection-pool (u/get-id database-or-id))
     ;; create a new pool and add it to our cache, then return it
-    (u/prog1 (create-connection-pool database)
-      (swap! database-id->connection-pool assoc id <>))))
+    (let [db (if (map? database-or-id) database-or-id (db/select-one [Database :id :engine :details]
+                                                        :id database-or-id))]
+      (u/prog1 (create-connection-pool db)
+        (swap! database-id->connection-pool assoc (u/get-id database-or-id) <>)))))
 
 (defn db->jdbc-connection-spec
   "Return a JDBC connection spec for DATABASE. This will have a C3P0 pool as its datasource."
-  [{:keys [engine details], :as database}]
-  (db->pooled-connection-spec database))
+  [database-or-id]
+  (db->pooled-connection-spec database-or-id))
 
 (defn handle-additional-options
   "If DETAILS contains an `:addtional-options` key, append those options to the connection string in CONNECTION-SPEC.
@@ -241,7 +238,7 @@
 
 (def ^:private ^:dynamic *jdbc-options* {})
 
-(defn- query
+(defn query
   "Execute a HONEYSQL-FROM query against DATABASE, DRIVER, and optionally TABLE."
   ([driver database honeysql-form]
    (jdbc/query (db->jdbc-connection-spec database)
@@ -273,14 +270,16 @@
 
 ;;; ## Database introspection methods used by sync process
 
-(defmacro with-metadata
+;; Don't use this anymore! Use the new `jdbc/with-db-metadata` fn
+(defmacro ^:deprecated with-metadata
   "Execute BODY with `java.sql.DatabaseMetaData` for DATABASE."
   [[binding _ database] & body]
   `(with-open [^java.sql.Connection conn# (jdbc/get-connection (db->jdbc-connection-spec ~database))]
      (let [~binding (.getMetaData conn#)]
        ~@body)))
 
-(defmacro ^:private with-resultset-open
+;; Don't use this anymore! You can just use `with-metadata` and `jdbc/result-set-seq` instead!!!
+(defmacro ^:private ^:deprecated with-resultset-open
   "This is like `with-open` but with JDBC ResultSet objects. Will execute `body` with a `jdbc/result-set-seq` bound
   the the symbols provided in the binding form. The binding form is just like `let` or `with-open`, but yield a
   `ResultSet`. That `ResultSet` will be closed upon exit of `body`."
@@ -291,10 +290,15 @@
        (let ~(vec (interleave (map first binding-pairs) (map #(list `~jdbc/result-set-seq %) rs-syms)))
          ~@body))))
 
+(defn get-catalogs
+  "Returns a set of all of the catalogs found via `metadata`"
+  [^DatabaseMetaData metadata]
+  (set (map :table_cat (jdbc/result-set-seq (.getCatalogs metadata)))))
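+;; Sketch of intended usage, assuming an open metadata handle (catalog names are made up):
+;;
+;;    (jdbc/with-db-metadata [metadata (db->jdbc-connection-spec database)]
+;;      (get-catalogs metadata))
+;;    ;; -> #{"MY_DB" "OTHER_DB"}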
+
 (defn- get-tables
   "Fetch a JDBC Metadata ResultSet of tables in the DB, optionally limited to ones belonging to a given schema."
-  ^ResultSet [^DatabaseMetaData metadata, ^String schema-or-nil]
-  (with-resultset-open [rs-seq (.getTables metadata nil schema-or-nil "%" ; tablePattern "%" = match all tables
+  ^ResultSet [^DatabaseMetaData metadata, ^String schema-or-nil, ^String database-name-or-nil]
+  (with-resultset-open [rs-seq (.getTables metadata database-name-or-nil schema-or-nil "%" ; tablePattern "%" = match all tables
                                            (into-array String ["TABLE", "VIEW", "FOREIGN TABLE", "MATERIALIZED VIEW"]))]
     ;; Ensure we read all rows before exiting
     (doall rs-seq)))
@@ -306,12 +310,12 @@
 
    This is as much as 15x faster for Databases with lots of system tables than `post-filtered-active-tables` (4
    seconds vs 60)."
-  [driver, ^DatabaseMetaData metadata]
+  [driver, ^DatabaseMetaData metadata, & [database-name-or-nil]]
   (with-resultset-open [rs-seq (.getSchemas metadata)]
     (let [all-schemas (set (map :table_schem rs-seq))
           schemas     (set/difference all-schemas (excluded-schemas driver))]
       (set (for [schema schemas
-                 table  (get-tables metadata schema)]
+                 table  (get-tables metadata schema database-name-or-nil)]
              (let [remarks (:remarks table)]
                {:name        (:table_name table)
                 :schema      schema
@@ -321,9 +325,9 @@
 (defn post-filtered-active-tables
   "Alternative implementation of `ISQLDriver/active-tables` best suited for DBs with little or no support for schemas.
    Fetch *all* Tables, then filter out ones whose schema is in `excluded-schemas` Clojure-side."
-  [driver, ^DatabaseMetaData metadata]
+  [driver, ^DatabaseMetaData metadata  & [database-name-or-nil]]
   (set (for [table   (filter #(not (contains? (excluded-schemas driver) (:table_schem %)))
-                             (get-tables metadata nil))]
+                             (get-tables metadata nil nil))]
          (let [remarks (:remarks table)]
            {:name        (:table_name  table)
             :schema      (:table_schem table)
@@ -346,8 +350,10 @@
       (str "Invalid type: " special-type))
     special-type))
 
-(defn- describe-table-fields [^DatabaseMetaData metadata, driver, {schema :schema, table-name :name}]
-  (with-resultset-open [rs-seq (.getColumns metadata nil schema table-name nil)]
+(defn describe-table-fields
+  "Returns a set of column metadata for `schema` and `table-name` using `metadata`. "
+  [^DatabaseMetaData metadata, driver, {schema :schema, table-name :name}, & [database-name-or-nil]]
+  (with-resultset-open [rs-seq (.getColumns metadata database-name-or-nil schema table-name nil)]
     (set (for [{database-type :type_name, column-name :column_name, remarks :remarks} rs-seq]
            (merge {:name          column-name
                    :database-type database-type
@@ -357,7 +363,8 @@
                   (when-let [special-type (calculated-special-type driver column-name database-type)]
                     {:special-type special-type}))))))
 
-(defn- add-table-pks
+(defn add-table-pks
+  "Using `metadata` find any primary keys for `table` and assoc `:pk?` to true for those columns."
   [^DatabaseMetaData metadata, table]
   (with-resultset-open [rs-seq (.getPrimaryKeys metadata nil nil (:name table))]
     (let [pks (set (map :column_name rs-seq))]
@@ -383,9 +390,11 @@
          ;; find PKs and mark them
          (add-table-pks metadata))))
 
-(defn- describe-table-fks [driver database table]
+(defn describe-table-fks
+  "Default implementation of `describe-table-fks` for JDBC based drivers."
+  [driver database table & [database-name-or-nil]]
   (with-metadata [metadata driver database]
-    (with-resultset-open [rs-seq (.getImportedKeys metadata nil (:schema table) (:name table))]
+    (with-resultset-open [rs-seq (.getImportedKeys metadata database-name-or-nil (:schema table) (:name table))]
       (set (for [result rs-seq]
              {:fk-column-name   (:fkcolumn_name result)
               :dest-table       {:name   (:pktable_name result)
@@ -418,7 +427,7 @@
 (s/defn ^:private honeysql->prepared-stmt-subs
   "Convert X to a replacement snippet info map by passing it to HoneySQL's `format` function."
   [driver x]
-  (let [[snippet & args] (hsql/format x, :quoting (quote-style driver))]
+  (let [[snippet & args] (hsql/format x, :quoting (quote-style driver), :allow-dashed-names? true)]
     (make-stmt-subs snippet args)))
 
 (s/defmethod ->prepared-substitution [Object nil] :- PreparedStatementSubstitution
@@ -469,10 +478,9 @@
    :current-datetime-fn  (constantly :%now)
    :excluded-schemas     (constantly nil)
    :field->identifier    (u/drop-first-arg (comp (partial apply hsql/qualify) field/qualified-name-components))
-   :field->alias         (u/drop-first-arg name)
+   :field->alias         (u/drop-first-arg :name)
    :quote-style          (constantly :ansi)
-   :set-timezone-sql     (constantly nil)
-   :stddev-fn            (constantly :STDDEV)})
+   :set-timezone-sql     (constantly nil)})
 
 
 (defn IDriverSQLDefaultsMixin
diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj
index cfa8b84ed2ab69b0ff501e3b39b203bed08e17ad..4092e3ab980af39490546aedd13f2a2555019f69 100644
--- a/src/metabase/driver/generic_sql/query_processor.clj
+++ b/src/metabase/driver/generic_sql/query_processor.clj
@@ -10,20 +10,24 @@
              [driver :as driver]
              [util :as u]]
             [metabase.driver.generic-sql :as sql]
-            [metabase.mbql.util :as mbql.u]
+            [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
             [metabase.query-processor
-             [annotate :as annotate]
              [interface :as i]
              [store :as qp.store]
              [util :as qputil]]
+            [metabase.query-processor.middleware.annotate :as annotate]
             [metabase.util
              [date :as du]
              [honeysql-extensions :as hx]
-             [i18n :refer [trs]]])
+             [i18n :refer [tru]]]
+            [schema.core :as s])
   (:import [java.sql PreparedStatement ResultSet ResultSetMetaData SQLException]
-           [java.util Calendar Date TimeZone]
-           [metabase.query_processor.interface AgFieldRef BinnedField DateTimeField DateTimeValue Expression
-            ExpressionRef Field FieldLiteral JoinQuery JoinTable RelativeDateTimeValue TimeField TimeValue Value]))
+           [java.util Calendar Date TimeZone]))
 
 ;; TODO - yet another `*query*` dynamic var. We should really consolidate them all so we only need a single one.
 (def ^:dynamic *query*
@@ -36,84 +40,104 @@
   Each nested query increments this counter by 1."
   0)
 
-;;; ## Formatting
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                Other Formatting                                                |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
-(defn- qualified-alias
-  "Convert the given `FIELD` to a stringified alias"
-  [driver field]
+
+(s/defn ^:private qualified-alias
+  "Convert the given `FIELD` to a stringified alias, for use in a SQL `AS` clause."
+  [driver, field :- (class Field)]
   (some->> field
            (sql/field->alias driver)
            hx/qualify-and-escape-dots))
 
 (defn as
   "Generate a FORM `AS` FIELD alias using the name information of FIELD."
-  [driver form field]
-  (if-let [alias (qualified-alias driver field)]
-    [form alias]
-    form))
-
-;; TODO - Consider moving this into query processor interface and making it a method on `ExpressionRef` instead ?
-(defn- expression-with-name
-  "Return the `Expression` referenced by a given (keyword or string) EXPRESSION-NAME."
-  [expression-name]
-  (or (get-in *query* [:query :expressions (keyword expression-name)]) (:expressions (:query *query*))
-      (throw (Exception. (format "No expression named '%s'." (name expression-name))))))
-
-(defn- aggregation-at-index
-  "Fetch the aggregation at index. This is intended to power aggregate field references (e.g. [:aggregation 0]).
-   This also handles nested queries, which could be potentially ambiguous if multiple levels had aggregations."
-  ([index]
-   (aggregation-at-index index (:query *query*) *nested-query-level*))
-  ;; keep recursing deeper into the query until we get to the same level the aggregation reference was defined at
-  ([index query aggregation-level]
-   (if (zero? aggregation-level)
-     (nth (:aggregation query) index)
-     (recur index (:source-query query) (dec aggregation-level)))))
-
-(defmulti ^{:doc          (str "Return an appropriate HoneySQL form for an object. Dispatches off both driver and object "
-                               "classes making this easy to override in any places needed for a given driver.")
+  [driver form field-clause]
+  (let [expression-name (when (mbql.u/is-clause? :expression field-clause)
+                          (second field-clause))
+        field           (when-not expression-name
+                          (let [id-or-name (mbql.u/field-clause->id-or-literal field-clause)]
+                            (when (integer? id-or-name)
+                              (qp.store/field id-or-name))))]
+    (if-let [alias (cond
+                     expression-name expression-name
+                     field           (qualified-alias driver field))]
+      [form alias]
+      form)))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                              ->honeysql multimethod def & low-level method impls                               |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; this is the primary way to override behavior for a specific clause or object class.
+
+(defmulti ^{:doc (str "Return an appropriate HoneySQL form for an object. Dispatches off both driver and either clause "
+                      "name or object class making this easy to override in any places needed for a given driver.")
             :arglists     '([driver x])
             :style/indent 1}
   ->honeysql
   (fn [driver x]
-    [(class driver) (class x)]))
+    [(class driver) (mbql.u/dispatch-by-clause-name-or-class x)]))
 
 (defmethod ->honeysql [Object nil]    [_ _]    nil)
 (defmethod ->honeysql [Object Object] [_ this] this)
 
-(defmethod ->honeysql [Object Expression]
-  [driver {:keys [operator args]}]
-  (apply (partial hsql/call operator)
-         (map (partial ->honeysql driver) args)))
+(defmethod ->honeysql [Object :value] [driver [_ value]] (->honeysql driver value))
 
-(defmethod ->honeysql [Object ExpressionRef]
-  [driver {:keys [expression-name]}]
+(defmethod ->honeysql [Object :expression]
+  [driver [_ expression-name]]
   ;; Unfortunately you can't just refer to the expression by name in other clauses like filter, but have to use the
   ;; original formula.
-  (->honeysql driver (expression-with-name expression-name)))
+  (->honeysql driver (mbql.u/expression-with-name *query* expression-name)))
 
-(defmethod ->honeysql [Object Field]
-  [driver {:keys [schema-name table-name special-type field-name]}]
-  (let [field (keyword (hx/qualify-and-escape-dots schema-name table-name field-name))]
-    (cond
-      (isa? special-type :type/UNIXTimestampSeconds)      (sql/unix-timestamp->timestamp driver field :seconds)
-      (isa? special-type :type/UNIXTimestampMilliseconds) (sql/unix-timestamp->timestamp driver field :milliseconds)
-      :else                                               field)))
+(defn cast-unix-timestamp-field-if-needed
+  "Wrap a `field-identifier` in appropriate HoneySQL expressions if it refers to a UNIX timestamp Field."
+  [driver field field-identifier]
+  (condp #(isa? %2 %1) (:special_type field)
+    :type/UNIXTimestampSeconds      (sql/unix-timestamp->timestamp driver field-identifier :seconds)
+    :type/UNIXTimestampMilliseconds (sql/unix-timestamp->timestamp driver field-identifier :milliseconds)
+    field-identifier))
 
-(defmethod ->honeysql [Object FieldLiteral]
-  [driver {:keys [field-name]}]
+(defmethod ->honeysql [Object (class Field)]
+  [driver field]
+  (let [table            (qp.store/table (:table_id field))
+        field-identifier (keyword (hx/qualify-and-escape-dots (:schema table) (:name table) (:name field)))]
+    (cast-unix-timestamp-field-if-needed driver field field-identifier)))
+
+(defmethod ->honeysql [Object :field-id]
+  [driver [_ field-id]]
+  (->honeysql driver (qp.store/field field-id)))
+
+(defmethod ->honeysql [Object :fk->]
+  [driver [_ _ dest-field-clause :as fk-clause]]
+  ;; because the dest field needs to be qualified like `categories__via_category_id.name` instead of the normal
+  ;; `public.category.name` we will temporarily swap out the `categories` Table in the QP store for the duration of
+  ;; converting this `fk->` clause to HoneySQL. We'll remove the `:schema` and swap out the `:name` with the alias so
+  ;; other `->honeysql` impls (e.g. the `(class Field)` one) will do the correct thing automatically without having to
+  ;; worry about the context in which they are being called
+  (qp.store/with-pushed-store
+    (when-let [{:keys [join-alias table-id]} (mbql.u/fk-clause->join-info *query* fk-clause)]
+      (when table-id
+        (qp.store/store-table! (assoc (qp.store/table table-id)
+                                 :schema nil
+                                 :name   join-alias
+                                 ;; for drivers that need to know these things, like Snowflake
+                                 :alias? true))))
+    (->honeysql driver dest-field-clause)))
+
+(defmethod ->honeysql [Object :field-literal]
+  [driver [_ field-name]]
   (->honeysql driver (keyword (hx/escape-dots (name field-name)))))
 
-(defmethod ->honeysql [Object DateTimeField]
-  [driver {unit :unit, field :field}]
+(defmethod ->honeysql [Object :datetime-field]
+  [driver [_ field unit]]
   (sql/date driver unit (->honeysql driver field)))
 
-(defmethod ->honeysql [Object TimeField]
-  [driver {field :field}]
-  (->honeysql driver field))
-
-(defmethod ->honeysql [Object BinnedField]
-  [driver {:keys [bin-width min-value max-value field] :as binned-field}]
+(defmethod ->honeysql [Object :binning-strategy]
+  [driver [_ field _ _ {:keys [bin-width min-value max-value]}]]
   (let [honeysql-field-form (->honeysql driver field)]
     ;;
     ;; Equation is | (value - min) |
@@ -127,102 +151,95 @@
         (hx/* bin-width)
         (hx/+ min-value))))
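+;; Worked example of the arithmetic above (illustrative numbers, assuming the standard floor-based
+;; binning formula): with min-value 0 and bin-width 10, a value of 37 becomes
+;; floor((37 - 0) / 10) * 10 + 0 = 30, i.e. the lower edge of its bin.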
 
-(defn- aggregation->honeysql
-  "Generate the HoneySQL form for an aggregation."
-  [driver aggregation-type field]
-  {:pre [(keyword? aggregation-type)]}
-  (if-not field
-    ;; aggregation clauses w/o a field
-    (do (assert (or (= aggregation-type :count)
-                    (= aggregation-type :cumulative-count))
-          (format "Aggregations of type '%s' must specify a field." aggregation-type))
-        :%count.*)
-    ;; aggregation clauses w/ a Field
-    (hsql/call (case aggregation-type
-                 :avg      :avg
-                 :count    :count
-                 :distinct :distinct-count
-                 :stddev   (sql/stddev-fn driver)
-                 :sum      :sum
-                 :min      :min
-                 :max      :max)
-      (->honeysql driver field))))
-
-;; TODO - can't we just roll this into the ->honeysql method for `expression`?
-(defn expression-aggregation->honeysql
-  "Generate the HoneySQL form for an expression aggregation."
-  [driver expression]
-  (->honeysql driver
-    (update expression :args
-            (fn [args]
-              (for [arg args]
-                (cond
-                  (number? arg)           arg
-                  (:aggregation-type arg) (aggregation->honeysql driver (:aggregation-type arg) (:field arg))
-                  (:operator arg)         (expression-aggregation->honeysql driver arg)))))))
-
-;; e.g. the ["aggregation" 0] fields we allow in order-by
-(defmethod ->honeysql [Object AgFieldRef]
-  [driver {index :index}]
-  (let [{:keys [aggregation-type] :as aggregation} (aggregation-at-index index)]
+
+(defmethod ->honeysql [Object :count] [driver [_ field]]
+  (if field
+    (hsql/call :count (->honeysql driver field))
+    :%count.*))
+
+(defmethod ->honeysql [Object :avg]      [driver [_ field]] (hsql/call :avg            (->honeysql driver field)))
+(defmethod ->honeysql [Object :distinct] [driver [_ field]] (hsql/call :distinct-count (->honeysql driver field)))
+(defmethod ->honeysql [Object :stddev]   [driver [_ field]] (hsql/call :stddev         (->honeysql driver field)))
+(defmethod ->honeysql [Object :sum]      [driver [_ field]] (hsql/call :sum            (->honeysql driver field)))
+(defmethod ->honeysql [Object :min]      [driver [_ field]] (hsql/call :min            (->honeysql driver field)))
+(defmethod ->honeysql [Object :max]      [driver [_ field]] (hsql/call :max            (->honeysql driver field)))
+
+(defmethod ->honeysql [Object :+] [driver [_ & args]] (apply hsql/call :+ (map (partial ->honeysql driver) args)))
+(defmethod ->honeysql [Object :-] [driver [_ & args]] (apply hsql/call :- (map (partial ->honeysql driver) args)))
+(defmethod ->honeysql [Object :*] [driver [_ & args]] (apply hsql/call :* (map (partial ->honeysql driver) args)))
+
+;; for division we want to go ahead and convert any integer args to floats, because something like field / 2 will do
+;; integer division and give us a result like 1 where we would rather see something like 1.5
+(defmethod ->honeysql [Object :/]
+  [driver [_ & args]]
+  (apply hsql/call :/ (for [arg args]
+                        (->honeysql driver (if (integer? arg)
+                                             (double arg)
+                                             arg)))))
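+;; e.g. with a hypothetical Field ID, the literal 2 below is coerced to 2.0 before compilation,
+;; so databases that would otherwise do integer division return fractional results:
+;;
+;;    (->honeysql driver [:/ [:field-id 1] 2])
+;;    ;; -> (hsql/call :/ <compiled field> 2.0)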
+
+(defmethod ->honeysql [Object :named] [driver [_ ag ag-name]]
+  (->honeysql driver ag))
+
+;; aggregation REFERENCE, e.g. the ["aggregation" 0] fields we allow in order-by
+(defmethod ->honeysql [Object :aggregation]
+  [driver [_ index]]
+  (let [aggregation (mbql.u/aggregation-at-index *query* index *nested-query-level*)]
     (cond
       ;; For some arcane reason we name the results of a distinct aggregation "count",
       ;; everything else is named the same as the aggregation
-      (= aggregation-type :distinct)
+      (mbql.u/is-clause? :distinct aggregation)
       :count
 
-      (instance? Expression aggregation)
-      (expression-aggregation->honeysql driver aggregation)
+      (mbql.u/is-clause? #{:+ :- :* :/} aggregation)
+      (->honeysql driver aggregation)
 
+      ;; for everything else just use the name of the aggregation as an identifier, e.g. `:sum`
+      ;; TODO - this obviously doesn't work right for multiple aggregations of the same type
       :else
-      aggregation-type)))
+      (first aggregation))))
 
-(defmethod ->honeysql [Object Value]
-  [driver {:keys [value]}]
-  (->honeysql driver value))
+(defmethod ->honeysql [Object :absolute-datetime]
+  [driver [_ timestamp unit]]
+  (sql/date driver unit (->honeysql driver timestamp)))
 
-(defmethod ->honeysql [Object DateTimeValue]
-  [driver {{unit :unit} :field, value :value}]
+(defmethod ->honeysql [Object :time]
+  [driver [_ value unit]]
   (sql/date driver unit (->honeysql driver value)))
 
-(defmethod ->honeysql [Object RelativeDateTimeValue]
-  [driver {:keys [amount unit], {field-unit :unit} :field}]
-  (sql/date driver field-unit (if (zero? amount)
-                                (sql/current-datetime-fn driver)
-                                (driver/date-interval driver unit amount))))
-
-(defmethod ->honeysql [Object TimeValue]
-  [driver  {:keys [value]}]
-  (->honeysql driver value))
+(defmethod ->honeysql [Object :relative-datetime]
+  [driver [_ amount unit]]
+  (sql/date driver unit (if (zero? amount)
+                          (sql/current-datetime-fn driver)
+                          (driver/date-interval driver unit amount))))
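+;; e.g. [:relative-datetime -1 :month] expands (sketch) to the driver's date-interval truncated to
+;; the same unit:
+;;
+;;    (sql/date driver :month (driver/date-interval driver :month -1))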
 
-;;; ## Clause Handlers
 
-(defn- apply-expression-aggregation [driver honeysql-form expression]
-  (h/merge-select honeysql-form [(expression-aggregation->honeysql driver expression)
-                                 (hx/escape-dots (driver/format-custom-field-name driver (annotate/aggregation-name expression)))]))
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                Clause Handlers                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
-(defn- apply-single-aggregation [driver honeysql-form {:keys [aggregation-type field], :as aggregation}]
-  (h/merge-select honeysql-form [(aggregation->honeysql driver aggregation-type field)
-                                 (hx/escape-dots (annotate/aggregation-name aggregation))]))
+;;; -------------------------------------------------- aggregation ---------------------------------------------------
 
 (defn apply-aggregation
-  "Apply a `aggregation` clauses to HONEYSQL-FORM. Default implementation of `apply-aggregation` for SQL drivers."
+  "Apply `aggregation` clauses to HONEYSQL-FORM. Default implementation of `apply-aggregation` for SQL drivers."
   [driver honeysql-form {aggregations :aggregation}]
   (loop [form honeysql-form, [ag & more] aggregations]
-    (let [form (if (instance? Expression ag)
-                 (apply-expression-aggregation driver form ag)
-                 (apply-single-aggregation driver form ag))]
+    (let [form (h/merge-select
+                form
+                [(->honeysql driver ag)
+                 (hx/escape-dots (driver/format-custom-field-name driver (annotate/aggregation-name ag)))])]
       (if-not (seq more)
         form
         (recur form more)))))
 
+;;; ----------------------------------------------- breakout & fields ------------------------------------------------
+
 (defn apply-breakout
   "Apply a `breakout` clause to HONEYSQL-FORM. Default implementation of `apply-breakout` for SQL drivers."
   [driver honeysql-form {breakout-fields :breakout, fields-fields :fields :as query}]
   (as-> honeysql-form new-hsql
-    (apply h/merge-select new-hsql (for [field breakout-fields
-                                         :when (not (contains? (set fields-fields) field))]
-                                     (as driver (->honeysql driver field) field)))
+    (apply h/merge-select new-hsql (for [field-clause breakout-fields
+                                         :when        (not (contains? (set fields-fields) field-clause))]
+                                     (as driver (->honeysql driver field-clause) field-clause)))
     (apply h/group new-hsql (map (partial ->honeysql driver) breakout-fields))))
 
 (defn apply-fields
@@ -231,101 +248,121 @@
   (apply h/merge-select honeysql-form (for [field fields]
                                         (as driver (->honeysql driver field) field))))
 
+
+;;; ----------------------------------------------------- filter -----------------------------------------------------
+
 (defn- like-clause
   "Generate a SQL `LIKE` clause. `value` is assumed to be a `Value` object (a record type with a key `:value` as well as
   some sort of type info) or similar as opposed to a raw value literal."
-  [driver field value case-sensitive?]
+  [driver field value options]
   ;; TODO - don't we need to escape underscores and percent signs in the pattern, since they have special meanings in
   ;; LIKE clauses? That's what we're doing with Druid...
   ;;
   ;; TODO - Postgres supports `ILIKE`. Does that make a big enough difference performance-wise that we should do a
   ;; custom implementation?
-  (if case-sensitive?
+  (if (get options :case-sensitive true)
     [:like field                    (->honeysql driver value)]
-    [:like (hsql/call :lower field) (->honeysql driver (update value :value str/lower-case))]))
-
-(defn filter-subclause->predicate
-  "Given a filter SUBCLAUSE, return a HoneySQL filter predicate form for use in HoneySQL `where`."
-  [driver {:keys [filter-type field value case-sensitive?], :as filter}]
-  {:pre [(map? filter) field]}
-  (let [field (->honeysql driver field)]
-    (case          filter-type
-      :between     [:between field
-                    (->honeysql driver (:min-val filter))
-                    (->honeysql driver (:max-val filter))]
-
-      :starts-with (like-clause driver field (update value :value #(str    % \%)) case-sensitive?)
-      :contains    (like-clause driver field (update value :value #(str \% % \%)) case-sensitive?)
-      :ends-with   (like-clause driver field (update value :value #(str \% %))    case-sensitive?)
-
-      :>           [:>    field (->honeysql driver value)]
-      :<           [:<    field (->honeysql driver value)]
-      :>=          [:>=   field (->honeysql driver value)]
-      :<=          [:<=   field (->honeysql driver value)]
-      :=           [:=    field (->honeysql driver value)]
-      :!=          [:not= field (->honeysql driver value)])))
-
-(defn filter-clause->predicate
-  "Given a filter CLAUSE, return a HoneySQL filter predicate form for use in HoneySQL `where`.
-   If this is a compound clause then we call `filter-subclause->predicate` on all of the subclauses."
-  [driver {:keys [compound-type subclause subclauses], :as clause}]
-  (case compound-type
-    :and (apply vector :and (map (partial filter-clause->predicate driver) subclauses))
-    :or  (apply vector :or  (map (partial filter-clause->predicate driver) subclauses))
-    :not [:not (filter-subclause->predicate driver subclause)]
-    nil  (filter-subclause->predicate driver clause)))
+    [:like (hsql/call :lower field) (->honeysql driver (update value 1 str/lower-case))]))
+
+(s/defn ^:private update-string-value :- mbql.s/value
+  [value :- (s/constrained mbql.s/value #(string? (second %)) "string value"), f]
+  (update value 1 f))
+
+(defmethod ->honeysql [Object :starts-with] [driver [_ field value options]]
+  (like-clause driver (->honeysql driver field) (update-string-value value #(str % \%)) options))
+
+(defmethod ->honeysql [Object :contains] [driver [_ field value options]]
+  (like-clause driver (->honeysql driver field) (update-string-value value #(str \% % \%)) options))
+
+(defmethod ->honeysql [Object :ends-with] [driver [_ field value options]]
+  (like-clause driver (->honeysql driver field) (update-string-value value #(str \% %)) options))
+
+(defmethod ->honeysql [Object :between] [driver [_ field min-val max-val]]
+  [:between (->honeysql driver field) (->honeysql driver min-val) (->honeysql driver max-val)])
+
+
+(defmethod ->honeysql [Object :>] [driver [_ field value]]
+  [:> (->honeysql driver field) (->honeysql driver value)])
+
+(defmethod ->honeysql [Object :<] [driver [_ field value]]
+  [:< (->honeysql driver field) (->honeysql driver value)])
+
+(defmethod ->honeysql [Object :>=] [driver [_ field value]]
+  [:>= (->honeysql driver field) (->honeysql driver value)])
+
+(defmethod ->honeysql [Object :<=] [driver [_ field value]]
+  [:<= (->honeysql driver field) (->honeysql driver value)])
+
+(defmethod ->honeysql [Object :=] [driver [_ field value]]
+  [:= (->honeysql driver field) (->honeysql driver value)])
+
+(defmethod ->honeysql [Object :!=] [driver [_ field value]]
+  [:not= (->honeysql driver field) (->honeysql driver value)])
+
+
+(defmethod ->honeysql [Object :and] [driver [_ & subclauses]]
+  (apply vector :and (map (partial ->honeysql driver) subclauses)))
+
+(defmethod ->honeysql [Object :or] [driver [_ & subclauses]]
+  (apply vector :or (map (partial ->honeysql driver) subclauses)))
+
+(defmethod ->honeysql [Object :not] [driver [_ subclause]]
+  [:not (->honeysql driver subclause)])
 
 (defn apply-filter
   "Apply a `filter` clause to HONEYSQL-FORM. Default implementation of `apply-filter` for SQL drivers."
   [driver honeysql-form {clause :filter}]
-  (h/where honeysql-form (filter-clause->predicate driver clause)))
+  (h/where honeysql-form (->honeysql driver clause)))
+
+
+;;; -------------------------------------------------- join tables ---------------------------------------------------
 
 (declare build-honeysql-form)
 
 (defn- make-honeysql-join-clauses
   "Returns a seq of honeysql join clauses, joining to `table-or-query-expr`. `jt-or-jq` can be either a `JoinTable` or
   a `JoinQuery`"
-  [table-or-query-expr {:keys [table-name pk-field source-field schema join-alias] :as jt-or-jq}]
-  (let [source-table-id                                  (mbql.u/query->source-table-id *query*)
-        {source-table-name :name, source-schema :schema} (qp.store/table source-table-id)]
+  [driver table-or-query-expr {:keys [join-alias fk-field-id pk-field-id]}]
+  (let [source-field (qp.store/field fk-field-id)
+        pk-field     (qp.store/field pk-field-id)]
     [[table-or-query-expr (keyword join-alias)]
      [:=
-      (hx/qualify-and-escape-dots source-schema source-table-name (:field-name source-field))
-      (hx/qualify-and-escape-dots join-alias (:field-name pk-field))]]))
+      (->honeysql driver source-field)
+      (hx/qualify-and-escape-dots join-alias (:name pk-field))]]))
 
-(defmethod ->honeysql [Object JoinTable]
-  ;; Returns a seq of clauses used in a honeysql join clause
-  [driver {:keys [schema table-name] :as jt} ]
-  (make-honeysql-join-clauses (hx/qualify-and-escape-dots schema table-name) jt))
-
-(defmethod ->honeysql [Object JoinQuery]
-  ;; Returns a seq of clauses used in a honeysql join clause
-  [driver {:keys [query] :as jq}]
-  (make-honeysql-join-clauses (build-honeysql-form driver query) jq))
+(s/defn ^:private join-info->honeysql
+  [driver, {:keys [query table-id], :as info} :- mbql.s/JoinInfo]
+  (if query
+    (make-honeysql-join-clauses driver (build-honeysql-form driver query) info)
+    (let [table (qp.store/table table-id)]
+      (make-honeysql-join-clauses driver (->honeysql driver table) info))))
 
 (defn apply-join-tables
   "Apply expanded query `join-tables` clause to `honeysql-form`. Default implementation of `apply-join-tables` for SQL
   drivers."
   [driver honeysql-form {:keys [join-tables]}]
-  (reduce (partial apply h/merge-left-join) honeysql-form (map #(->honeysql driver %) join-tables)))
+  (reduce (partial apply h/merge-left-join) honeysql-form (map (partial join-info->honeysql driver) join-tables)))
 
-(defn apply-limit
-  "Apply `limit` clause to HONEYSQL-FORM. Default implementation of `apply-limit` for SQL drivers."
-  [_ honeysql-form {value :limit}]
-  (h/limit honeysql-form value))
+
+;;; ---------------------------------------------------- order-by ----------------------------------------------------
 
 (defn apply-order-by
   "Apply `order-by` clause to HONEYSQL-FORM. Default implementation of `apply-order-by` for SQL drivers."
   [driver honeysql-form {subclauses :order-by breakout-fields :breakout}]
   (let [[{:keys [special-type] :as first-breakout-field}] breakout-fields]
-    (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses]
-      (let [honeysql-form (h/merge-order-by honeysql-form [(->honeysql driver field) (case direction
-                                                                                       :ascending  :asc
-                                                                                       :descending :desc)])]
+    (loop [honeysql-form honeysql-form, [[direction field] & more] subclauses]
+      (let [honeysql-form (h/merge-order-by honeysql-form [(->honeysql driver field) direction])]
         (if (seq more)
           (recur honeysql-form more)
           honeysql-form)))))
 
+;;; -------------------------------------------------- limit & page --------------------------------------------------
+
+(defn apply-limit
+  "Apply `limit` clause to HONEYSQL-FORM. Default implementation of `apply-limit` for SQL drivers."
+  [_ honeysql-form {value :limit}]
+  (h/limit honeysql-form value))
+
 (defn apply-page
   "Apply `page` clause to HONEYSQL-FORM. Default implementation of `apply-page` for SQL drivers."
   [_ honeysql-form {{:keys [items page]} :page}]
@@ -333,23 +370,24 @@
       (h/limit items)
       (h/offset (* items (dec page)))))
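+;; e.g. {:page {:page 3, :items 10}} adds LIMIT 10 OFFSET 20 -- (* items (dec page)) rows are skipped.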
 
+
+;;; -------------------------------------------------- source-table --------------------------------------------------
+
+(defmethod ->honeysql [Object (class Table)]
+  [_ table]
+  (let [{table-name :name, schema :schema} table]
+    (hx/qualify-and-escape-dots schema table-name)))
+
 (defn apply-source-table
   "Apply `source-table` clause to `honeysql-form`. Default implementation of `apply-source-table` for SQL drivers.
   Override as needed."
-  [_ honeysql-form {source-table-id :source-table}]
-  (let [{table-name :name, schema :schema} (qp.store/table source-table-id)]
-    (h/from honeysql-form (hx/qualify-and-escape-dots schema table-name))))
+  [driver honeysql-form {source-table-id :source-table}]
+  (h/from honeysql-form (->honeysql driver (qp.store/table source-table-id))))
 
-(declare apply-clauses)
 
-(defn- apply-source-query [driver honeysql-form {{:keys [native], :as source-query} :source-query}]
-  ;; TODO - what alias should we give the source query?
-  (assoc honeysql-form
-    :from [[(if native
-              (hsql/raw (str "(" (str/replace native #";+\s*$" "") ")")) ; strip off any trailing slashes
-              (binding [*nested-query-level* (inc *nested-query-level*)]
-                (apply-clauses driver {} source-query)))
-            :source]]))
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                           Building the HoneySQL Form                                           |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 (def ^:private clause-handlers
   ;; 1) Use the vars rather than the functions themselves so that redefinitions of the implementations get picked up.
@@ -357,9 +395,8 @@
   ;; 2) This is a vector rather than a map because the order the clauses get handled is important for some drivers.
   ;;    For example, Oracle needs to wrap the entire query in order to apply its version of limit (`WHERE ROWNUM`).
   [:source-table #'sql/apply-source-table
-   :source-query apply-source-query
-   :aggregation  #'sql/apply-aggregation
    :breakout     #'sql/apply-breakout
+   :aggregation  #'sql/apply-aggregation
    :fields       #'sql/apply-fields
    :filter       #'sql/apply-filter
    :join-tables  #'sql/apply-join-tables
@@ -367,12 +404,13 @@
    :page         #'sql/apply-page
    :limit        #'sql/apply-limit])
 
-(defn- apply-clauses
-  "Loop through all the `clause->handler` entries; if the query contains a given clause, apply the handler fn."
-  [driver honeysql-form query]
+(defn- apply-top-level-clauses
+  "Loop through all the `clause->handler` entries; if the query contains a given clause, apply the handler fn. Doesn't
+  handle `:source-query`; since that must be handled in a special way, that is handled separately."
+  [driver honeysql-form inner-query]
   (loop [honeysql-form honeysql-form, [clause f & more] (seq clause-handlers)]
-    (let [honeysql-form (if (clause query)
-                          (f driver honeysql-form query)
+    (let [honeysql-form (if (clause inner-query)
+                          (f driver honeysql-form inner-query)
                           honeysql-form)]
       (if (seq more)
         (recur honeysql-form more)
@@ -381,13 +419,65 @@
         (update honeysql-form :select #(if (seq %) % [:*]))))))
 
 
+;;; -------------------------------------------- Handling source queries ---------------------------------------------
+
+(declare apply-clauses)
+
+;; TODO - it seems to me like we could actually properly handle nested nested queries by giving each level of nesting
+;; a different alias
+(def ^:private source-query-alias :source)
+
+(defn- apply-source-query
+  "Handle a `:source-query` clause by adding a recursive `SELECT` or native query. At the time of this writing, all
+  source queries are aliased as `source`."
+  [driver honeysql-form {{:keys [native], :as source-query} :source-query}]
+  (assoc honeysql-form
+    :from [[(if native
+              (hsql/raw (str "(" (str/replace native #";+\s*$" "") ")")) ; strip off any trailing semicolons
+              (binding [*nested-query-level* (inc *nested-query-level*)]
+                (apply-clauses driver {} source-query)))
+            source-query-alias]]))
+
+(defn- apply-clauses-with-aliased-source-query-table
+  "For queries that have a source query that is a normal MBQL query with a source table, temporarily swap the name of
+  that table to the `source` alias and handle other clauses. This is done so `field-id` references and the like
+  referring to Fields belonging to the Table in the source query work normally."
+  [driver honeysql-form {:keys [source-query], :as inner-query}]
+  (qp.store/with-pushed-store
+    (when-let [source-table-id (:source-table source-query)]
+      (qp.store/store-table! (assoc (qp.store/table source-table-id)
+                               :schema nil
+                               :name   (name source-query-alias)
+                               ;; some drivers like Snowflake need to know this so they don't include Database name
+                               :alias? true)))
+    (apply-top-level-clauses driver honeysql-form (dissoc inner-query :source-query))))
+
+
+;;; --------------------------------------------- putting it all together --------------------------------------------
+
+(defn- apply-clauses
+  "Like `apply-top-level-clauses`, but handles `source-query` as well, which needs to be handled in a special way
+  because it is aliased."
+  [driver honeysql-form {:keys [source-query], :as inner-query}]
+  (if source-query
+    (apply-clauses-with-aliased-source-query-table
+     driver
+     (apply-source-query driver honeysql-form inner-query)
+     inner-query)
+    (apply-top-level-clauses driver honeysql-form inner-query)))
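+;; Putting it together (sketch, hypothetical source table ID; the nested form is elided):
+;;
+;;    (apply-clauses driver {} {:source-query {:source-table 5}, :limit 10})
+;;    ;; -> {:from [[<nested HoneySQL form> :source]], :select [:*], :limit 10}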
+
 (defn build-honeysql-form
   "Build the HoneySQL form we will compile to SQL and execute."
   [driver {inner-query :query}]
   {:pre [(map? inner-query)]}
   (u/prog1 (apply-clauses driver {} inner-query)
     (when-not i/*disable-qp-logging*
-      (log/debug "HoneySQL Form: 🍯\n" (u/pprint-to-str 'cyan <>)))))
+      (log/debug (tru "HoneySQL Form:") (u/emoji "🍯") "\n" (u/pprint-to-str 'cyan <>)))))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                 MBQL -> Native                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 (defn mbql->native
   "Transpile MBQL query into a native SQL statement."
@@ -398,6 +488,11 @@
       {:query  sql
        :params args})))
 
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                Parsing Results                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
 (defn- parse-date-as-string
   "Most databases will never invoke this code. It's possible with SQLite to get here if the timestamp was stored
   without milliseconds. Currently the SQLite JDBC driver will throw an exception even though the SQLite datetime
@@ -407,7 +502,7 @@
   (let [date-string (.getString rs i)]
     (if-let [parsed-date (du/str->date-time date-string tz)]
       parsed-date
-      (throw (Exception. (format "Unable to parse date '%s'" date-string))))))
+      (throw (Exception. (str (tru "Unable to parse date ''{0}''" date-string)))))))
 
 (defn- get-date [^TimeZone tz]
   (fn [^ResultSet rs _ ^Integer i]
@@ -448,6 +543,11 @@
       (mapv (fn [^Integer i data-read-fn]
               (jdbc/result-set-read-column (data-read-fn rs rsmeta i) rsmeta i)) idxs data-read-functions))))
 
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                Running Queries                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
 (defn- set-parameters-with-timezone
   "Returns a function that will set date/timestamp PreparedStatement
   parameters with the correct timezone"
@@ -493,7 +593,7 @@
           ;; we can give up on the query running in the future
           @query-future)
         (catch InterruptedException e
-          (log/warn e "Client closed connection, cancelling query")
+          (log/warn e (tru "Client closed connection, cancelling query"))
           ;; This is what does the real work of cancelling the query. We aren't checking the result of
           ;; `query-future` but this will cause an exception to be thrown, saying the query has been cancelled.
           (.cancel stmt)
@@ -501,14 +601,19 @@
 
 (defn- run-query
   "Run the query itself."
-  [{sql :query, params :params, remark :remark} timezone connection]
+  [driver {sql :query, params :params, remark :remark} timezone connection]
   (let [sql              (str "-- " remark "\n" (hx/unescape-dots sql))
         statement        (into [sql] params)
-        [columns & rows] (cancellable-run-query connection sql params {:identifiers    identity, :as-arrays? true
-                                                                       :read-columns   (read-columns-with-date-handling timezone)
-                                                                       :set-parameters (set-parameters-with-timezone timezone)})]
-       {:rows    (or rows [])
-        :columns columns}))
+        [columns & rows] (cancellable-run-query connection sql params
+                                                {:identifiers    identity
+                                                 :as-arrays?     true
+                                                 :read-columns   (read-columns-with-date-handling timezone)
+                                                 :set-parameters (set-parameters-with-timezone timezone)})]
+    {:rows    (or rows [])
+     :columns (map u/keyword->qualified-name columns)}))
+
+
+;;; -------------------------- Running queries: exception handling & disabling auto-commit ---------------------------
 
 (defn- exception->nice-error-message ^String [^SQLException e]
   (or (->> (.getMessage e)     ; error message comes back like 'Column "ZID" not found; SQL statement: ... [error-code]' sometimes
@@ -544,6 +649,9 @@
   (jdbc/with-db-transaction [transaction-connection connection]
     (do-with-auto-commit-disabled transaction-connection (partial f transaction-connection))))
 
+
+;;; ---------------------------------------------- Running w/ Timezone -----------------------------------------------
+
 (defn- set-timezone!
   "Set the timezone for the current connection."
   [driver settings connection]
@@ -551,34 +659,37 @@
                         (assert (re-matches #"[A-Za-z\/_]+" <>)))
         format-string (sql/set-timezone-sql driver)
         sql           (format format-string (str \' timezone \'))]
-    (log/debug (u/format-color 'green "Setting timezone with statement: %s" sql))
+    (log/debug (u/format-color 'green (tru "Setting timezone with statement: {0}" sql)))
     (jdbc/db-do-prepared connection [sql])))
 
-(defn- run-query-without-timezone [_ _ connection query]
-  (do-in-transaction connection (partial run-query query nil)))
+(defn- run-query-without-timezone [driver _ connection query]
+  (do-in-transaction connection (partial run-query driver query nil)))
 
 (defn- run-query-with-timezone [driver {:keys [^String report-timezone] :as settings} connection query]
   (try
     (do-in-transaction connection (fn [transaction-connection]
                                     (set-timezone! driver settings transaction-connection)
-                                    (run-query query
+                                    (run-query driver
+                                               query
                                                (some-> report-timezone TimeZone/getTimeZone)
                                                transaction-connection)))
     (catch SQLException e
-      (log/error (trs "Failed to set timezone:") "\n" (with-out-str (jdbc/print-sql-exception-chain e)))
+      (log/error (tru "Failed to set timezone:") "\n" (with-out-str (jdbc/print-sql-exception-chain e)))
       (run-query-without-timezone driver settings connection query))
     (catch Throwable e
-      (log/error (trs "Failed to set timezone:") "\n" (.getMessage e))
+      (log/error (tru "Failed to set timezone:") "\n" (.getMessage e))
       (run-query-without-timezone driver settings connection query))))
 
 
+;;; ------------------------------------------------- execute-query --------------------------------------------------
+
 (defn execute-query
   "Process and run a native (raw SQL) QUERY."
-  [driver {:keys [database settings], query :native, :as outer-query}]
+  [driver {settings :settings, query :native, :as outer-query}]
   (let [query (assoc query :remark (qputil/query->remark outer-query))]
     (do-with-try-catch
       (fn []
-        (let [db-connection (sql/db->jdbc-connection-spec database)]
+        (let [db-connection (sql/db->jdbc-connection-spec (qp.store/database))]
           ((if (seq (:report-timezone settings))
              run-query-with-timezone
              run-query-without-timezone) driver settings db-connection query))))))
diff --git a/src/metabase/driver/google.clj b/src/metabase/driver/google.clj
index 0d4c43f03cb780fc95a59b5520c906e8588cf127..95b13f6f19e0975846a6f1a535493fe7f568fa61 100644
--- a/src/metabase/driver/google.clj
+++ b/src/metabase/driver/google.clj
@@ -77,11 +77,7 @@
   created `Database` <3"
   (memoize fetch-access-and-refresh-tokens*))
 
-
-(defn database->credential
-  "Get a `GoogleCredential` for a `DatabaseInstance`."
-  {:arglists '([scopes database])}
-  ^com.google.api.client.googleapis.auth.oauth2.GoogleCredential
+(defn- database->credential*
   [scopes, {{:keys [^String client-id, ^String client-secret, ^String auth-code, ^String access-token, ^String refresh-token], :as details} :details, id :id, :as db}]
   {:pre [(map? db) (seq client-id) (seq client-secret) (or (seq auth-code)
                                                            (and (seq access-token) (seq refresh-token)))]}
@@ -101,6 +97,15 @@
       (.setAccessToken  access-token)
       (.setRefreshToken refresh-token))))
 
+(defn database->credential
+  "Get a `GoogleCredential` for a `DatabaseInstance`."
+  ^com.google.api.client.googleapis.auth.oauth2.GoogleCredential [scopes database-or-id]
+  (database->credential*
+   scopes
+   (if (integer? database-or-id)
+     (db/select-one [Database :id :details], :id database-or-id)
+     database-or-id)))
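+
+;; Both of these usages should now work (a sketch; the ID is hypothetical):
+;;
+;;    (database->credential scopes 42)             ; fetches the Database with ID 42 first
+;;    (database->credential scopes some-database)  ; uses the Database instance directly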
+
 (defn -init-driver
   "Nothing to init as this is code used by the google drivers, but is not a driver itself"
   []
diff --git a/src/metabase/driver/googleanalytics.clj b/src/metabase/driver/googleanalytics.clj
index 10b9ceed69741666d5c7527395a8234ded275fa2..e0f5b6ceed17f5bf659b0ea9f759c95ae9823dcd 100644
--- a/src/metabase/driver/googleanalytics.clj
+++ b/src/metabase/driver/googleanalytics.clj
@@ -170,7 +170,7 @@
   (when-let [ga-column (column-with-name database-id column-name)]
     (merge
      {:display_name (column-attribute ga-column :uiName)
-      :description   (column-attribute ga-column :description)}
+      :description  (column-attribute ga-column :description)}
      (let [data-type (column-attribute ga-column :dataType)]
        (when-let [base-type (cond
                               (= column-name "ga:date") :type/Date
@@ -190,9 +190,9 @@
   (update-in results [:data :cols] (partial map (partial add-col-metadata query))))
 
 (defn- process-query-in-context [qp]
-  (comp (fn [query]
-          (add-built-in-column-metadata query (qp query)))
-        qp/transform-query))
+  (fn [query]
+    (let [results (qp query)]
+      (add-built-in-column-metadata query results))))
 
 (defn- mbql-query->request ^Analytics$Data$Ga$Get [{{:keys [query]} :native, database :database}]
   (let [query  (if (string? query)
diff --git a/src/metabase/driver/googleanalytics/query_processor.clj b/src/metabase/driver/googleanalytics/query_processor.clj
index f28ec4b5919f9259a2e2002a548610b75663bf05..500513ee16aa922ea812ba3cf9553f19b090406d 100644
--- a/src/metabase/driver/googleanalytics/query_processor.clj
+++ b/src/metabase/driver/googleanalytics/query_processor.clj
@@ -3,14 +3,14 @@
   See https://developers.google.com/analytics/devguides/reporting/core/v3"
   (:require [clojure.string :as str]
             [clojure.tools.reader.edn :as edn]
-            [medley.core :as m]
-            [metabase.query-processor
-             [store :as qp.store]
-             [util :as qputil]]
-            [metabase.util :as u]
-            [metabase.util.date :as du])
-  (:import [com.google.api.services.analytics.model GaData GaData$ColumnHeaders]
-           [metabase.query_processor.interface AgFieldRef DateTimeField DateTimeValue Field RelativeDateTimeValue Value]))
+            [metabase.mbql.util :as mbql.u]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util
+             [date :as du]
+             [i18n :as ui18n :refer [tru]]
+             [schema :as su]]
+            [schema.core :as s])
+  (:import [com.google.api.services.analytics.model GaData GaData$ColumnHeaders]))
 
 (def ^:private ^:const earliest-date "2005-01-01")
 (def ^:private ^:const latest-date "today")
@@ -27,22 +27,33 @@
    "US_CURRENCY" :type/Float})
 
 
-(defprotocol ^:private IRValue
-  (^:private ->rvalue [this]))
+(defmulti ^:private ->rvalue mbql.u/dispatch-by-clause-name-or-class)
 
-(extend-protocol IRValue
-  nil                   (->rvalue [_] nil)
-  Object                (->rvalue [this] this)
-  Field                 (->rvalue [this] (:field-name this))
-  DateTimeField         (->rvalue [this] (->rvalue (:field this)))
-  Value                 (->rvalue [this] (:value this))
-  DateTimeValue         (->rvalue [{{unit :unit} :field, value :value}] (du/format-date "yyyy-MM-dd" (du/date-trunc unit value)))
-  RelativeDateTimeValue (->rvalue [{:keys [unit amount]}]
-                          (cond
-                            (and (= unit :day) (= amount 0))  "today"
-                            (and (= unit :day) (= amount -1)) "yesterday"
-                            (and (= unit :day) (< amount -1)) (str (- amount) "daysAgo")
-                            :else                             (du/format-date "yyyy-MM-dd" (du/date-trunc unit (du/relative-date unit amount))))))
+(defmethod ->rvalue nil [_] nil)
+
+(defmethod ->rvalue Object [this] this)
+
+(defmethod ->rvalue :field-id [[_ field-id]]
+  (:name (qp.store/field field-id)))
+
+(defmethod ->rvalue :field-literal [[_ field-name]]
+  field-name)
+
+(defmethod ->rvalue :datetime-field [[_ field]]
+  (->rvalue field))
+
+(defmethod ->rvalue :absolute-datetime [[_ timestamp unit]]
+  (du/format-date "yyyy-MM-dd" (du/date-trunc unit timestamp)))
+
+(defmethod ->rvalue :relative-datetime [[_ amount unit]]
+  (cond
+    (and (= unit :day) (= amount 0))  "today"
+    (and (= unit :day) (= amount -1)) "yesterday"
+    (and (= unit :day) (< amount -1)) (str (- amount) "daysAgo")
+    :else                             (du/format-date "yyyy-MM-dd" (du/date-trunc unit (du/relative-date unit amount)))))
+
+(defmethod ->rvalue :value [[_ value _]]
+  value)
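+
+;; e.g. the relative-datetime translation above works out to:
+;;
+;;    (->rvalue [:relative-datetime  0 :day]) ; -> "today"
+;;    (->rvalue [:relative-datetime -1 :day]) ; -> "yesterday"
+;;    (->rvalue [:relative-datetime -7 :day]) ; -> "7daysAgo"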
 
 
 (defn- char-escape-map
@@ -51,26 +62,30 @@
   (into {} (for [c chars-to-escape]
              {c (str "\\" c)})))
 
-(def ^:private ^{:arglists '([s])} escape-for-regex         (u/rpartial str/escape (char-escape-map ".\\+*?[^]$(){}=!<>|:-")))
-(def ^:private ^{:arglists '([s])} escape-for-filter-clause (u/rpartial str/escape (char-escape-map ",;\\")))
+(def ^:private ^{:arglists '([s])} escape-for-regex         #(str/escape % (char-escape-map ".\\+*?[^]$(){}=!<>|:-")))
+(def ^:private ^{:arglists '([s])} escape-for-filter-clause #(str/escape % (char-escape-map ",;\\")))
 
 (defn- ga-filter ^String [& parts]
   (escape-for-filter-clause (apply str parts)))
 
 
-;;; ### source-table
+;;; -------------------------------------------------- source-table --------------------------------------------------
 
 (defn- handle-source-table [{source-table-id :source-table}]
   (let [{source-table-name :name} (qp.store/table source-table-id)]
     {:ids (str "ga:" source-table-name)}))
 
 
-;;; ### breakout
+;;; -------------------------------------------------- aggregation ---------------------------------------------------
+
+(defn- handle-aggregation
+  [{ags :aggregation}]
+  (when (seq ags)
+    {:metrics (str/join "," (mbql.u/match ags [:metric (metric-name :guard string?)] metric-name))}))
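+
+;; e.g. an `:aggregation` clause of [[:metric "ga:sessions"] [:metric "ga:pageviews"]] (hypothetical metric names)
+;; becomes {:metrics "ga:sessions,ga:pageviews"}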
+
+
+;;; ---------------------------------------------------- breakout ----------------------------------------------------
 
-(defn- aggregations [{aggregations :aggregation}]
-  (if (every? sequential? aggregations)
-    aggregations
-    [aggregations]))
 
 (defn- unit->ga-dimension
   [unit]
@@ -91,98 +106,169 @@
   {:dimensions (if-not breakout-clause
                  ""
                  (str/join "," (for [breakout-field breakout-clause]
-                                 (if (instance? DateTimeField breakout-field)
-                                   (unit->ga-dimension (:unit breakout-field))
-                                   (->rvalue breakout-field)))))})
-
-
-;;; ### filter
-
-;; TODO: implement negate?
-(defn- parse-filter-subclause:filters
-  (^String [filter-clause negate?]
-   ;; if optional arg `negate?` is truthy then prepend a `!` to negate the filter.
-   ;; See https://developers.google.com/analytics/devguides/reporting/core/v3/segments-feature-reference#not-operator
-   (str (when negate? "!") (parse-filter-subclause:filters filter-clause)))
-
-  (^String [{:keys [filter-type field value case-sensitive?], :as filter-clause}]
-   (when-not (instance? DateTimeField field)
-     (let [field (when field (->rvalue field))
-           value (when value (->rvalue value))]
-       (case filter-type
-         :contains    (ga-filter field "=~" (if case-sensitive? "(?-i)" "(?i)")    (escape-for-regex value))
-         :starts-with (ga-filter field "=~" (if case-sensitive? "(?-i)" "(?i)") \^ (escape-for-regex value))
-         :ends-with   (ga-filter field "=~" (if case-sensitive? "(?-i)" "(?i)")    (escape-for-regex value) \$)
-         :=           (ga-filter field "==" value)
-         :!=          (ga-filter field "!=" value)
-         :>           (ga-filter field ">" value)
-         :<           (ga-filter field "<" value)
-         :>=          (ga-filter field ">=" value)
-         :<=          (ga-filter field "<=" value)
-         :between     (str (ga-filter field ">=" (->rvalue (:min-val filter-clause)))
-                           ";"
-                           (ga-filter field "<=" (->rvalue (:max-val filter-clause)))))))))
-
-(defn- parse-filter-clause:filters ^String [{:keys [compound-type subclause subclauses], :as clause}]
-  (case compound-type
-    :and (str/join ";" (remove nil? (map parse-filter-clause:filters subclauses)))
-    :or  (str/join "," (remove nil? (map parse-filter-clause:filters subclauses)))
-    :not (parse-filter-subclause:filters subclause :negate)
-    nil  (parse-filter-subclause:filters clause)))
+                                 (mbql.u/match-one breakout-field
+                                   [:datetime-field _ unit] (unit->ga-dimension unit)
+                                   _                        (->rvalue &match)))))})
+
+
+;;; ----------------------------------------------------- filter -----------------------------------------------------
+
+(defmulti ^:private parse-filter mbql.u/dispatch-by-clause-name-or-class)
+
+(defmethod parse-filter nil [& _]
+  nil)
+
+(defmethod parse-filter :contains [[_ field value {:keys [case-sensitive], :or {case-sensitive true}}]]
+  (ga-filter (->rvalue field) "=~" (if case-sensitive "(?-i)" "(?i)") (escape-for-regex (->rvalue value))))
+
+(defmethod parse-filter :starts-with [[_ field value {:keys [case-sensitive], :or {case-sensitive true}}]]
+  (ga-filter (->rvalue field) "=~" (if case-sensitive "(?-i)" "(?i)") \^ (escape-for-regex (->rvalue value))))
+
+(defmethod parse-filter :ends-with [[_ field value {:keys [case-sensitive], :or {case-sensitive true}}]]
+  (ga-filter (->rvalue field) "=~" (if case-sensitive "(?-i)" "(?i)") (escape-for-regex (->rvalue value)) \$))
+
+(defmethod parse-filter := [[_ field value]]
+  (ga-filter (->rvalue field) "==" (->rvalue value)))
+
+(defmethod parse-filter :!= [[_ field value]]
+  (ga-filter (->rvalue field) "!=" (->rvalue value)))
+
+(defmethod parse-filter :> [[_ field value]]
+  (ga-filter (->rvalue field) ">" (->rvalue value)))
+
+(defmethod parse-filter :< [[_ field value]]
+  (ga-filter (->rvalue field) "<" (->rvalue value)))
+
+(defmethod parse-filter :>= [[_ field value]]
+  (ga-filter (->rvalue field) ">=" (->rvalue value)))
+
+(defmethod parse-filter :<= [[_ field value]]
+  (ga-filter (->rvalue field) "<=" (->rvalue value)))
+
+(defmethod parse-filter :between [[_ field min-val max-val]]
+  (str (ga-filter (->rvalue field) ">=" (->rvalue min-val))
+       ";"
+       (ga-filter (->rvalue field) "<=" (->rvalue max-val))))
+
+(defmethod parse-filter :and [[_ & clauses]]
+  (str/join ";" (filter some? (map parse-filter clauses))))
+
+(defmethod parse-filter :or [[_ & clauses]]
+  (str/join "," (filter some? (map parse-filter clauses))))
+
+(defmethod parse-filter :not [[_ clause]]
+  (str "!" (parse-filter clause)))
 
 (defn- handle-filter:filters [{filter-clause :filter}]
   (when filter-clause
-    (let [filter (parse-filter-clause:filters filter-clause)]
+    ;; remove all clauses that operate on datetime fields or built-in segments because we don't want to handle them
+    ;; here; we'll do that separately with the `handle-filter:interval` and `handle-filter:built-in-segment` logic below
+    ;;
+    ;; (Recall that `auto-bucket-datetime-breakouts` guarantees all datetime Fields will be wrapped by
+    ;; `:datetime-field` clauses in a fully-preprocessed query.)
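+    ;;
+    ;; e.g. (with hypothetical IDs) a filter like
+    ;;
+    ;;    [:and [:segment "gaid::-4"]
+    ;;          [:> [:datetime-field [:field-id 1] :day] [:relative-datetime -7 :day]]
+    ;;          [:= [:field-id 2] [:value "Widget" nil]]]
+    ;;
+    ;; is trimmed down to just the [:= ...] subclause before being handed to `parse-filter`.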
+    (let [filter (parse-filter (mbql.u/replace filter-clause
+                                 [:segment (_ :guard mbql.u/ga-id?)] nil
+                                 [_ [:datetime-field & _] & _] nil))]
       (when-not (str/blank? filter)
         {:filters filter}))))
 
-(defn- parse-filter-subclause:interval [{:keys [filter-type field value], :as filter} & [negate?]]
-  (when negate?
-    (throw (Exception. ":not is :not yet implemented")))
-  (when (instance? DateTimeField field)
-    (case filter-type
-      :between {:start-date (->rvalue (:min-val filter))
-                :end-date   (->rvalue (:max-val filter))}
-      :>       {:start-date (->rvalue (:value filter))
-                :end-date   latest-date}
-      :<       {:start-date earliest-date
-                :end-date   (->rvalue (:value filter))}
-      :=       {:start-date (->rvalue (:value filter))
-                :end-date   (condp instance? (:value filter)
-                              DateTimeValue         (->rvalue (:value filter))
-                              RelativeDateTimeValue (->rvalue (update (:value filter) :amount inc)))}))) ;; inc the end date so we'll get a proper date range once everything is bucketed
-
-(defn- parse-filter-clause:interval [{:keys [compound-type subclause subclauses], :as clause}]
-  (case compound-type
-    :and (apply concat (remove nil? (map parse-filter-clause:interval subclauses)))
-    :or  (apply concat (remove nil? (map parse-filter-clause:interval subclauses)))
-    :not (remove nil? [(parse-filter-subclause:interval subclause :negate)])
-    nil  (remove nil? [(parse-filter-subclause:interval clause)])))
+;;; ----------------------------------------------- filter (intervals) -----------------------------------------------
+
+(defmulti ^:private parse-filter:interval mbql.u/dispatch-by-clause-name-or-class)
+
+(defmethod parse-filter:interval :default [_] nil)
+
+(defmethod parse-filter:interval :between [[_ field min-val max-val]]
+  {:start-date (->rvalue min-val), :end-date (->rvalue max-val)})
+
+(defmethod parse-filter:interval :> [[_ field value]]
+  {:start-date (->rvalue value), :end-date latest-date})
+
+(defmethod parse-filter:interval :< [[_ field value]]
+  {:start-date earliest-date, :end-date (->rvalue value)})
+
+;; TODO - why don't we support `:>=` or `:<=` in GA?
+
+(defmethod parse-filter:interval := [[_ field value]]
+  {:start-date (->rvalue value)
+   :end-date   (->rvalue
+                (cond-> value
+                  ;; for relative datetimes, inc the end date so we'll get a proper date range once everything is
+                  ;; bucketed
+                  (mbql.u/is-clause? :relative-datetime value)
+                  (mbql.u/add-datetime-units 1)))})
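+
+;; e.g. [:= <field> [:relative-datetime -1 :day]] parses to {:start-date "yesterday", :end-date "today"}; bumping
+;; the end date forward a day is what turns the single point into a proper one-day range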
+
+(defn- maybe-get-only-filter-or-throw [filters]
+  (when-let [filters (seq (filter some? filters))]
+    (when (> (count filters) 1)
+      (throw (Exception. (str (tru "Multiple date filters are not supported")))))
+    (first filters)))
+
+(defmethod parse-filter:interval :and [[_ & subclauses]]
+  (maybe-get-only-filter-or-throw (map parse-filter:interval subclauses)))
+
+(defmethod parse-filter:interval :or [[_ & subclauses]]
+  (maybe-get-only-filter-or-throw (map parse-filter:interval subclauses)))
+
+(defmethod parse-filter:interval :not [[& _]]
+  (throw (Exception. (str (tru ":not is not yet implemented")))))
+
+(defn- remove-non-datetime-filter-clauses
+  "Replace any filter clauses that operate on a non-datetime Field with `nil`."
+  [filter-clause]
+  (mbql.u/replace filter-clause
+    ;; we don't support any of the following as datetime filters
+    #{:!= :<= :>= :starts-with :ends-with :contains}
+    nil
+
+    [(_ :guard #{:< :> :between :=}) [(_ :guard (partial not= :datetime-field)) & _] & _]
+    nil))
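+
+;; e.g. (hypothetical Field IDs) given
+;;
+;;    [:and [:> [:datetime-field [:field-id 1] :day] [:relative-datetime -7 :day]]
+;;          [:= [:field-id 2] [:value 100 nil]]]
+;;
+;; the [:=] subclause targets a plain Field, so it's replaced with nil and only the datetime [:>] clause survives.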
 
 (defn- handle-filter:interval
-  "Handle datetime filter clauses. (Anything that *isn't* a datetime filter will be removed by the `handle-builtin-segment` logic)."
+  "Handle datetime filter clauses. (Anything that *isn't* a datetime filter will be removed by the
+  `handle-builtin-segment` logic)."
+  [{filter-clause :filter}]
+  (or (when filter-clause
+        (parse-filter:interval (remove-non-datetime-filter-clauses filter-clause)))
+      {:start-date earliest-date, :end-date latest-date}))
+
+
+;;; ------------------------------------------- filter (built-in segments) -------------------------------------------
+
+(s/defn ^:private built-in-segment :- (s/maybe su/NonBlankString)
   [{filter-clause :filter}]
-  (let [date-filters (when filter-clause
-                       (parse-filter-clause:interval filter-clause))]
-    (case (count date-filters)
-      0 {:start-date earliest-date, :end-date latest-date}
-      1 (first date-filters)
-      (throw (Exception. "Multiple date filters are not supported")))))
+  (let [segments (mbql.u/match filter-clause [:segment (segment-name :guard mbql.u/ga-id?)] segment-name)]
+    (when (> (count segments) 1)
+      (throw (Exception. (str (tru "Only one Google Analytics segment allowed at a time.")))))
+    (first segments)))
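+
+;; e.g. a filter of [:segment "gaid::-4"] (a hypothetical built-in GA segment ID) returns "gaid::-4"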
 
-;;; ### order-by
+(defn- handle-filter:built-in-segment
+  "Handle a built-in GA segment (a `[:segment <ga id>]` filter clause), if present. This is added to the native query
+  under a separate `:segment` key."
+  [inner-query]
+  (when-let [built-in-segment (built-in-segment inner-query)]
+    {:segment built-in-segment}))
+
+
+;;; ---------------------------------------------------- order-by ----------------------------------------------------
 
 (defn- handle-order-by [{:keys [order-by], :as query}]
   (when order-by
-    {:sort (str/join "," (for [{:keys [field direction]} order-by]
-                         (str (case direction
-                                :ascending  ""
-                                :descending "-")
-                              (cond
-                                (instance? DateTimeField field) (unit->ga-dimension (:unit field))
-                                (instance? AgFieldRef field)    (second (nth (aggregations query) (:index field))) ; aggregation is of format [ag-type metric-name]; get the metric-name
-                                :else                           (->rvalue field)))))}))
+    {:sort (str/join
+            ","
+            (for [[direction field] order-by]
+              (str (case direction
+                     :asc  ""
+                     :desc "-")
+                   (mbql.u/match-one field
+                     [:datetime-field _ unit] (unit->ga-dimension unit)
+                     ;; aggregation is of the format [:metric metric-name]; get the metric-name
+                     [:aggregation index]     (second (mbql.u/aggregation-at-index query index))
+                     [& _]                    (->rvalue &match)))))}))
+
 
-;;; ### limit
+;;; ----------------------------------------------------- limit ------------------------------------------------------
 
 (defn- handle-limit [{limit-clause :limit}]
   {:max-results (int (if (nil? limit-clause)
@@ -191,17 +277,19 @@
 
 (defn mbql->native
   "Transpile MBQL query into parameters required for a Google Analytics request."
-  [{:keys [query], :as raw}]
-  {:query (merge (handle-source-table    query)
-                 (handle-breakout        query)
-                 (handle-filter:interval query)
-                 (handle-filter:filters  query)
-                 (handle-order-by        query)
-                 (handle-limit           query)
-                 ;; segments and metrics are pulled out in transform-query
-                 (get raw :ga)
-                 ;; set to false to match behavior of other drivers
-                 {:include-empty-rows false})
+  [{inner-query :query, :as raw}]
+  {:query (into
+           ;; set to false to match behavior of other drivers
+           {:include-empty-rows false}
+           (for [f [handle-source-table
+                    handle-aggregation
+                    handle-breakout
+                    handle-filter:interval
+                    handle-filter:filters
+                    handle-filter:built-in-segment
+                    handle-order-by
+                    handle-limit]]
+             (f inner-query)))
    :mbql? true})
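+
+;; The merged `:query` is just a map of GA request parameters, e.g. (a sketch with hypothetical values):
+;;
+;;    {:query {:ids                "ga:0123456"
+;;             :metrics            "ga:sessions"
+;;             :dimensions         "ga:date"
+;;             :start-date         "2005-01-01"
+;;             :end-date           "today"
+;;             :max-results        100
+;;             :include-empty-rows false}
+;;     :mbql? true}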
 
 (defn- parse-number [s]
@@ -223,11 +311,10 @@
 (defn- header->column [^GaData$ColumnHeaders header]
   (let [date-parser (ga-dimension->date-format-fn (.getName header))]
     (if date-parser
-      {:name      (keyword "ga:date")
-       :base-type :type/DateTime}
-      {:name               (keyword (.getName header))
-       :base-type          (ga-type->base-type (.getDataType header))
-       :field-display-name "COOL"})))
+      {:name      "ga:date"
+       :base_type :type/DateTime}
+      {:name      (.getName header)
+       :base_type (ga-type->base-type (.getDataType header))})))
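+
+;; e.g. the "ga:date" column header comes back as {:name "ga:date", :base_type :type/DateTime}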
 
 (defn- header->getter-fn [^GaData$ColumnHeaders header]
   (let [date-parser (ga-dimension->date-format-fn (.getName header))
@@ -240,89 +327,11 @@
 (defn execute-query
   "Execute a QUERY using the provided DO-QUERY function, and return the results in the usual format."
   [do-query query]
-  (let [mbql?            (:mbql? (:native query))
-        ^GaData response (do-query query)
+  (let [^GaData response (do-query query)
         columns          (map header->column (.getColumnHeaders response))
         getters          (map header->getter-fn (.getColumnHeaders response))]
-    {:columns  (map :name columns)
-     :cols     columns
+    {:cols     columns
+     :columns  (map :name columns)
      :rows     (for [row (.getRows response)]
                  (for [[data getter] (map vector row getters)]
-                   (getter data)))
-     :annotate mbql?}))
-
-
-;;; ----------------------------------------------- "transform-query" ------------------------------------------------
-
-;;; metrics
-
-(defn- built-in-metrics
-  [{query :query}]
-  (when-let [ags (seq (aggregations query))]
-    (str/join "," (for [[aggregation-type metric-name] ags
-                      :when (and aggregation-type
-                                 (= :metric (qputil/normalize-token aggregation-type))
-                                 (string? metric-name))]
-                  metric-name))))
-
-(defn- handle-built-in-metrics [query]
-  (-> query
-      (assoc-in [:ga :metrics] (built-in-metrics query))
-      (m/dissoc-in [:query :aggregation])))
-
-
-;;; segments
-
-(defn- filter-type ^clojure.lang.Keyword [filter-clause]
-  (when (and (sequential? filter-clause)
-             ((some-fn keyword? string?) (first filter-clause)))
-    (qputil/normalize-token (first filter-clause))))
-
-(defn- compound-filter? [filter-clause]
-  (contains? #{:and :or :not} (filter-type filter-clause)))
-
-(defn- built-in-segment? [filter-clause]
-  (and (= :segment (filter-type filter-clause))
-       (string? (second filter-clause))))
-
-(defn- built-in-segments [{{filter-clause :filter} :query}]
-  (if-not (compound-filter? filter-clause)
-    ;; if the top-level filter isn't compound check if it's built-in and return it if it is
-    (when (built-in-segment? filter-clause)
-      (second filter-clause))
-    ;; otherwise if it *is* compound return the first subclause that is built-in; if more than one is built-in throw
-    ;; exception
-    (when-let [[built-in-segment-name & more] (seq (for [subclause filter-clause
-                                                         :when     (built-in-segment? subclause)]
-                                                     (second subclause)))]
-      (when (seq more)
-        (throw (Exception. "Only one Google Analytics segment allowed at a time.")))
-      built-in-segment-name)))
-
-(defn- remove-built-in-segments [filter-clause]
-  (if-not (compound-filter? filter-clause)
-    ;; if top-level filter isn't compound just return it as long as it's not built-in
-    (when-not (built-in-segment? filter-clause)
-      filter-clause)
-    ;; otherwise for compound filters filter out the built-in filters
-    (when-let [filter-clause (seq (for [subclause filter-clause
-                                        :when     (not (built-in-segment? subclause))]
-                                    subclause))]
-      ;; don't keep the filter clause if it's something like an empty compound filter like [:and]
-      (when (> (count filter-clause) 1)
-        (vec filter-clause)))))
-
-(defn- handle-built-in-segments [{{filters :filter} :query, :as query}]
-  (let [query   (assoc-in query [:ga :segment] (built-in-segments query))
-        filters (remove-built-in-segments filters)]
-    (if (seq filters)
-      (assoc-in    query [:query :filter] filters)
-      (m/dissoc-in query [:query :filter]))))
-
-
-;;; public
-
-(def ^{:arglists '([query])} transform-query
-  "Preprocess the incoming query to pull out built-in segments and metrics.
-   This removes customizations to the query dict and makes it compatible with MBQL."
-  (comp handle-built-in-metrics handle-built-in-segments))
+                   (getter data)))}))
diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj
index c08570eece5b9b9cf448aef5d9a3476f90b2b5ad..9f7f83e51a8a28e1f914c3d52de600785e8e2749 100644
--- a/src/metabase/driver/h2.clj
+++ b/src/metabase/driver/h2.clj
@@ -7,11 +7,10 @@
              [util :as u]]
             [metabase.db.spec :as dbspec]
             [metabase.driver.generic-sql :as sql]
-            [metabase.models.database :refer [Database]]
+            [metabase.query-processor.store :as qp.store]
             [metabase.util
              [honeysql-extensions :as hx]
-             [i18n :refer [tru]]]
-            [toucan.db :as db]))
+             [i18n :refer [tru]]]))
 
 (def ^:private ^:const column->base-type
   {:ARRAY                       :type/*
@@ -129,7 +128,7 @@
     ;; connection string. We don't allow SQL execution on H2 databases for the default admin account for security
     ;; reasons
     (when (= (keyword query-type) :native)
-      (let [{:keys [db]}   (db/select-one-field :details Database :id database-id)
+      (let [{:keys [db]}   (:details (qp.store/database))
             _              (assert db)
             [_ options]    (connection-string->file+options db)
             {:strs [USER]} options]
diff --git a/src/metabase/driver/hive_like.clj b/src/metabase/driver/hive_like.clj
index ddf175d1dc6335c09b880a5cb86b51d51b902e97..a557af5c045011b7e0744608980fb14054dde9df 100644
--- a/src/metabase/driver/hive_like.clj
+++ b/src/metabase/driver/hive_like.clj
@@ -3,8 +3,11 @@
             [honeysql
              [core :as hsql]
              [format :as hformat]]
-            [metabase.models.field :refer [Field]]
             [metabase.driver.generic-sql.util.unprepare :as unprepare]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.util :as u]
             [metabase.util.honeysql-extensions :as hx]
             [toucan.db :as db])
   (:import java.util.Date))
@@ -103,10 +106,11 @@
   "Return the pieces that represent a path to FIELD, of the form `[table-name parent-fields-name* field-name]`.
    This function should be used by databases where schemas do not make much sense."
   [{field-name :name, table-id :table_id, parent-id :parent_id}]
-  ;; TODO - we are making too many DB calls here! Why aren't we using the QP Store?
+  ;; TODO - we are making too many DB calls here!
+  ;; (At least this is only used for SQL parameters, which is why we can't currently use the Store)
   (conj (vec (if-let [parent (Field parent-id)]
                (qualified-name-components parent)
-               (let [{table-name :name, schema :schema} (db/select-one ['Table :name :schema], :id table-id)]
+               (let [{table-name :name, schema :schema} (db/select-one [Table :name :schema], :id table-id)]
                  [table-name])))
         field-name))
 
@@ -122,7 +126,7 @@
         statement        (into [sql] params)
         [columns & rows] (jdbc/query connection statement {:identifiers identity, :as-arrays? true})]
     {:rows    (or rows [])
-     :columns columns}))
+     :columns (map u/keyword->qualified-name columns)}))
 
 (defn run-query-without-timezone
   "Runs the given query without trying to set a timezone"
diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj
index 03b2cb28cff7ce8917cbdcac91fd6c71d8e79ae1..7b6a9d2fa940e65082da196b08482167f3a83d7e 100644
--- a/src/metabase/driver/mongo.clj
+++ b/src/metabase/driver/mongo.clj
@@ -8,16 +8,16 @@
             [metabase.driver.mongo
              [query-processor :as qp]
              [util :refer [with-mongo-connection]]]
-            [metabase.models.database :refer [Database]]
-            [metabase.util.ssh :as ssh]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util
+             [i18n :refer [tru]]
+             [ssh :as ssh]]
             [monger
              [collection :as mc]
              [command :as cmd]
              [conversion :as conv]
              [db :as mdb]]
-            [metabase.util.i18n :refer [tru]]
-            [schema.core :as s]
-            [toucan.db :as db])
+            [schema.core :as s])
   (:import com.mongodb.DB))
 
 ;;; ## MongoDriver
@@ -51,7 +51,7 @@
 
 (defn- process-query-in-context [qp]
   (fn [{database-id :database, :as query}]
-    (with-mongo-connection [_ (db/select-one [Database :details], :id database-id)]
+    (with-mongo-connection [_ (qp.store/database)]
       (qp query))))
 
 
diff --git a/src/metabase/driver/mongo/query_processor.clj b/src/metabase/driver/mongo/query_processor.clj
index 6f5e0c1b0b9fccff154ad188961d4aeec5998777..24500dbe756a48558a30442e7a87bdb09d2aa12d 100644
--- a/src/metabase/driver/mongo/query_processor.clj
+++ b/src/metabase/driver/mongo/query_processor.clj
@@ -5,22 +5,30 @@
   (:require [cheshire.core :as json]
             [clojure
              [set :as set]
-             [string :as s]
+             [string :as str]
              [walk :as walk]]
             [clojure.tools.logging :as log]
             [metabase.driver.mongo.util :refer [*mongo-connection*]]
+            [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models.field :refer [Field]]
             [metabase.query-processor
-             [annotate :as annotate]
              [interface :as i]
              [store :as qp.store]]
+            [metabase.query-processor.middleware.annotate :as annotate]
             [metabase.util :as u]
-            [metabase.util.date :as du]
+            [metabase.util
+             [date :as du]
+             [i18n :as ui18n :refer [tru]]
+             [schema :as su]]
             [monger
              [collection :as mc]
-             [operators :refer :all]])
+             [operators :refer :all]]
+            [schema.core :as s])
   (:import java.sql.Timestamp
            [java.util Date TimeZone]
-           [metabase.query_processor.interface AgFieldRef DateTimeField DateTimeValue Field FieldLiteral RelativeDateTimeValue Value]
+           metabase.models.field.FieldInstance
            org.bson.types.ObjectId
            org.joda.time.DateTime))
 
@@ -35,14 +43,53 @@
 (def ^:private ^:const $subtract :$subtract)
 
 
-;; # DRIVER QP INTERFACE
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     Schema                                                     |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; This is just a very limited schema to make sure we're generating valid queries. We should expand it more in the
+;; future.
+
+(def ^:private $ProjectStage {(s/eq $project) {su/NonBlankString s/Any}})
+(def ^:private $SortStage    {(s/eq $sort)    {su/NonBlankString (s/enum -1 1)}})
+(def ^:private $MatchStage   {(s/eq $match)   {(s/constrained su/NonBlankString (partial not= $not)) s/Any}})
+(def ^:private $GroupStage   {(s/eq $group)   {su/NonBlankString s/Any}})
+(def ^:private $LimitStage   {(s/eq $limit)   su/IntGreaterThanZero})
+(def ^:private $SkipStage    {(s/eq $skip)    su/IntGreaterThanZero})
+
+(defn- is-stage? [stage]
+  (fn [m] (= (first (keys m)) stage)))
+
+(def ^:private Stage
+  (s/both
+   (s/constrained su/Map #(= (count (keys %)) 1) "map with a single key")
+   (s/conditional
+    (is-stage? $project) $ProjectStage
+    (is-stage? $sort)    $SortStage
+    (is-stage? $group)   $GroupStage
+    (is-stage? $match)   $MatchStage
+    (is-stage? $limit)   $LimitStage
+    (is-stage? $skip)    $SkipStage)))
+
+(def ^:private Pipeline [Stage])
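+
+;; e.g. this Pipeline is valid -- every stage is a map with exactly one key:
+;;
+;;    [{$match {"price" {$gte 2}}} {$sort {"price" 1}} {$limit 10}]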
+
+
+(def ^:private Projections
+  "Schema for the `:projections` generated by the functions in this namespace. It should be a sequence of the original
+  column names in the query (?)"
+  [s/Keyword])
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                    QP Impl                                                     |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
 
 ;; TODO - We already have a *query* dynamic var in metabase.query-processor.interface. Do we need this one too?
 (def ^:dynamic ^:private *query* nil)
 
-(defn- log-monger-form [form]
+(defn- log-aggregation-pipeline [form]
   (when-not i/*disable-qp-logging*
-    (log/debug (u/format-color 'green "\nMONGO AGGREGATION PIPELINE:\n%s\n"
+    (log/debug (u/format-color 'green (str "\n" (tru "MONGO AGGREGATION PIPELINE:") "\n%s\n")
                  (->> form
                       ;; strip namespace qualifiers from Monger form
                       (walk/postwalk #(if (symbol? %) (symbol (name %)) %))
@@ -61,22 +108,27 @@
 ;; Escaped:
 ;;   {"$group" {"source___username" {"$first" {"$source.username"}, "_id" "$source.username"}}, ...}
 
-(defprotocol ^:private IRValue
-  (^:private ->rvalue [this]
-    "Format this `Field` or `Value` for use as the right hand value of an expression, e.g. by adding `$` to a
-    `Field`'s name"))
+(defmulti ^:private ^{:doc (str "Format this `Field` or `Value` for use as the right hand value of an expression, e.g. "
+                                "by adding `$` to a `Field`'s name")}
+  ->rvalue
+  mbql.u/dispatch-by-clause-name-or-class)
+
+(defmulti ^:private ^{:doc "Return an escaped name that can be used as the name of a given Field."}
+  ^String ->lvalue
+  mbql.u/dispatch-by-clause-name-or-class)
 
-(defprotocol ^:private IField
-  (^:private ->lvalue ^String [this]
-    "Return an escaped name that can be used as the name of a given Field.")
-  (^:private ->initial-rvalue [this]
-    "Return the rvalue that should be used in the *initial* projection for this `Field`."))
+(defmulti ^:private ^{:doc "Return the rvalue that should be used in the *initial* projection for this `Field`."}
+  ->initial-rvalue
+  mbql.u/dispatch-by-clause-name-or-class)
 
 
 (defn- field->name
   "Return a single string name for FIELD. For nested fields, this creates a combined qualified name."
-  ^String [^Field field, ^String separator]
-  (s/join separator (rest (i/qualified-name-components field))))
+  ^String [^FieldInstance field, ^String separator]
+  (if-let [parent-id (:parent_id field)]
+    (str/join separator [(field->name (qp.store/field parent-id) separator)
+                         (:name field)])
+    (:name field)))
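+
+;; e.g. for a nested Field "zip" whose parent Field is "address" (hypothetical names):
+;;
+;;    (field->name field "___") ; -> "address___zip" (the escaped form used for lvalues)
+;;    (field->name field ".")   ; -> "address.zip"   (the form used in initial rvalues)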
 
 (defmacro ^:private mongo-let
   {:style/indent 1}
@@ -85,190 +137,216 @@
           :in   `(let [~field ~(keyword (str "$$" (name field)))]
                    ~@body)}})
 
-;; As mentioned elsewhere for some arcane reason distinct aggregations come back named "count" and every thing else as
-;; the aggregation type
-(defn- ag-type->field-name [ag-type]
-  (when ag-type
-    (if (= ag-type :distinct)
-      "count"
-      (name ag-type))))
-
-(extend-protocol IField
-  Field
-  (->lvalue [this]
-    (field->name this "___"))
-
-  (->initial-rvalue [this]
-    (str \$ (field->name this ".")))
-
-  FieldLiteral
-  (->lvalue [this]
-    (field->name this "___"))
-
-  (->initial-rvalue [this]
-    (str \$ (field->name this ".")))
-
-  AgFieldRef
-  (->lvalue [{:keys [index]}]
-    (let [{:keys [aggregation-type]} (nth (:aggregation (:query *query*)) index)]
-      (ag-type->field-name aggregation-type)))
-
-  DateTimeField
-  (->lvalue [{unit :unit, ^Field field :field}]
-    (str (->lvalue field) "~~~" (name unit)))
-
-  (->initial-rvalue [{unit :unit, {:keys [special-type], :as ^Field field} :field}]
-    (mongo-let [field (as-> field <>
-                        (->initial-rvalue <>)
-                        (cond
-                          (isa? special-type :type/UNIXTimestampMilliseconds)
-                          {$add [(java.util.Date. 0) <>]}
-
-                          (isa? special-type :type/UNIXTimestampSeconds)
-                          {$add [(java.util.Date. 0) {$multiply [<> 1000]}]}
-
-                          :else <>))]
+
+(defmethod ->lvalue         (class Field) [this] (field->name this "___"))
+(defmethod ->initial-rvalue (class Field) [this] (str \$ (field->name this ".")))
+(defmethod ->rvalue         (class Field) [this] (str \$ (->lvalue this)))
+
+(defmethod ->lvalue         :field-id [[_ field-id]] (->lvalue          (qp.store/field field-id)))
+(defmethod ->initial-rvalue :field-id [[_ field-id]] (->initial-rvalue  (qp.store/field field-id)))
+(defmethod ->rvalue         :field-id [[_ field-id]] (->rvalue          (qp.store/field field-id)))
+
+(defmethod ->lvalue         :field-literal [[_ field-name]] (name field-name))
+(defmethod ->initial-rvalue :field-literal [[_ field-name]] (str \$ (name field-name)))
+(defmethod ->rvalue         :field-literal [[_ field-name]] (str \$ (name field-name))) ; TODO - not sure if right?
+
+
+;; `:aggregation` references only need to implement `->lvalue`, the name the aggregation result is projected as.
+;; You never reference an aggregation's value inside another expression, so there's no `->rvalue`, and it has no
+;; initial projection, so there's no need to implement `->initial-rvalue` either.
+(defmethod ->lvalue :aggregation [[_ index]]
+  (annotate/aggregation-name (mbql.u/aggregation-at-index *query* index)))
+
+
+(defmethod ->lvalue :datetime-field [[_ field-clause unit]]
+  (str (->lvalue field-clause) "~~~" (name unit)))
+
+(defmethod ->initial-rvalue :datetime-field [[_ field-clause unit]]
+  (let [field-id (mbql.u/field-clause->id-or-literal field-clause)
+        field    (when (integer? field-id)
+                   (qp.store/field field-id))]
+    (mongo-let [column (let [initial-rvalue (->initial-rvalue field-clause)]
+                         (cond
+                           (isa? (:special_type field) :type/UNIXTimestampMilliseconds)
+                           {$add [(java.util.Date. 0) initial-rvalue]}
+
+                           (isa? (:special_type field) :type/UNIXTimestampSeconds)
+                           {$add [(java.util.Date. 0) {$multiply [initial-rvalue 1000]}]}
+
+                           :else initial-rvalue))]
       (let [stringify (fn stringify
                         ([format-string]
-                         (stringify format-string field))
+                         (stringify format-string column))
                         ([format-string fld]
                          {:___date {:$dateToString {:format format-string
                                                     :date   fld}}}))]
         (case unit
-          :default         field
+          :default         column
           :minute          (stringify "%Y-%m-%dT%H:%M:00")
-          :minute-of-hour  {$minute field}
+          :minute-of-hour  {$minute column}
           :hour            (stringify "%Y-%m-%dT%H:00:00")
-          :hour-of-day     {$hour field}
+          :hour-of-day     {$hour column}
           :day             (stringify "%Y-%m-%d")
-          :day-of-week     {$dayOfWeek field}
-          :day-of-month    {$dayOfMonth field}
-          :day-of-year     {$dayOfYear field}
-          :week            (stringify "%Y-%m-%d" {$subtract [field
-                                                             {$multiply [{$subtract [{$dayOfWeek field}
+          :day-of-week     {$dayOfWeek column}
+          :day-of-month    {$dayOfMonth column}
+          :day-of-year     {$dayOfYear column}
+          :week            (stringify "%Y-%m-%d" {$subtract [column
+                                                             {$multiply [{$subtract [{$dayOfWeek column}
                                                                                      1]}
                                                                          (* 24 60 60 1000)]}]})
-          :week-of-year    {$add [{$week field}
+          :week-of-year    {$add [{$week column}
                                   1]}
           :month           (stringify "%Y-%m")
-          :month-of-year   {$month field}
+          :month-of-year   {$month column}
           ;; For quarter we'll just subtract enough days from the current date to put it in the correct month and
-          ;; stringify it as yyyy-MM Subtracting (($dayOfYear(field) % 91) - 3) days will put you in correct month.
+          ;; stringify it as yyyy-MM Subtracting (($dayOfYear(column) % 91) - 3) days will put you in correct month.
           ;; Trust me.
-          :quarter         (stringify "%Y-%m" {$subtract [field
-                                                          {$multiply [{$subtract [{$mod [{$dayOfYear field}
+          :quarter         (stringify "%Y-%m" {$subtract [column
+                                                          {$multiply [{$subtract [{$mod [{$dayOfYear column}
                                                                                          91]}
                                                                                   3]}
                                                                       (* 24 60 60 1000)]}]})
-          :quarter-of-year (mongo-let [month   {$month field}]
+          :quarter-of-year (mongo-let [month   {$month column}]
                              {$divide [{$subtract [{$add [month 2]}
                                                    {$mod [{$add [month 2]}
                                                           3]}]}
                                        3]})
-          :year            {$year field})))))
-
-
-(extend-protocol IRValue
-  nil (->rvalue [_] nil)
-
-  Field
-  (->rvalue [this]
-    (str \$ (->lvalue this)))
-
-  DateTimeField
-  (->rvalue [this]
-    (str \$ (->lvalue this)))
-
-  Value
-  (->rvalue [{value :value, {:keys [base-type]} :field}]
-    (if (isa? base-type :type/MongoBSONID)
-      (ObjectId. (str value))
-      value))
-
-  DateTimeValue
-  (->rvalue [{^java.sql.Timestamp value :value, {:keys [unit]} :field}]
-    (let [stringify (fn stringify
-                      ([format-string]
-                       (stringify format-string value))
-                      ([format-string v]
-                       {:___date (du/format-date format-string v)}))
-          extract   (u/rpartial du/date-extract value)]
-      (case (or unit :default)
-        :default         (some-> value du/->Date)
-        :minute          (stringify "yyyy-MM-dd'T'HH:mm:00")
-        :minute-of-hour  (extract :minute)
-        :hour            (stringify "yyyy-MM-dd'T'HH:00:00")
-        :hour-of-day     (extract :hour)
-        :day             (stringify "yyyy-MM-dd")
-        :day-of-week     (extract :day-of-week)
-        :day-of-month    (extract :day-of-month)
-        :day-of-year     (extract :day-of-year)
-        :week            (stringify "yyyy-MM-dd" (du/date-trunc :week value))
-        :week-of-year    (extract :week-of-year)
-        :month           (stringify "yyyy-MM")
-        :month-of-year   (extract :month)
-        :quarter         (stringify "yyyy-MM" (du/date-trunc :quarter value))
-        :quarter-of-year (extract :quarter-of-year)
-        :year            (extract :year))))
-
-  RelativeDateTimeValue
-  (->rvalue [{:keys [amount unit field]}]
-    (->rvalue (i/map->DateTimeValue {:value (du/relative-date (or unit :day) amount)
-                                     :field field}))))
-
-
-;;; ## CLAUSE APPLICATION
-
-;;; ### initial projection
-
-(defn- add-initial-projection [query pipeline-ctx]
-  (let [all-fields (distinct (annotate/collect-fields query :keep-date-time-fields))]
+          :year            {$year column})))))
+
+
+(defmethod ->rvalue :datetime-field [this]
+  (str \$ (->lvalue this)))
+
+
+;; Values clauses below; they only need to implement `->rvalue`
+
+(defmethod ->rvalue nil [_] nil)
+
+
+(defmethod ->rvalue :value [[_ value {base-type :base_type}]]
+  (if (isa? base-type :type/MongoBSONID)
+    (ObjectId. (str value))
+    value))
+
+
+(defmethod ->rvalue :absolute-datetime [[_ ^java.sql.Timestamp value, unit]]
+  (let [stringify (fn stringify
+                    ([format-string]
+                     (stringify format-string value))
+                    ([format-string v]
+                     {:___date (du/format-date format-string v)}))
+        extract   (u/rpartial du/date-extract value)]
+    (case (or unit :default)
+      :default         value
+      :minute          (stringify "yyyy-MM-dd'T'HH:mm:00")
+      :minute-of-hour  (extract :minute)
+      :hour            (stringify "yyyy-MM-dd'T'HH:00:00")
+      :hour-of-day     (extract :hour)
+      :day             (stringify "yyyy-MM-dd")
+      :day-of-week     (extract :day-of-week)
+      :day-of-month    (extract :day-of-month)
+      :day-of-year     (extract :day-of-year)
+      :week            (stringify "yyyy-MM-dd" (du/date-trunc :week value))
+      :week-of-year    (extract :week-of-year)
+      :month           (stringify "yyyy-MM")
+      :month-of-year   (extract :month)
+      :quarter         (stringify "yyyy-MM" (du/date-trunc :quarter value))
+      :quarter-of-year (extract :quarter-of-year)
+      :year            (extract :year))))
+
+
+;; TODO - where's the part where we handle include-current?
+(defmethod ->rvalue :relative-datetime [[_ amount unit]]
+  (->rvalue [:absolute-datetime (du/relative-date (or unit :day) amount) unit]))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                               CLAUSE APPLICATION                                               |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+
+;;; ----------------------------------------------- initial projection -----------------------------------------------
+
+(s/defn ^:private add-initial-projection :- {:projections Projections, :query Pipeline}
+  [inner-query pipeline-ctx]
+  (let [all-fields (distinct (mbql.u/match inner-query #{:field-id :datetime-field}))]
     (if-not (seq all-fields)
       pipeline-ctx
-      (let [projections (for [field all-fields]
-                          [(->lvalue field) (->initial-rvalue field)])]
+      (let [projection+initial-rvalue (for [field all-fields]
+                                        [(->lvalue field) (->initial-rvalue field)])]
         (-> pipeline-ctx
-            (assoc  :projections (doall (map (comp keyword first) projections)))
-            (update :query conj {$project (into (hash-map) projections)}))))))
-
-
-;;; ### filter
-
-(defn- parse-filter-subclause [{:keys [filter-type field value case-sensitive?] :as filter} & [negate?]]
-  (let [field (when field (->lvalue field))
-        value (when value (->rvalue value))
-        v     (case filter-type
-                :between     {$gte (->rvalue (:min-val filter))
-                              $lte (->rvalue (:max-val filter))}
-                :contains    (re-pattern (str (when-not case-sensitive? "(?i)")    value))
-                :starts-with (re-pattern (str (when-not case-sensitive? "(?i)") \^ value))
-                :ends-with   (re-pattern (str (when-not case-sensitive? "(?i)")    value \$))
-                :=           {"$eq" value}
-                :!=          {$ne  value}
-                :<           {$lt  value}
-                :>           {$gt  value}
-                :<=          {$lte value}
-                :>=          {$gte value})]
-    {field (if negate?
-             {$not v}
-             v)}))
-
-(defn- parse-filter-clause [{:keys [compound-type subclause subclauses], :as clause}]
-  (case compound-type
-    :and {$and (mapv parse-filter-clause subclauses)}
-    :or  {$or  (mapv parse-filter-clause subclauses)}
-    :not (parse-filter-subclause subclause :negate)
-    nil  (parse-filter-subclause clause)))
+            (assoc  :projections (doall (map (comp keyword first) projection+initial-rvalue)))
+            (update :query conj {$project (into {} projection+initial-rvalue)}))))))
+
+
+;;; ----------------------------------------------------- filter -----------------------------------------------------
+
+(defmethod ->rvalue ::not [[_ value]]
+  {$not (->rvalue value)})
+
+(defmulti ^:private parse-filter first)
+
+(defmethod parse-filter :between [[_ field min-val max-val]]
+  {(->lvalue field) {$gte (->rvalue min-val)
+                     $lte (->rvalue max-val)}})
+
+(defn- str-match-pattern [options prefix value suffix]
+  (if (mbql.u/is-clause? ::not value)
+    {$not (str-match-pattern options prefix (second value) suffix)}
+    (let [case-sensitive? (get options :case-sensitive true)]
+      (re-pattern (str (when-not case-sensitive? "(?i)") prefix (->rvalue value) suffix)))))
+
+(defmethod parse-filter :contains    [[_ field v opts]] {(->lvalue field) (str-match-pattern opts nil v nil)})
+(defmethod parse-filter :starts-with [[_ field v opts]] {(->lvalue field) (str-match-pattern opts \^  v nil)})
+(defmethod parse-filter :ends-with   [[_ field v opts]] {(->lvalue field) (str-match-pattern opts nil v \$)})
+
+(defmethod parse-filter :=  [[_ field value]] {(->lvalue field) {"$eq" (->rvalue value)}})
+(defmethod parse-filter :!= [[_ field value]] {(->lvalue field) {$ne   (->rvalue value)}})
+(defmethod parse-filter :<  [[_ field value]] {(->lvalue field) {$lt   (->rvalue value)}})
+(defmethod parse-filter :>  [[_ field value]] {(->lvalue field) {$gt   (->rvalue value)}})
+(defmethod parse-filter :<= [[_ field value]] {(->lvalue field) {$lte  (->rvalue value)}})
+(defmethod parse-filter :>= [[_ field value]] {(->lvalue field) {$gte  (->rvalue value)}})
+
+(defmethod parse-filter :and [[_ & args]] {$and (mapv parse-filter args)})
+(defmethod parse-filter :or  [[_ & args]] {$or (mapv parse-filter args)})
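+
+;; e.g. assuming [:field-id 1] resolves to a Field named "price" (hypothetical):
+;;
+;;    (parse-filter [:between [:field-id 1] [:value 2 nil] [:value 5 nil]])
+;;    ;; -> {"price" {$gte 2, $lte 5}}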
+
+
+;; This is somewhat silly, but MongoDB doesn't support `$not` as a top-level operator, so we have to push the
+;; negation down into the individual clauses ourselves. Ick. Maybe we could pull this logic up into `mbql.u` so
+;; other drivers could reuse it?
+(defmulti ^:private negate first)
+
+(defmethod negate :not [[_ subclause]]    subclause)
+(defmethod negate :and [[_ & subclauses]] (apply vector :or  (map negate subclauses)))
+(defmethod negate :or  [[_ & subclauses]] (apply vector :and (map negate subclauses)))
+(defmethod negate :=   [[_ field value]]  [:!= field value])
+(defmethod negate :!=  [[_ field value]]  [:=  field value])
+(defmethod negate :>   [[_ field value]]  [:<= field value])
+(defmethod negate :<   [[_ field value]]  [:>= field value])
+(defmethod negate :>=  [[_ field value]]  [:<  field value])
+(defmethod negate :<=  [[_ field value]]  [:>  field value])
+
+(defmethod negate :between [[_ field min max]] [:or [:< field min] [:> field max]])
+
+(defmethod negate :contains    [[_ field v opts]] [:contains field [::not v] opts])
+(defmethod negate :starts-with [[_ field v opts]] [:starts-with field [::not v] opts])
+(defmethod negate :ends-with   [[_ field v opts]] [:ends-with field [::not v] opts])
+
+(defmethod parse-filter :not [[_ subclause]]
+  (parse-filter (negate subclause)))
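+
+;; e.g. negation gets pushed all the way down, De Morgan-style (a sketch with hypothetical Field IDs):
+;;
+;;    (negate [:and [:= [:field-id 1] [:value 2 nil]] [:> [:field-id 3] [:value 4 nil]]])
+;;    ;; -> [:or [:!= [:field-id 1] [:value 2 nil]] [:<= [:field-id 3] [:value 4 nil]]]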
+
 
 (defn- handle-filter [{filter-clause :filter} pipeline-ctx]
   (if-not filter-clause
     pipeline-ctx
-    (update pipeline-ctx :query conj {$match (parse-filter-clause filter-clause)})))
+    (update pipeline-ctx :query conj {$match (parse-filter filter-clause)})))
 
 
-;;; ### aggregation
+;;; -------------------------------------------------- aggregation ---------------------------------------------------
 
-(defn- aggregation->rvalue [{:keys [aggregation-type field]}]
+(defn- aggregation->rvalue [[aggregation-type field]]
   {:pre [(keyword? aggregation-type)]}
   (if-not field
     (case aggregation-type
@@ -283,17 +361,18 @@
       :min      {$min (->rvalue field)}
       :max      {$max (->rvalue field)})))
 
-(defn- breakouts-and-ags->projected-fields
+(s/defn ^:private breakouts-and-ags->projected-fields :- [(s/pair su/NonBlankString "projected-field-name"
+                                                                  s/Any             "source")]
   "Determine field projections for MBQL breakouts and aggregations. Returns a sequence of pairs like
   `[projected-field-name source]`.
   [breakout-fields aggregations]
   (concat
-   (for [{ag-type :aggregation-type} aggregations]
-     [(ag-type->field-name ag-type) (if (= ag-type :distinct)
-                                      {$size "$count"} ; HACK
-                                      true)])
    (for [field breakout-fields]
-     [(->lvalue field) (format "$_id.%s" (->lvalue field))])))
+     [(->lvalue field) (format "$_id.%s" (->lvalue field))])
+   (for [ag aggregations]
+     [(annotate/aggregation-name ag) (if (mbql.u/is-clause? :distinct ag)
+                                       {$size "$count"} ; HACK
+                                       true)])))
 
 (defn- breakouts-and-ags->pipeline-stages
   "Return a sequeunce of aggregation pipeline stages needed to implement MBQL breakouts and aggregations."
@@ -306,15 +385,15 @@
       {$project (merge {"_id"      "$_id"
                         "___group" (into {} (for [field breakout-fields]
                                               {(->lvalue field) (->rvalue field)}))}
-                       (into {} (for [{ag-field :field} aggregations
-                                      :when             ag-field]
+                       (into {} (for [[_ ag-field] aggregations
+                                      :when        ag-field]
                                   {(->lvalue ag-field) (->rvalue ag-field)})))})
     ;; Now project onto the __group and the aggregation rvalue
     {$group (merge
              {"_id" (when (seq breakout-fields)
                       "$___group")}
-             (into {} (for [{ag-type :aggregation-type, :as aggregation} aggregations]
-                        {(ag-type->field-name ag-type) (aggregation->rvalue aggregation)})))}
+             (into {} (for [ag aggregations]
+                        [(annotate/aggregation-name ag) (aggregation->rvalue ag)])))}
     ;; Sort by _id (___group)
     {$sort {"_id" 1}}
     ;; now project back to the fields we expect
@@ -338,18 +417,20 @@
           (update :query into (breakouts-and-ags->pipeline-stages projected-fields breakout-fields aggregations))))))
 
 
-;;; ### order-by
+;;; ---------------------------------------------------- order-by ----------------------------------------------------
+
+(s/defn ^:private order-by->$sort :- $SortStage
+  [order-by :- [mbql.s/OrderBy]]
+  {$sort (into {} (for [[direction field] order-by]
+                    [(->lvalue field) (case direction
+                                        :asc   1
+                                        :desc -1)]))})
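+;; e.g. (hypothetical Field clauses and column names):
+;;   (order-by->$sort [[:asc [:field-id 1]] [:desc [:field-id 2]]])
+;;   ;; => {$sort {"created_at" 1, "price" -1}}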
 
 (defn- handle-order-by [{:keys [order-by]} pipeline-ctx]
-  (if-not (seq order-by)
-    pipeline-ctx
-    (update pipeline-ctx :query conj {$sort (into (hash-map)
-                                                  (for [{:keys [field direction]} order-by]
-                                                    [(->lvalue field) (case direction
-                                                                        :ascending   1
-                                                                        :descending -1)]))})))
+  (cond-> pipeline-ctx
+    (seq order-by) (update :query conj (order-by->$sort order-by))))
 
-;;; ### fields
+;;; ----------------------------------------------------- fields -----------------------------------------------------
 
 (defn- handle-fields [{:keys [fields]} pipeline-ctx]
   (if-not (seq fields)
@@ -359,9 +440,9 @@
           (assoc :projections (map (comp keyword first) new-projections))
           ;; add project _id = false to keep _id from getting automatically returned unless explicitly specified
           (update :query conj {$project (merge {"_id" false}
-                                               (into (hash-map) new-projections))})))))
+                                               (into {} new-projections))})))))
 
-;;; ### limit
+;;; ----------------------------------------------------- limit ------------------------------------------------------
 
 (defn- handle-limit [{:keys [limit]} pipeline-ctx]
   (if-not limit
@@ -369,22 +450,26 @@
     (update pipeline-ctx :query conj {$limit limit})))
 
 
-;;; ### page
+;;; ------------------------------------------------------ page ------------------------------------------------------
 
-(defn- handle-page [{{page-num :page items-per-page :items, :as page-clause} :page} pipeline-ctx]
+(defn- handle-page [{{page-num :page, items-per-page :items, :as page-clause} :page} pipeline-ctx]
   (if-not page-clause
     pipeline-ctx
-    (update pipeline-ctx :query into [{$skip (* items-per-page (dec page-num))}
-                                      {$limit items-per-page}])))
+    (update pipeline-ctx :query concat (filter some? [(let [offset (* items-per-page (dec page-num))]
+                                                        (when-not (zero? offset)
+                                                          {$skip offset}))
+                                                      {$limit items-per-page}]))))
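+;; e.g. `{:page {:page 2, :items 10}}` appends `{$skip 10}` and `{$limit 10}` to the pipeline; for page 1 the
+;; zero-offset `$skip` stage is filtered out and only `{$limit 10}` is appended.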
 
 
-;;; # process + run
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                 Process & Run                                                  |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
-(defn- generate-aggregation-pipeline
+(s/defn ^:private generate-aggregation-pipeline :- {:projections Projections, :query Pipeline}
   "Generate the aggregation pipeline. Returns a sequence of maps representing each stage."
-  [query]
+  [inner-query :- mbql.s/MBQLQuery]
   (reduce (fn [pipeline-ctx f]
-            (f query pipeline-ctx))
+            (f inner-query pipeline-ctx))
           {:projections [], :query []}
           [add-initial-projection
            handle-filter
@@ -394,14 +479,15 @@
            handle-limit
            handle-page]))
 
-(defn- create-unescaping-rename-map [original-keys]
-  (into {} (for [k original-keys]
-             (let [k-str     (name k)
-                   unescaped (-> k-str
-                                 (s/replace #"___" ".")
-                                 (s/replace #"~~~(.+)$" ""))]
-               (when-not (= k-str unescaped)
-                 {k (keyword unescaped)})))))
+(s/defn ^:private create-unescaping-rename-map :- {s/Keyword s/Keyword}
+  [original-keys :- Projections]
+  (into {} (for [k original-keys
+                 :let [k-str     (name k)
+                       unescaped (-> k-str
+                                     (str/replace #"___" ".")
+                                     (str/replace #"~~~(.+)$" ""))]
+                 :when (not (= k-str unescaped))]
+             [k (keyword unescaped)])))
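+;; e.g. (hypothetical escaped projection names)
+;;   (create-unescaping-rename-map [:source___service (keyword "venue~~~count")])
+;;   ;; => {:source___service :source.service, (keyword "venue~~~count") :venue}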
 
 (defn- unescape-names
   "Restore the original, unescaped nested Field names in the keys of RESULTS.
@@ -485,9 +571,9 @@
   [fn-name query-string]
   (-> query-string
       ;; replace any forms WITH NO args like ISODate() with ones like ["___ISODate"]
-      (s/replace (re-pattern (format "%s\\(\\)" (name fn-name))) (format "[\"___%s\"]" (name fn-name)))
+      (str/replace (re-pattern (format "%s\\(\\)" (name fn-name))) (format "[\"___%s\"]" (name fn-name)))
       ;; now replace any forms WITH args like ISODate("2016-01-01") with ones like ["___ISODate", "2016-01-01"]
-      (s/replace (re-pattern (format "%s\\(([^)]*)\\)" (name fn-name))) (format "[\"___%s\", $1]" (name fn-name)))))
+      (str/replace (re-pattern (format "%s\\(([^)]*)\\)" (name fn-name))) (format "[\"___%s\", $1]" (name fn-name)))))
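+;; a quick sketch of the rewrite this performs:
+;;   (encode-fncalls-for-fn "ISODate" "{\"$lt\": ISODate(\"2018-01-01\")}")
+;;   ;; => "{\"$lt\": [\"___ISODate\", \"2018-01-01\"]}"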
 
 (defn- encode-fncalls
   "Replace occurances of `ISODate(...)` and similary function calls (invalid JSON, but legal in Mongo)
@@ -502,54 +588,59 @@
              more))))
 
 
-;;; ------------------------------------------------ Query Execution -------------------------------------------------
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                Query Execution                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 (defn mbql->native
   "Process and run an MBQL query."
-  [{database :database, {source-table-id :source-table} :query, :as query}]
-  {:pre [(map? database)]}
+  [{{source-table-id :source-table} :query, :as query}]
   (let [{source-table-name :name} (qp.store/table source-table-id)]
     (binding [*query* query]
       (let [{proj :projections, generated-pipeline :query} (generate-aggregation-pipeline (:query query))]
-        (log-monger-form generated-pipeline)
+        (log-aggregation-pipeline generated-pipeline)
         {:projections proj
          :query       generated-pipeline
          :collection  source-table-name
          :mbql?       true}))))
 
+(defn- check-columns [columns results]
+  (when (seq results)
+    (let [expected-cols columns
+          actual-cols   (keys (first results))]
+      (when (not= (set expected-cols) (set actual-cols))
+        (throw (Exception. (str (tru "Error: mismatched columns in results! Expected: {0} Got: {1}"
+                                     (vec expected-cols) (vec actual-cols)))))))))
+
 (defn execute-query
   "Process and run a native MongoDB query."
-  [{{:keys [collection query mbql? projections]} :native, database :database}]
-  {:pre [query
-         (string? collection)
-         (map? database)]}
-  (let [query   (if (string? query)
-                  (decode-fncalls (json/parse-string (encode-fncalls query) keyword))
-                  query)
-        results (mc/aggregate *mongo-connection* collection query
-                              :allow-disk-use true
-                              ;; options that control the creation of the cursor object. Empty map means use default
-                              ;; options. Needed for Mongo 3.6+
-                              :cursor {})
-        results (if (sequential? results)
-                  results
-                  [results])
+  [{{:keys [collection query mbql? projections]} :native}]
+  {:pre [query (string? collection)]}
+  (let [query      (if (string? query)
+                     (decode-fncalls (json/parse-string (encode-fncalls query) keyword))
+                     query)
+        results    (mc/aggregate *mongo-connection* collection query
+                                 :allow-disk-use true
+                                 ;; options that control the creation of the cursor object. Empty map means use default
+                                 ;; options. Needed for Mongo 3.6+
+                                 :cursor {})
+        results    (if (sequential? results)
+                     results
+                     [results])
         ;; if we formed the query using MBQL then we apply a couple post processing functions
-        results (if-not mbql?
-                  results
-                  (-> results
-                      unescape-names
-                      unstringify-dates))
+        results    (cond-> results
+                     mbql? (-> unescape-names unstringify-dates))
         rename-map (create-unescaping-rename-map projections)
-        columns (if-not mbql?
-                  (vec (keys (first results)))
-                  (map (fn [proj]
-                         (if (contains? rename-map proj)
-                           (get rename-map proj)
-                           proj))
-                       projections))]
-
-    {:columns   columns
-     :rows      (for [row results]
-                  (mapv row columns))
-     :annotate? mbql?}))
+        columns    (if-not mbql?
+                     (keys (first results))
+                     (map (fn [proj]
+                            (if (contains? rename-map proj)
+                              (get rename-map proj)
+                              proj))
+                          projections))]
+    (when mbql?
+      (check-columns columns results))
+    {:columns (map name columns)
+     :rows    (for [row results]
+                (mapv row columns))}))
diff --git a/src/metabase/driver/mongo/util.clj b/src/metabase/driver/mongo/util.clj
index 60fb3244e5ad2e45e00b42bc710bf53c3f2d0eaf..24c4b6036f2ac69750139d149f2749e92d0bbfe4 100644
--- a/src/metabase/driver/mongo/util.clj
+++ b/src/metabase/driver/mongo/util.clj
@@ -4,10 +4,12 @@
             [metabase
              [config :as config]
              [util :as u]]
+            [metabase.models.database :refer [Database]]
             [metabase.util.ssh :as ssh]
             [monger
              [core :as mg]
-             [credentials :as mcred]])
+             [credentials :as mcred]]
+            [toucan.db :as db])
   (:import [com.mongodb MongoClientOptions MongoClientOptions$Builder MongoClientURI]))
 
 (def ^:const ^:private connection-timeout-ms
@@ -80,6 +82,7 @@
    values for DATABASE, such as plain strings or the usual MB details map."
   [database]
   (cond
+    (integer? database)           (db/select-one [Database :details] :id database)
     (string? database)            {:dbname database}
     (:dbname (:details database)) (:details database) ; entire Database obj
     (:dbname database)            database            ; connection details map only
diff --git a/src/metabase/driver/mysql.clj b/src/metabase/driver/mysql.clj
index d23e29a6ccfe6f7801f6abc1dfdf9300a809dec5..b8a3b84cb02a40370aeddd9889e48654e15e66d4 100644
--- a/src/metabase/driver/mysql.clj
+++ b/src/metabase/driver/mysql.clj
@@ -175,7 +175,7 @@
   (let [date-str (du/format-date :date-hour-minute-second-ms date)]
     (sql/make-stmt-subs (-> (create-hsql-for-date date date-str)
                             hx/->date
-                            (hsql/format :quoting (sql/quote-style (MySQLDriver.)))
+                            (hsql/format :quoting :mysql, :allow-dashed-names? true)
                             first)
                         [date-str])))
 
diff --git a/src/metabase/driver/oracle.clj b/src/metabase/driver/oracle.clj
index 76d9ba1574a40dd6050b2ea5f6f8753623d26098..d360fbe275eb8947c87adc7b609df59644e763f8 100644
--- a/src/metabase/driver/oracle.clj
+++ b/src/metabase/driver/oracle.clj
@@ -243,7 +243,7 @@
 (defn- remove-rownum-column
   "Remove the `:__rownum__` column from results, if present."
   [{:keys [columns rows], :as results}]
-  (if-not (contains? (set columns) :__rownum__)
+  (if-not (contains? (set columns) "__rownum__")
     results
     ;; if we added __rownum__ it will always be the last column and value so we can just remove that
     {:columns (butlast columns)
diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj
index ab31364f5018edb895ac2ef74fd826a3fa75dec7..73ef006436f999c3a019cd8ec708fa9c8cff2d0b 100644
--- a/src/metabase/driver/postgres.clj
+++ b/src/metabase/driver/postgres.clj
@@ -16,8 +16,7 @@
              [honeysql-extensions :as hx]
              [ssh :as ssh]])
   (:import java.sql.Time
-           java.util.UUID
-           metabase.query_processor.interface.Value))
+           java.util.UUID))
 
 (defrecord PostgresDriver []
   :load-ns true
@@ -191,14 +190,14 @@
     #".*" ; default
     message))
 
-(defmethod sqlqp/->honeysql [PostgresDriver Value]
-  [driver {value :value, {:keys [base-type database-type]} :field}]
-  (when (some? value)
-    (cond
-      (isa? base-type :type/UUID)         (UUID/fromString value)
-      (isa? base-type :type/IPAddress)    (hx/cast :inet value)
-      (isa? base-type :type/PostgresEnum) (hx/quoted-cast database-type value)
-      :else                               (sqlqp/->honeysql driver value))))
+(defmethod sqlqp/->honeysql [PostgresDriver :value] [driver value]
+  (let [[_ value {base-type :base_type, database-type :database_type}] value]
+    (when (some? value)
+      (cond
+        (isa? base-type :type/UUID)         (UUID/fromString value)
+        (isa? base-type :type/IPAddress)    (hx/cast :inet value)
+        (isa? base-type :type/PostgresEnum) (hx/quoted-cast database-type value)
+        :else                               (sqlqp/->honeysql driver value)))))
 
 (defmethod sqlqp/->honeysql [PostgresDriver Time]
   [_ time-value]
diff --git a/src/metabase/driver/presto.clj b/src/metabase/driver/presto.clj
index 0d904ca291f4a1c2a0bd39bbb024f2bf2e2e641d..5232bd592a9c7012c09d0ea7b066439b977d4613 100644
--- a/src/metabase/driver/presto.clj
+++ b/src/metabase/driver/presto.clj
@@ -18,15 +18,16 @@
             [metabase.driver.generic-sql :as sql]
             [metabase.driver.generic-sql.query-processor :as sqlqp]
             [metabase.driver.generic-sql.util.unprepare :as unprepare]
-            [metabase.query-processor.util :as qputil]
+            [metabase.query-processor
+             [store :as qp.store]
+             [util :as qputil]]
             [metabase.util
              [date :as du]
              [honeysql-extensions :as hx]
              [i18n :refer [tru]]
              [ssh :as ssh]])
   (:import java.sql.Time
-           java.util.Date
-           metabase.query_processor.interface.TimeValue))
+           java.util.Date))
 
 (defrecord PrestoDriver []
   :load-ns true
@@ -37,6 +38,7 @@
 
 (defn- details->uri
   [{:keys [ssl host port]} path]
+  {:pre [(string? host) (seq host) ((some-fn integer? string?) port)]}
   (str (if ssl "https" "http") "://" host ":" port path))
 
 (defn- details->request [{:keys [user password catalog report-timezone]}]
@@ -88,9 +90,10 @@
 (defn- parse-presto-results [report-timezone columns data]
   (let [parsers (map (comp #(field-type->parser report-timezone %) :type) columns)]
     (for [row data]
-      (for [[value parser] (partition 2 (interleave row parsers))]
-        (when (some? value)
-          (parser value))))))
+      (vec
+       (for [[value parser] (partition 2 (interleave row parsers))]
+         (when (some? value)
+           (parser value)))))))
 
 (defn- fetch-presto-results! [details {prev-columns :columns, prev-rows :rows} uri]
   (let [{{:keys [columns data nextUri error]} :body} (http/get uri (assoc (details->request details) :as :json))]
@@ -105,6 +108,7 @@
             (fetch-presto-results! details results nextUri))))))
 
 (defn- execute-presto-query! [details query]
+  {:pre [(map? details)]}
   (ssh/with-ssh-tunnel [details-with-tunnel details]
     (let [{{:keys [columns data nextUri error id]} :body :as foo} (http/post (details->uri details-with-tunnel "/v1/statement")
                                                                              (assoc (details->request details-with-tunnel) :body query, :as :json))]
@@ -145,14 +149,6 @@
 (defn- quote+combine-names [& names]
   (str/join \. (map quote-name names)))
 
-(defn- rename-duplicates [values]
-  ;; Appends _2, _3 and so on to duplicated values
-  (loop [acc [], [h & tail] values, seen {}]
-    (let [value (if (seen h) (str h "_" (inc (seen h))) h)]
-      (if tail
-        (recur (conj acc value) tail (assoc seen h (inc (get seen h 0))))
-        (conj acc value)))))
-
 ;;; IDriver implementation
 
 (defn- can-connect? [{:keys [catalog] :as details}]
@@ -220,6 +216,10 @@
   [_ date]
   (hsql/call :from_iso8601_timestamp (hx/literal (du/date->iso-8601 date))))
 
+(defmethod sqlqp/->honeysql [PrestoDriver :stddev]
+  [driver [_ field]]
+  (hsql/call :stddev_samp (sqlqp/->honeysql driver field)))
+
-(def ^:private time-format (tformat/formatter "HH:mm:SS.SSS"))
+;; NOTE: in Joda-Time patterns `S` is fraction-of-second; seconds are lowercase `ss`
+(def ^:private time-format (tformat/formatter "HH:mm:ss.SSS"))
 
 (defn- time->str
@@ -229,25 +229,32 @@
    (let [tz (time/time-zone-for-id tz-id)]
      (tformat/unparse (tformat/with-zone time-format tz) (tcoerce/to-date-time t)))))
 
-(defmethod sqlqp/->honeysql [PrestoDriver TimeValue]
-  [_ {:keys [value timezone-id]}]
-  (hx/cast :time (time->str value timezone-id)))
-
-(defmethod sqlqp/->honeysql [PrestoDriver Time]
-  [_ {:keys [value]}]
-  (hx/->time (time->str value)))
+(defmethod sqlqp/->honeysql [PrestoDriver :time]
+  [_ [_ value]]
+  (hx/cast :time (time->str value (driver/report-timezone))))
 
-(defn- execute-query [{:keys [database settings], {sql :query, params :params} :native, :as outer-query}]
+(defn- execute-query [{database-id                  :database
+                       :keys                        [settings]
+                       {sql :query, params :params} :native
+                       query-type                   :type
+                       :as                          outer-query}]
   (let [sql                    (str "-- "
                                     (qputil/query->remark outer-query) "\n"
                                     (unprepare/unprepare (cons sql params) :quote-escape "'", :iso-8601-fn :from_iso8601_timestamp))
-        details                (merge (:details database) settings)
+        details                (merge (:details (qp.store/database))
+                                      settings)
         {:keys [columns rows]} (execute-presto-query! details sql)
-        columns                (for [[col name] (map vector columns (rename-duplicates (map :name columns)))]
+        columns                (for [[col name] (map vector columns (map :name columns))]
                                  {:name name, :base_type (presto-type->base-type (:type col))})]
-    {:cols    columns
-     :columns (map (comp keyword :name) columns)
-     :rows    rows}))
+    (merge
+     {:columns (map (comp u/keyword->qualified-name :name) columns)
+      :rows    rows}
+     ;; only include `:cols` info for native queries for the time being, since it changes all the types up for MBQL
+     ;; queries (e.g. `:count` aggregations come back as `:type/BigInteger` instead of `:type/Integer`.) I don't want
+     ;; to deal with fixing a million tests to make it work at this second since it doesn't make a difference from an
+     ;; FE perspective. Perhaps when we get our test story sorted out a bit better we can fix this
+     (when (= query-type :native)
+       {:cols columns}))))
 
 
 (defn- humanize-connection-error-message [message]
@@ -359,7 +366,6 @@
           :date                      (u/drop-first-arg date)
           :excluded-schemas          (constantly #{"information_schema"})
           :quote-style               (constantly :ansi)
-          :stddev-fn                 (constantly :stddev_samp)
           :string-length-fn          (u/drop-first-arg string-length-fn)
           :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)}))
 
diff --git a/src/metabase/driver/snowflake.clj b/src/metabase/driver/snowflake.clj
new file mode 100644
index 0000000000000000000000000000000000000000..001c57a9920dbc9a038472d5fd2434f293982656
--- /dev/null
+++ b/src/metabase/driver/snowflake.clj
@@ -0,0 +1,241 @@
+(ns metabase.driver.snowflake
+  "Snowflake Driver."
+  (:require [clojure.string :as str]
+            [honeysql.core :as hsql]
+            [metabase
+             [driver :as driver]
+             [util :as u]]
+            [metabase.driver.generic-sql :as sql]
+            [metabase.driver.generic-sql.query-processor :as sql.qp]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util
+             [honeysql-extensions :as hx]
+             [ssh :as ssh]]
+            [toucan.db :as db])
+  (:import java.sql.Time))
+
+(defn- connection-details->spec
+  "Create a database specification for a snowflake database."
+  [{:keys [account regionid] :as opts}]
+  (let [host (if regionid
+               (str account "." regionid)
+               account)]
+    ;; it appears to be the case that their JDBC driver ignores `db` -- see my bug report at
+    ;; https://support.snowflake.net/s/question/0D50Z00008WTOMCSA5/
+    (merge {:subprotocol                                "snowflake"
+            :classname                                  "net.snowflake.client.jdbc.SnowflakeDriver"
+            :subname                                    (str "//" host ".snowflakecomputing.com/")
+            :client_metadata_request_use_connection_ctx true
+            :ssl                                        true
+            ;; other SESSION parameters
+            ;; use the same week start we use for all the other drivers
+            :week_start                                 7
+            ;; not 100% sure why we need to do this but if we don't set the connection to UTC our report timezone
+            ;; stuff doesn't work, even though we ultimately override this when we set the session timezone
+            :timezone                                   "UTC"}
+           (dissoc opts :host :port :timezone))))
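+;; e.g. (made-up account details) `{:account "ab12345", :regionid "us-east-1"}` yields
+;; :subname "//ab12345.us-east-1.snowflakecomputing.com/" in the resulting spec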
+
+(defrecord SnowflakeDriver []
+  :load-ns true
+  clojure.lang.Named
+  (getName [_] "Snowflake"))
+
+(def ^:private snowflake-date-formatters
+  "The default timestamp format for Snowflake.
+  See https://docs.snowflake.net/manuals/sql-reference/data-types-datetime.html#timestamp."
+  (driver/create-db-time-formatters "yyyy-MM-dd HH:mm:ss.SSSSSSSSS Z"))
+
+(def ^:private snowflake-db-time-query
+  "Snowflake current database time, with hour and minute timezone offset."
+  "select to_char(current_timestamp, 'YYYY-MM-DD HH24:MI:SS.FF TZHTZM')")
+
+(def ^:private column->base-type
+  "Map of the default Snowflake column types -> Field base types. Add more
+  mappings here as you come across them."
+  {:NUMBER                     :type/Number
+   :DECIMAL                    :type/Decimal
+   :NUMERIC                    :type/Number
+   :INT                        :type/Integer
+   :INTEGER                    :type/Integer
+   :BIGINT                     :type/BigInteger
+   :SMALLINT                   :type/Integer
+   :TINYINT                    :type/Integer
+   :BYTEINT                    :type/Integer
+   :FLOAT                      :type/Float
+   :FLOAT4                     :type/Float
+   :FLOAT8                     :type/Float
+   :DOUBLE                     :type/Float
+   (keyword "DOUBLE PRECISON") :type/Float
+   :REAL                       :type/Float
+   :VARCHAR                    :type/Text
+   :CHAR                       :type/Text
+   :CHARACTER                  :type/Text
+   :STRING                     :type/Text
+   :TEXT                       :type/Text
+   :BINARY                     :type/*
+   :VARBINARY                  :type/*
+   :BOOLEAN                    :type/Boolean
+   :DATE                       :type/Date
+   :DATETIME                   :type/DateTime
+   :TIME                       :type/Time
+   :TIMESTAMP                  :type/DateTime
+   :TIMESTAMPLTZ               :type/DateTime
+   :TIMESTAMPNTZ               :type/DateTime
+   :TIMESTAMPTZ                :type/DateTime
+   :VARIANT                    :type/*
+   ;; maybe OBJECT should be :type/* as well
+   :OBJECT                     :type/Dictionary
+   :ARRAY                      :type/*})
+
+(defn- unix-timestamp->timestamp [expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    :seconds      (hsql/call :to_timestamp expr)
+    :milliseconds (hsql/call :to_timestamp expr 3)))
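+;; Snowflake's TO_TIMESTAMP accepts an optional scale argument for numeric input, so :milliseconds compiles to
+;; to_timestamp(expr, 3), where scale 3 means the number is interpreted as milliseconds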
+
+(defn- date-interval [unit amount]
+  (hsql/call :dateadd
+    (hsql/raw (name unit))
+    (hsql/raw (int amount))
+    :%current_timestamp))
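+;; e.g. (date-interval :week 2) compiles to SQL along the lines of dateadd(week, 2, current_timestamp())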
+
+(defn- extract [unit expr] (hsql/call :date_part unit (hx/->timestamp expr)))
+(defn- date-trunc [unit expr] (hsql/call :date_trunc unit (hx/->timestamp expr)))
+
+(defn- date [unit expr]
+  (case unit
+    :default         expr
+    :minute          (date-trunc :minute expr)
+    :minute-of-hour  (extract :minute expr)
+    :hour            (date-trunc :hour expr)
+    :hour-of-day     (extract :hour expr)
+    :day             (date-trunc :day expr)
+    :day-of-week     (extract :dayofweek expr)
+    :day-of-month    (extract :day expr)
+    :day-of-year     (extract :dayofyear expr)
+    :week            (date-trunc :week expr)
+    :week-of-year    (extract :week expr)
+    :month           (date-trunc :month expr)
+    :month-of-year   (extract :month expr)
+    :quarter         (date-trunc :quarter expr)
+    :quarter-of-year (extract :quarter expr)
+    :year            (extract :year expr)))
+
+(defn- query-db-name []
+  (or (-> (qp.store/database) :details :db)
+      (throw (Exception. "Missing DB name"))))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver (class Field)]
+  [driver field]
+  (let [table            (qp.store/table (:table_id field))
+        db-name          (when-not (:alias? table)
+                           (query-db-name))
+        field-identifier (keyword
+                          (hx/qualify-and-escape-dots db-name (:schema table) (:name table) (:name field)))]
+    (sql.qp/cast-unix-timestamp-field-if-needed driver field field-identifier)))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver (class Table)]
+  [_ table]
+  (let [{table-name :name, schema :schema} table]
+    (hx/qualify-and-escape-dots (query-db-name) schema table-name)))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver :time]
+  [driver [_ value unit]]
+  (hx/->time (sql.qp/->honeysql driver value)))
+
+(defn- field->identifier
+  "Generate appropriate identifier for a Field for SQL parameters. (NOTE: THIS IS ONLY USED FOR SQL PARAMETERS!)"
+  ;; TODO - Making a DB call for each field to fetch its Table is inefficient and makes me cry, but this method is
+  ;; currently only used for SQL params so it's not a huge deal at this point
+  ;;
+  ;; TODO - we should make sure these are in the QP store somewhere and then could at least batch the calls
+  [driver {table-id :table_id, :as field}]
+  (qp.store/store-table! (db/select-one [Table :id :name :schema], :id (u/get-id table-id)))
+  (sql.qp/->honeysql driver field))
+
+
+(defn- table-rows-seq [driver database table]
+  (sql/query driver database {:select [:*]
+                              :from   [(qp.store/with-store
+                                         (qp.store/store-database! database)
+                                         (sql.qp/->honeysql driver table))]}))
+
+(defn- string-length-fn [field-key]
+  (hsql/call :length (hx/cast :VARCHAR field-key)))
+
+(defn- describe-database [driver database]
+  (sql/with-metadata [metadata driver database]
+    {:tables (sql/fast-active-tables driver metadata (:name database))}))
+
+(defn- describe-table [driver database table]
+  (sql/with-metadata [metadata driver database]
+    (->> (assoc (select-keys table [:name :schema])
+           :fields (sql/describe-table-fields metadata driver table (:name database)))
+         ;; find PKs and mark them
+         (sql/add-table-pks metadata))))
+
+(defn- describe-table-fks [driver database table]
+  (sql/describe-table-fks driver database table (:name database)))
+
+(u/strict-extend SnowflakeDriver
+  driver/IDriver
+  (merge (sql/IDriverSQLDefaultsMixin)
+         {:date-interval            (u/drop-first-arg date-interval)
+          :details-fields           (constantly (ssh/with-tunnel-config
+                                                  [{:name         "account"
+                                                    :display-name "Account"
+                                                    :placeholder  "Your Snowflake account name."
+                                                    :required     true}
+                                                   {:name         "user"
+                                                    :display-name "Database username"
+                                                    :placeholder  "ken bier"
+                                                    :required     true}
+                                                   {:name         "password"
+                                                    :display-name "Database user password"
+                                                    :type         :password
+                                                    :placeholder  "*******"
+                                                    :required     true}
+                                                   {:name         "warehouse"
+                                                    :display-name "Warehouse"
+                                                    :placeholder  "my_warehouse"}
+                                                   {:name         "dbname"
+                                                    :display-name "Database name"
+                                                    :placeholder  "cockerel"}
+                                                   {:name         "regionid"
+                                                    :display-name "Region Id"
+                                                    :placeholder  "my_region"}
+                                                   {:name         "schema"
+                                                    :display-name "Schema"
+                                                    :placeholder  "my_schema"}
+                                                   {:name         "role"
+                                                    :display-name "Role"
+                                                    :placeholder  "my_role"}]))
+          :format-custom-field-name (u/drop-first-arg str/lower-case)
+          :current-db-time          (driver/make-current-db-time-fn
+                                     snowflake-db-time-query
+                                     snowflake-date-formatters)
+          :table-rows-seq           table-rows-seq
+          :describe-database        describe-database
+          :describe-table           describe-table
+          :describe-table-fks       describe-table-fks})
+
+  sql/ISQLDriver
+  (merge (sql/ISQLDriverDefaultsMixin)
+         {:connection-details->spec  (u/drop-first-arg connection-details->spec)
+          :string-length-fn          (u/drop-first-arg string-length-fn)
+          :excluded-schemas          (constantly #{"INFORMATION_SCHEMA"})
+          :date                      (u/drop-first-arg date)
+          :field->identifier         field->identifier
+          :current-datetime-fn       (constantly :%current_timestamp)
+          :set-timezone-sql          (constantly "ALTER SESSION SET TIMEZONE = %s;")
+          :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)
+          :column->base-type         (u/drop-first-arg column->base-type)}))
+
+
+(defn -init-driver
+  "Register the Snowflake driver"
+  []
+  (driver/register-driver! :snowflake (SnowflakeDriver.)))
diff --git a/src/metabase/driver/sparksql.clj b/src/metabase/driver/sparksql.clj
index 75486d54fac0a0ddd5742d1c22bb4cbe6c2b08fd..a429e0a53ce698b6c0cf2391f470913ff4738fc9 100644
--- a/src/metabase/driver/sparksql.clj
+++ b/src/metabase/driver/sparksql.clj
@@ -16,6 +16,7 @@
              [hive-like :as hive-like]]
             [metabase.driver.generic-sql.query-processor :as sqlqp]
             [metabase.mbql.util :as mbql.u]
+            [metabase.models.field :refer [Field]]
             [metabase.query-processor
              [store :as qp.store]
              [util :as qputil]]
@@ -23,8 +24,7 @@
              [honeysql-extensions :as hx]
              [i18n :refer [trs tru]]])
   (:import clojure.lang.Reflector
-           java.sql.DriverManager
-           metabase.query_processor.interface.Field))
+           java.sql.DriverManager))
 
 (defrecord SparkSQLDriver []
   :load-ns true
@@ -36,24 +36,33 @@
 
 (def ^:private source-table-alias "t1")
 
-(defn- resolve-table-alias [{:keys [schema-name table-name special-type field-name] :as field}]
-  (let [source-table (qp.store/table (mbql.u/query->source-table-id sqlqp/*query*))]
-    (if (and (= schema-name (:schema source-table))
-             (= table-name (:name source-table)))
-      (-> (assoc field :schema-name nil)
-          (assoc :table-name source-table-alias))
-      (if-let [matching-join-table (->> (get-in sqlqp/*query* [:query :join-tables])
-                                        (filter #(and (= schema-name (:schema %))
-                                                      (= table-name (:table-name %))))
-                                        first)]
-        (-> (assoc field :schema-name nil)
-            (assoc :table-name (:join-alias matching-join-table)))
-        field))))
-
-(defmethod  sqlqp/->honeysql [SparkSQLDriver Field]
+(defn- resolve-table-alias [{field-name :name, special-type :special_type, table-id :table_id, :as field}]
+  (let [{schema-name :schema, table-name :name} (qp.store/table table-id)
+        source-table                            (qp.store/table (mbql.u/query->source-table-id sqlqp/*query*))
+        matching-join-table                     (some (fn [{:keys [table-id]}]
+                                                        (let [join-table (qp.store/table table-id)]
+                                                          (when (and (= schema-name (:schema join-table))
+                                                                     (= table-name (:name join-table)))
+                                                            join-table)))
+                                                      (get-in sqlqp/*query* [:query :join-tables]))]
+    (cond
+      (and (= schema-name (:schema source-table))
+           (= table-name (:name source-table)))
+      (assoc field :schema nil, :table source-table-alias)
+
+      matching-join-table
+      (assoc field :schema nil, :table (:join-alias matching-join-table))
+
+      :else
+      field)))
+
+(defmethod  sqlqp/->honeysql [SparkSQLDriver (class Field)]
   [driver field-before-aliasing]
-  (let [{:keys [schema-name table-name special-type field-name] :as foo} (resolve-table-alias field-before-aliasing)
-        field (keyword (hx/qualify-and-escape-dots schema-name table-name field-name))]
+  (let [{schema-name  :schema
+         table-name   :table
+         special-type :special_type
+         field-name   :name} (resolve-table-alias field-before-aliasing)
+        field                (keyword (hx/qualify-and-escape-dots schema-name table-name field-name))]
     (cond
       (isa? special-type :type/UNIXTimestampSeconds)      (sql/unix-timestamp->timestamp driver field :seconds)
       (isa? special-type :type/UNIXTimestampMilliseconds) (sql/unix-timestamp->timestamp driver field :milliseconds)
@@ -61,11 +70,15 @@
 
 (defn- apply-join-tables
   [honeysql-form {join-tables :join-tables}]
-  (loop [honeysql-form honeysql-form, [{:keys [table-name pk-field source-field schema join-alias]} & more] join-tables]
-    (let [honeysql-form (h/merge-left-join honeysql-form
-                          [(hx/qualify-and-escape-dots schema table-name) (keyword join-alias)]
-                          [:= (hx/qualify-and-escape-dots source-table-alias (:field-name source-field))
-                           (hx/qualify-and-escape-dots join-alias         (:field-name pk-field))])]
+  (loop [honeysql-form honeysql-form, [{:keys [table-id pk-field-id fk-field-id schema join-alias]} & more] join-tables]
+    (let [{table-name :name} (qp.store/table table-id)
+          source-field       (qp.store/field fk-field-id)
+          pk-field           (qp.store/field pk-field-id)
+          honeysql-form      (h/merge-left-join honeysql-form
+                                                [(hx/qualify-and-escape-dots schema table-name) (keyword join-alias)]
+                                                [:=
+                                                 (hx/qualify-and-escape-dots source-table-alias (:field-name source-field))
+                                                 (hx/qualify-and-escape-dots join-alias         (:field-name pk-field))])]
       (if (seq more)
         (recur honeysql-form more)
         honeysql-form))))
diff --git a/src/metabase/driver/sqlserver.clj b/src/metabase/driver/sqlserver.clj
index 348e28c1a6358472e2bc6d2394bd550326c1fe39..6038850b4d64551db0f65eb1ce6558f49fdee7dc 100644
--- a/src/metabase/driver/sqlserver.clj
+++ b/src/metabase/driver/sqlserver.clj
@@ -164,6 +164,10 @@
   [_ time-value]
   (hx/->time time-value))
 
+(defmethod sqlqp/->honeysql [SQLServerDriver :stddev]
+  [driver [_ field]]
+  (hsql/call :stdev (sqlqp/->honeysql driver field)))
+
 (defn- string-length-fn [field-key]
   (hsql/call :len (hx/cast :VARCHAR field-key)))
 
@@ -210,7 +214,6 @@
     :current-datetime-fn       (constantly :%getutcdate)
     :date                      (u/drop-first-arg date)
     :excluded-schemas          (constantly #{"sys" "INFORMATION_SCHEMA"})
-    :stddev-fn                 (constantly :stdev)
     :string-length-fn          (u/drop-first-arg string-length-fn)
     :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)}))
 
diff --git a/src/metabase/events/activity_feed.clj b/src/metabase/events/activity_feed.clj
index 0b6aaae0db20e824436ad40b07a6003dc87296a2..aad5306c961b1fede7ab226e31979af27c6a06e1 100644
--- a/src/metabase/events/activity_feed.clj
+++ b/src/metabase/events/activity_feed.clj
@@ -11,9 +11,10 @@
              [card :refer [Card]]
              [dashboard :refer [Dashboard]]
              [table :as table]]
+            [metabase.util.i18n :refer [tru]]
             [toucan.db :as db]))
 
-(def activity-feed-topics
+(def ^:private activity-feed-topics
   "The `Set` of event topics which are subscribed to for use in the Metabase activity feed."
   #{:alert-create
     :alert-delete
@@ -42,11 +43,13 @@
 
 ;;; ------------------------------------------------ EVENT PROCESSING ------------------------------------------------
 
-(defn- process-card-activity! [topic object]
+(defn- process-card-activity! [topic {query :dataset_query, :as object}]
   (let [details-fn  #(select-keys % [:name :description])
-        query       (u/ignore-exceptions (qp/expand (:dataset_query object)))
-        database-id (when-let [database (:database query)]
-                      (u/get-id database))
+        query       (when (seq query)
+                      (try (qp/query->preprocessed query)
+                           (catch Throwable e
+                             (log/error e (tru "Error preprocessing query:")))))
+        database-id (some-> query :database u/get-id)
         table-id    (mbql.u/query->source-table-id query)]
     (activity/record-activity!
       :topic       topic
diff --git a/src/metabase/integrations/slack.clj b/src/metabase/integrations/slack.clj
index 97df78f092a442ec7b71a0ec5bdb8d8b0459303c..e0164cdf71e4fdbf7a070a8da6bb592a3f6a6e65 100644
--- a/src/metabase/integrations/slack.clj
+++ b/src/metabase/integrations/slack.clj
@@ -1,6 +1,7 @@
 (ns metabase.integrations.slack
   (:require [cheshire.core :as json]
             [clj-http.client :as http]
+            [clojure.java.io :as io]
             [clojure.tools.logging :as log]
             [metabase.models.setting :as setting :refer [defsetting]]
             [metabase.util.i18n :refer [tru]]
@@ -19,7 +20,7 @@
 
 
 (defn- handle-response [{:keys [status body]}]
-  (let [body (json/parse-string body keyword)]
+  (let [body (-> body io/reader (json/parse-stream keyword))]
     (if (and (= 200 status) (:ok body))
       body
       (let [error (if (= (:error body) "invalid_auth")
@@ -31,15 +32,16 @@
 (defn- do-slack-request [request-fn params-key endpoint & {:keys [token], :as params, :or {token (slack-token)}}]
   (when token
     (handle-response (request-fn (str slack-api-base-url "/" (name endpoint)) {params-key      (assoc params :token token)
-                                                                              :conn-timeout   1000
-                                                                              :socket-timeout 1000}))))
+                                                                               :as             :stream
+                                                                               :conn-timeout   1000
+                                                                               :socket-timeout 1000}))))
 
 (def ^{:arglists '([endpoint & {:as params}]), :style/indent 1} GET  "Make a GET request to the Slack API."  (partial do-slack-request http/get  :query-params))
 (def ^{:arglists '([endpoint & {:as params}]), :style/indent 1} POST "Make a POST request to the Slack API." (partial do-slack-request http/post :form-params))
 
 (def ^{:arglists '([& {:as args}])} channels-list
   "Calls Slack api `channels.list` function and returns the list of available channels."
-  (comp :channels (partial GET :channels.list, :exclude_archived 1)))
+  (comp :channels (partial GET :channels.list, :exclude_archived true, :exclude_members true)))
 
 (def ^{:arglists '([& {:as args}])} users-list
   "Calls Slack api `users.list` function and returns the list of available users."
@@ -55,7 +57,7 @@
   []
   (some (fn [channel] (when (= (:name channel) files-channel-name)
                         channel))
-        (channels-list :exclude_archived 0)))
+        (channels-list :exclude_archived false)))
 
 (defn files-channel
   "Calls Slack api `channels.info` to check whether a channel named #metabase_files exists. If it doesn't,
diff --git a/src/metabase/mbql/normalize.clj b/src/metabase/mbql/normalize.clj
index 97659f887571ad4e03a9ab530ce6ae5aad01b7da..fd44ee2c21f481f1d1e3b66aa093ecd5b5fbcadd 100644
--- a/src/metabase/mbql/normalize.clj
+++ b/src/metabase/mbql/normalize.clj
@@ -1,7 +1,7 @@
 (ns metabase.mbql.normalize
   "Logic for taking any sort of weird MBQL query and normalizing it into a standardized, canonical form. You can think
   of this like taking any 'valid' MBQL query and rewriting it as-if it was written in perfect up-to-date MBQL in the
-  latest version. There are two main things done here, done as three separate steps:
+  latest version. There are four main things done here, performed as four separate steps:
 
   #### NORMALIZING TOKENS
 
@@ -10,17 +10,30 @@
 
   #### CANONICALIZING THE QUERY
 
-  Rewriting deprecated MBQL 95 syntax and other things that are still supported for backwards-compatibility in
-  canonical MBQL 98 syntax. For example `{:breakout [:count 10]}` becomes `{:breakout [[:count [:field-id 10]]]}`.
+  Rewriting deprecated MBQL 95/98 syntax and other things that are still supported for backwards-compatibility in
+  canonical MBQL 2000 syntax. For example `{:breakout [:count 10]}` becomes `{:breakout [[:count [:field-id 10]]]}`.
+
+  #### WHOLE-QUERY TRANSFORMATIONS
+
+  Transformations and cleanup of the query structure as a whole to fix inconsistencies. Whereas the canonicalization
+  phase operates at a lower level, transforming individual clauses, this phase focuses on transformations that affect
+  multiple clauses, such as removing duplicate references to Fields if they are specified in both the `:breakout` and
+  `:fields` clauses.
+
+  This is not the only place that does such transformations; several pieces of QP middleware perform similar
+  individual transformations, such as `reconcile-breakout-and-order-by-bucketing`.
 
   #### REMOVING EMPTY CLAUSES
 
   Removing empty clauses like `{:aggregation nil}` or `{:breakout []}`.
 
-  Token normalization occurs first, followed by canonicalization, followed by removing empty clauses.
+  Token normalization occurs first, followed by canonicalization, then whole-query transformations, and finally
+  removing empty clauses.
-  (:require [clojure.walk :as walk]
+  (:require [clojure.tools.logging :as log]
+            [clojure.walk :as walk]
             [medley.core :as m]
-            [metabase.mbql.util :as mbql.u]
+            [metabase.mbql
+             [predicates :as mbql.pred]
+             [util :as mbql.u]]
             [metabase.util :as u]
             [metabase.util.i18n :refer [tru]]))
 
@@ -172,7 +185,7 @@
   [clauses]
   (vec (for [subclause clauses]
          (if (mbql-clause? subclause)
-           ;; MBQL 98 [direction field] style: normalize as normal
+           ;; MBQL 98+ [direction field] style: normalize as normal
            (normalize-mbql-clause-tokens subclause)
            ;; otherwise it's MBQL 95 [field direction] style: flip the args and *then* normalize the clause. And then
            ;; flip it back to put it back the way we found it.
@@ -314,7 +327,7 @@
     [ag-type (wrap-implicit-field-id field)]))
 
 (defn- wrap-single-aggregations
-  "Convert old MBQL 95 single-aggregations like `{:aggregation :count}` or `{:aggregation [:count]}` to MBQL 98
+  "Convert old MBQL 95 single-aggregations like `{:aggregation :count}` or `{:aggregation [:count]}` to MBQL 98+
   multiple-aggregation syntax (e.g. `{:aggregation [[:count]]}`)."
   [aggregations]
   (mbql.u/replace aggregations
@@ -403,8 +416,11 @@
     [:ascending field]  (recur [:asc field])
     [:descending field] (recur [:desc field])
 
-    [:asc field]  [:asc (wrap-implicit-field-id field)]
-    [:desc field] [:desc (wrap-implicit-field-id field)]))
+    [:asc field]  [:asc  (wrap-implicit-field-id field)]
+    [:desc field] [:desc (wrap-implicit-field-id field)]
+
+    ;; this case should be the first one hit when we come in with a vector of clauses e.g. [[:asc 1] [:desc 2]]
+    [& clauses] (vec (distinct (map canonicalize-order-by clauses)))))
 
 (declare canonicalize-inner-mbql-query)
 
@@ -465,7 +481,11 @@
        (let [[clause-name & _] clause
              f                 (mbql-clause->canonicalization-fn clause-name)]
          (if f
-           (apply f clause)
+           (try
+             (apply f clause)
+             (catch Throwable e
+               (log/error (tru "Invalid clause:") clause)
+               (throw e)))
            clause))))
    mbql-query))
 
@@ -481,6 +501,42 @@
     (:parameters outer-query) (update :parameters (partial mapv canonicalize-mbql-clauses))))
 
 
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                          WHOLE-QUERY TRANSFORMATIONS                                           |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+(defn- remove-breakout-fields-from-fields
+  "Remove any Fields specified in both `:breakout` and `:fields` from `:fields`; it is implied that any breakout Field
+  will be returned, specifying it in both would imply it is to be returned twice, which tends to cause confusion for
+  the QP and drivers. (This is done to work around historic bugs with the way queries were generated on the frontend;
+  I'm not sure this behavior makes sense, but removing it would break existing queries.)
+
+  We will remove either exact matches:
+
+    {:breakout [[:field-id 10]], :fields [[:field-id 10]]} ; -> {:breakout [[:field-id 10]]}
+
+  or unbucketed matches:
+
+    {:breakout [[:datetime-field [:field-id 10] :month]], :fields [[:field-id 10]]}
+    ;; -> {:breakout [[:field-id 10]]}"
+  [{{:keys [breakout fields]} :query, :as query}]
+  (if-not (and (seq breakout) (seq fields))
+    query
+    ;; get a set of all Field clauses (of any type) in the breakout. For `datetime-field` clauses, we'll include both
+    ;; the bucketed `[:datetime-field <field> ...]` clause and the `<field>` clause it wraps
+    (let [breakout-fields (set (reduce concat (mbql.u/match breakout
+                                                [:datetime-field field-clause _] [&match field-clause]
+                                                mbql.pred/Field?                 [&match])))]
+      ;; now remove all the Fields in `:fields` that match the ones in the set
+      (update-in query [:query :fields] (comp vec (partial remove breakout-fields))))))
+
+(defn- perform-whole-query-transformations
+  "Perform transformations that operate on the query as a whole, making sure the structure as a whole is logical and
+  consistent."
+  [query]
+  (-> query
+      remove-breakout-fields-from-fields))
+
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                             REMOVING EMPTY CLAUSES                                             |
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -533,8 +589,9 @@
 ;; all mergable
 (def ^{:arglists '([outer-query])} normalize
   "Normalize the tokens in a Metabase query (i.e., make them all `lisp-case` keywords), rewrite deprecated clauses as
-  up-to-date MBQL 98, and remove empty clauses."
+  up-to-date MBQL 2000, and remove empty clauses."
   (comp remove-empty-clauses
+        perform-whole-query-transformations
         canonicalize
         normalize-tokens))
 
diff --git a/src/metabase/mbql/predicates.clj b/src/metabase/mbql/predicates.clj
new file mode 100644
index 0000000000000000000000000000000000000000..88ccf76d0380f0e8173cfbcc4475f769c2dc5dd0
--- /dev/null
+++ b/src/metabase/mbql/predicates.clj
@@ -0,0 +1,23 @@
+(ns metabase.mbql.predicates
+  "Predicate functions for checking whether something is a valid instance of a given MBQL clause."
+  (:require [metabase.mbql.schema :as mbql.s]
+            [schema.core :as s]))
+
+;; This namespace only covers a few clauses so far; please add more predicates as we write them so they can be
+;; reused elsewhere.
+
+(def ^{:arglists '([unit])} TimeUnit?
+  "Is `unit` a datetime bucketing unit referring only to time, such as `hour` or `minute`?"
+  (complement (s/checker mbql.s/TimeUnit)))
+
+(def ^{:arglists '([unit])} DatetimeFieldUnit?
+  "Is `unit` a valid datetime bucketing unit?"
+  (complement (s/checker mbql.s/DatetimeFieldUnit)))
+
+(def ^{:arglists '([ag-clause])} Aggregation?
+  "Is this a valid Aggregation clause?"
+  (complement (s/checker mbql.s/Aggregation)))
+
+(def ^{:arglists '([field-clause])} Field?
+  "Is this a valid Field clause?"
+  (complement (s/checker mbql.s/Field)))
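+
+;; `s/checker` compiles a schema once into a fn that returns nil for valid values, so wrapping it in `complement`
+;; yields a cheap boolean predicate, e.g. (Field? [:field-id 10]) ;; => true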
diff --git a/src/metabase/mbql/schema.clj b/src/metabase/mbql/schema.clj
index ec46a8c102f84a1b8afbb72474bed67727e7081d..13732aae57643ac674921d180e6e2189d193cd8c 100644
--- a/src/metabase/mbql/schema.clj
+++ b/src/metabase/mbql/schema.clj
@@ -1,6 +1,6 @@
 (ns metabase.mbql.schema
   "Schema for validating a *normalized* MBQL query. This is also the definitive grammar for MBQL, wow!"
-  (:refer-clojure :exclude [count distinct min max + - / * and or not = < > <= >=])
+  (:refer-clojure :exclude [count distinct min max + - / * and or not = < > <= >= time])
   (:require [clojure
              [core :as core]
              [set :as set]]
@@ -46,7 +46,7 @@
    "relative-datetime-unit"))
 
 (def ^:private LiteralDatetimeString
-  "Schema for an MBQL datetime string literal, in ISO-8601 format."
+  "Schema for an MBQL datetime string literal, in ISO-8601 format. (This also accepts literal time stings.)"
   (s/constrained su/NonBlankString du/date-string? "datetime-literal"))
 
 ;; TODO - `unit` is not allowed if `n` is `current`
@@ -64,27 +64,45 @@
 ;;
 ;; becomes:
 ;; [:= [:datetime-field [:field-id 10] :day] [:absolute-datetime #inst "2018-10-02" :day]]
-
 (defclause ^:internal absolute-datetime
   timestamp java.sql.Timestamp
   unit      DatetimeFieldUnit)
 
+;; it could make sense to say hour-of-day(field) =  hour-of-day("2018-10-10T12:00")
+;; but it does not make sense to say month-of-year(field) = month-of-year("08:00:00"),
+;; does it? So we'll restrict the set of units a TimeValue can have to ones that have no notion of day/date.
+(def TimeUnit
+  "Valid unit for time bucketing."
+  (apply s/enum #{:default :minute :minute-of-hour :hour :hour-of-day}))
+
+;; almost exactly the same as `absolute-datetime`, but generated in some situations where the literal in question was
+;; clearly a time (e.g. "08:00:00.000") and/or the Field derived from `:type/Time` and/or the unit was a
+;; time-bucketing unit
+(defclause ^:internal time
+  time java.sql.Time
+  unit TimeUnit)
+
 (def ^:private DatetimeLiteral
-  "Schema for valid absoulute datetime literals."
-  (s/if (partial is-clause? :absolute-datetime)
-    absolute-datetime
-    (s/cond-pre
-     ;; literal datetime strings and Java types will get transformed to `absolute-datetime` clauses automatically by
-     ;; middleware so drivers don't need to deal with these directly. You only need to worry about handling
-     ;; `absolute-datetime` clauses.
-     LiteralDatetimeString
-     java.sql.Date
-     java.util.Date)))
+  "Schema for valid absolute datetime literals."
+  (s/conditional
+   (partial is-clause? :absolute-datetime)
+   absolute-datetime
+
+   (partial is-clause? :time)
+   time
+
+   :else
+   (s/cond-pre
+    ;; literal datetime strings and Java types will get transformed to `absolute-datetime` clauses automatically by
+    ;; middleware so drivers don't need to deal with these directly. You only need to worry about handling
+    ;; `absolute-datetime` clauses.
+    LiteralDatetimeString
+    java.util.Date)))
 
 (def DateTimeValue
   "Schema for a datetime value drivers will personally have to handle, either an `absolute-datetime` form or a
   `relative-datetime` form."
-  (one-of absolute-datetime relative-datetime))
+  (one-of absolute-datetime relative-datetime time))
 
 
 ;;; -------------------------------------------------- Other Values --------------------------------------------------
@@ -135,12 +153,13 @@
 ;; automatically bucketed, so drivers still need to make sure they do any special datetime handling for plain
 ;; `:field-id` clauses when their Field derives from `:type/DateTime`.
 ;;
-;; Datetime Field can wrap any of the lowest-level Field clauses or expression references, but not other
-;; datetime-field clauses, because that wouldn't make sense
+;; Datetime Field can wrap any of the lowest-level Field clauses, but not other datetime-field clauses, because that
+;; wouldn't make sense. They similarly can not wrap expression references, because doing arithmetic on timestamps
+;; doesn't make a whole lot of sense (what does `"2018-10-23"::timestamp / 2` mean?).
 ;;
 ;; Field is an implicit Field ID
 (defclause datetime-field
-  field (one-of field-id field-literal fk-> expression)
+  field (one-of field-id field-literal fk->)
   unit  DatetimeFieldUnit)
 
 ;; binning strategy can wrap any of the above clauses, but again, not another binning strategy clause
@@ -162,16 +181,16 @@
 ;; TODO - binning strategy param is disallowed for `:default` and required for the others. For `num-bins` it must also
 ;; be an integer.
 (defclause ^{:requires-features #{:binning}} binning-strategy
-  field          BinnableField
-  strategy-name  BinningStrategyName
-  strategy-param (optional (s/constrained s/Num (complement neg?) "strategy param must be >= 0."))
+  field            BinnableField
+  strategy-name    BinningStrategyName
+  strategy-param   (optional (s/constrained s/Num (complement neg?) "strategy param must be >= 0."))
-  ;; These are added in automatically by the `binning` middleware. Don't add them yourself, as they're just be
-  ;; replaced.
+  ;; These are added automatically by the `binning` middleware. Don't add them yourself, as they'll just be
+  ;; replaced. Driver implementations can rely on this being populated.
   resolved-options (optional ResolvedBinningStrategyOptions))
 
 (def Field
   "Schema for anything that refers to a Field, from the common `[:field-id <id>]` to variants like `:datetime-field` or
-  `:fk->`."
+  `:fk->`, or an expression reference `[:expression <name>]`."
   (one-of field-id field-literal fk-> datetime-field expression binning-strategy))
 
 ;; aggregate field reference refers to an aggregation, e.g.
@@ -263,12 +282,12 @@
 
 ;; the following are definitions for expression aggregations, e.g. [:+ [:sum [:field-id 10]] [:sum [:field-id 20]]]
 
-(declare UnnamedAggregation)
+(declare Aggregation)
 
 (def ^:private ExpressionAggregationArg
   (s/if number?
     s/Num
-    (s/recursive #'UnnamedAggregation)))
+    (s/recursive #'Aggregation)))
 
 (defclause [^{:requires-features #{:expression-aggregations}} ag:+   +]
   x ExpressionAggregationArg, y ExpressionAggregationArg, more (rest ExpressionAggregationArg))
@@ -519,27 +538,39 @@
   "Schema for a valid value for the `:source-table` clause of an MBQL query."
   (s/cond-pre su/IntGreaterThanZero source-table-card-id-regex))
 
+(defn- distinct-non-empty [schema]
+  (s/constrained schema (every-pred seq (partial apply distinct?)) "non-empty sequence of distinct items"))
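+
+;; e.g. [[:asc [:field-id 1]]] satisfies (distinct-non-empty [OrderBy]), while [] or a sequence that repeats the
+;; same order-by subclause does not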
+
 (def MBQLQuery
   "Schema for a valid, normalized MBQL [inner] query."
-  (s/constrained
+  (->
    {(s/optional-key :source-query) SourceQuery
     (s/optional-key :source-table) SourceTable
     (s/optional-key :aggregation)  (su/non-empty [Aggregation])
     (s/optional-key :breakout)     (su/non-empty [Field])
-    (s/optional-key :expressions)  {s/Keyword ExpressionDef} ; TODO - I think expressions keys should be strings
+    ;; TODO - expressions keys should be strings; fix this when we get a chance
+    (s/optional-key :expressions)  {s/Keyword ExpressionDef}
+    ;; TODO - should this be `distinct-non-empty`?
     (s/optional-key :fields)       (su/non-empty [Field])
     (s/optional-key :filter)       Filter
     (s/optional-key :limit)        su/IntGreaterThanZero
-    (s/optional-key :order-by)     (su/non-empty [OrderBy])
+    (s/optional-key :order-by)     (distinct-non-empty [OrderBy])
     (s/optional-key :page)         {:page  su/IntGreaterThanOrEqualToZero
                                     :items su/IntGreaterThanZero}
    ;; Various bits of middleware add additional keys, such as `fields-is-implicit?`, to record bits of state or pass
     ;; info to other pieces of middleware. Everyone else can ignore them.
     (s/optional-key :join-tables)  (s/constrained [JoinInfo] (partial apply distinct?) "distinct JoinInfo")
     s/Keyword                      s/Any}
-   (fn [query]
-     (core/= 1 (core/count (select-keys query [:source-query :source-table]))))
-   "Query must specify either `:source-table` or `:source-query`, but not both."))
+
+   (s/constrained
+    (fn [query]
+      (core/= 1 (core/count (select-keys query [:source-query :source-table]))))
+    "Query must specify either `:source-table` or `:source-query`, but not both.")
+
+   (s/constrained
+    (fn [{:keys [breakout fields]}]
+      (empty? (set/intersection (set breakout) (set fields))))
+    "Fields specified in `:breakout` should not be specified in `:fields`; this is implied.")))
 
 
 ;;; ----------------------------------------------------- Params -----------------------------------------------------
@@ -579,6 +610,10 @@
    ;; should we skip converting datetime types to ISO-8601 strings with appropriate timezone when post-processing
    ;; results? Used by `metabase.query-processor.middleware.format-rows`; default `false`
    (s/optional-key :format-rows?)           s/Bool
+   ;; disable the MBQL->native middleware. If you do this, the query will not work at all, so there are no cases where
+   ;; you should set this yourself. This is only used by the `qp/query->preprocessed` function to get the fully
+   ;; pre-processed query without attempting to convert it to native.
+   (s/optional-key :disable-mbql->native?)  s/Bool
    ;; other middleware options might be used somewhere, but I don't know about them. Add them if you come across them
    ;; for documentation purposes
    s/Keyword                                s/Any})
diff --git a/src/metabase/mbql/util.clj b/src/metabase/mbql/util.clj
index ba31f6f9098f2d9e7d702f181455dc22746e7b2b..d6fac37a89167abc013947a23bfb8bac474f7c12 100644
--- a/src/metabase/mbql/util.clj
+++ b/src/metabase/mbql/util.clj
@@ -1,9 +1,7 @@
 (ns metabase.mbql.util
   "Utilitiy functions for working with MBQL queries."
   (:refer-clojure :exclude [replace])
-  (:require [clojure
-             [string :as str]
-             [walk :as walk]]
+  (:require [clojure.string :as str]
             [metabase.mbql.schema :as mbql.s]
             [metabase.mbql.util.match :as mbql.match]
             [metabase.util :as u]
@@ -166,45 +164,11 @@
        form#
        (update-in form# ks# #(mbql.match/replace % ~patterns-and-results)))))
 
-(defn ^:deprecated clause-instances
-  "DEPRECATED: use `match` instead."
-  {:style/indent 1}
-  [k-or-ks x & {:keys [include-subclauses?], :or {include-subclauses? false}}]
-  (let [instances (atom [])]
-    (walk/prewalk
-     (fn [clause]
-       (if (is-clause? k-or-ks clause)
-         (do (swap! instances conj clause)
-             (when include-subclauses?
-               clause))
-         clause))
-     x)
-    (seq @instances)))
-
-(defn ^:deprecated replace-clauses
-  "DEPRECATED: use `replace` instead."
-  {:style/indent 2}
-  [query k-or-ks f]
-  (walk/postwalk
-   (fn [clause]
-     (if (is-clause? k-or-ks clause)
-       (f clause)
-       clause))
-   query))
-
-(defn ^:deprecated replace-clauses-in
-  "DEPRECATED: use `replace-in` instead!"
-  {:style/indent 3}
-  [query keypath k-or-ks f]
-  (update-in query keypath #(replace-clauses % k-or-ks f)))
-
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                       Functions for manipulating queries                                       |
 ;;; +----------------------------------------------------------------------------------------------------------------+
 
-;; TODO - I think we actually should move this stuff into a `mbql.helpers` namespace so we can use the util functions
-;; above in the `schema.helpers` namespace instead of duplicating them
 (defn- combine-compound-filters-of-type [compound-type subclauses]
   (mapcat #(match-one %
@@ -218,19 +182,28 @@
 (s/defn simplify-compound-filter :- (s/maybe mbql.s/Filter)
   "Simplify compound `:and`, `:or`, and `:not` compound filters, combining or eliminating them where possible. This
   also fixes theoretically disallowed compound filters like `:and` with only a single subclause, and eliminates `nils`
-  from the clauses."
+  and duplicate subclauses from the clauses."
   [filter-clause]
   (replace filter-clause
     seq? (recur (vec &match))
 
    ;; if this is an empty filter, toss it
-    nil              nil
-    []               nil
-    [(:or :and :or)] nil
+    nil                                  nil
+    [& (_ :guard (partial every? nil?))] nil
+    []                                   nil
+    [(:or :and :or)]                     nil
+
+    ;; if the COMPOUND clause contains any nils, toss them
+    [(clause-name :guard #{:and :or}) & (args :guard (partial some nil?))]
+    (recur (apply vector clause-name (filterv some? args)))
 
-    ;; if the clause contains any nils, toss them
-    [& (args :guard (partial some nil?))]
-    (recur (filterv some? args))
+    ;; Rewrite a `:not` over `:and` using de Morgan's law
+    [:not [:and & args]]
+    (recur (apply vector :or (map #(vector :not %) args)))
+
+    ;; Rewrite a `:not` over `:or` using de Morgan's law
+    [:not [:or & args]]
+    (recur (apply vector :and (map #(vector :not %) args)))
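+
+    ;; e.g. [:not [:and [:= [:field-id 1] 2] [:= [:field-id 3] 4]]] is rewritten (before any further
+    ;; simplification) to [:or [:not [:= [:field-id 1] 2]] [:not [:= [:field-id 3] 4]]]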
 
    ;; for `and` or `or` compound filters with only one subclause, just unnest the subclause
     [(:or :and :or) arg] (recur arg)
@@ -352,7 +325,7 @@
     (let [[_ original-n unit] absolute-or-relative-datetime]
       [:relative-datetime (+ n original-n) unit])
     (let [[_ timestamp unit] absolute-or-relative-datetime]
-      (du/relative-date unit n timestamp))))
+      [:absolute-datetime (du/relative-date unit n timestamp) unit])))
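+
+;; e.g. adding one :day unit to [:absolute-datetime #inst "2018-10-02" :day] now yields
+;; [:absolute-datetime #inst "2018-10-03" :day] instead of a bare timestamp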
 
 
 (defn dispatch-by-clause-name-or-class
@@ -417,3 +390,57 @@
   [field]
   (or (isa? (:base_type field)    :type/DateTime)
       (isa? (:special_type field) :type/DateTime)))
+
+
+;;; --------------------------------- Unique names & transforming ags to have names ----------------------------------
+
+(s/defn uniquify-names :- (s/constrained [s/Str] #(or (empty? %) (apply distinct? %)) "sequence of unique strings")
+  "Make the names in a sequence of string names unique by adding suffixes such as `_2`.
+
+     (uniquify-names [\"count\" \"sum\" \"count\" \"count_2\"])
+     ;; -> [\"count\" \"sum\" \"count_2\" \"count_2_2\"]"
+  [names :- [s/Str]]
+  (let [aliases     (atom {})
+        unique-name (fn [original-name]
+                      (let [total-count (get (swap! aliases update original-name #(if % (inc %) 1))
+                                             original-name)]
+                        (if (= total-count 1)
+                          original-name
+                          (recur (str original-name \_ total-count)))))]
+    (map unique-name names)))
+
+(def ^:private NamedAggregationsWithUniqueNames
+  (s/constrained [mbql.s/named] #(or (empty? %) (apply distinct? (map last %))) "sequence of named aggregations with unique names"))
+
+(s/defn uniquify-named-aggregations :- NamedAggregationsWithUniqueNames
+  "Make the names of a sequence of named aggregations unique by adding suffixes such as `_2`."
+  [named-aggregations :- [mbql.s/named]]
+  (map (fn [[_ ag] unique-name]
+         [:named ag unique-name])
+       named-aggregations
+       (uniquify-names (map last named-aggregations))))
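+
+;; e.g. (uniquify-named-aggregations [[:named [:count] "count"] [:named [:sum [:field-id 1]] "count"]])
+;; -> [[:named [:count] "count"] [:named [:sum [:field-id 1]] "count_2"]]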
+
+(s/defn pre-alias-aggregations :- [mbql.s/named]
+  "Wrap every aggregation clause in a `:named` clause, using the name returned by `(aggregation->name-fn ag-clause)`
+  as names for any clauses that are not already wrapped in `:named`.
+
+    (pre-alias-aggregations annotate/aggregation-name [[:count] [:count] [:named [:sum [:field-id 1]] \"Sum-41\"]])
+    ;; -> [[:named [:count] \"count\"]
+           [:named [:count] \"count\"]
+           [:named [:sum [:field-id 1]] \"Sum-41\"]]
+
+  Most often, `aggregation->name-fn` will be something like `annotate/aggregation-name`, but for purposes of keeping
+  the `metabase.mbql` module separate from the `metabase.query-processor` code we'll let you pass that in yourself."
+  {:style/indent 1}
+  [aggregation->name-fn :- (s/pred fn?), aggregations :- [mbql.s/Aggregation]]
+  (replace aggregations
+    [:named ag ag-name]       [:named ag ag-name]
+    [(_ :guard keyword?) & _] [:named &match (aggregation->name-fn &match)]))
+
+(s/defn pre-alias-and-uniquify-aggregations :- NamedAggregationsWithUniqueNames
+  "Wrap every aggregation clause in a `:named` clause with a unique name. Combines `pre-alias-aggregations` with
+  `uniquify-named-aggregations`."
+  {:style/indent 1}
+  [aggregation->name-fn :- (s/pred fn?), aggregations :- [mbql.s/Aggregation]]
+  (-> (pre-alias-aggregations aggregation->name-fn aggregations)
+      uniquify-named-aggregations))
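+
+;; e.g. assuming `annotate/aggregation-name` names a bare [:count] clause "count":
+;; (pre-alias-and-uniquify-aggregations annotate/aggregation-name [[:count] [:count]])
+;; -> [[:named [:count] "count"] [:named [:count] "count_2"]]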
diff --git a/src/metabase/models/card.clj b/src/metabase/models/card.clj
index 90b418dc9ccb0781cefa577f63b745f154ec50c5..9adf79213b13774bd52a1a615b6a4ec3de6f5754 100644
--- a/src/metabase/models/card.clj
+++ b/src/metabase/models/card.clj
@@ -41,8 +41,10 @@
 (defn- extract-ids
   "Get all the Segment or Metric IDs referenced by a query."
   [segment-or-metric query]
-  (set (for [[_ id] (mbql.u/clause-instances segment-or-metric query)]
-         id)))
+  (set
+   (case segment-or-metric
+     :segment (mbql.u/match query [:segment id] id)
+     :metric  (mbql.u/match query [:metric  id] id))))
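+
+;; e.g. (extract-ids :segment {:filter [:and [:segment 10] [:segment 20]]}) => #{10 20}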
 
 (defn card-dependencies
   "Calculate any dependent objects for a given `card`."
diff --git a/src/metabase/models/dashboard.clj b/src/metabase/models/dashboard.clj
index 6de3818456dbdc4c7a8e3948471965bd73e09ebc..39937027bbd8db628795311ae057acb9f92ef854 100644
--- a/src/metabase/models/dashboard.clj
+++ b/src/metabase/models/dashboard.clj
@@ -21,6 +21,7 @@
              [revision :as revision]]
             [metabase.models.revision.diff :refer [build-sentence]]
             [metabase.query-processor.interface :as qpi]
+            [metabase.util.i18n :as ui18n]
             [toucan
              [db :as db]
              [hydrate :refer [hydrate]]
@@ -247,8 +248,8 @@
   [collection-name parent-collection-id]
   (let [c (db/count 'Collection
             :name     [:like (format "%s%%" collection-name)]
-            :location  (collection/children-location  (db/select-one ['Collection :location :id]
-                                                        :id parent-collection-id)))]
+            :location (collection/children-location (db/select-one ['Collection :location :id]
+                                                      :id parent-collection-id)))]
     (if (zero? c)
       collection-name
       (format "%s %s" collection-name (inc c)))))
@@ -256,7 +257,8 @@
 (defn save-transient-dashboard!
   "Save a denormalized description of `dashboard`."
   [dashboard parent-collection-id]
-  (let [dashcards  (:ordered_cards dashboard)
+  (let [dashboard  (ui18n/localized-strings->strings dashboard)
+        dashcards  (:ordered_cards dashboard)
         collection (magic.populate/create-collection!
                     (ensure-unique-collection-name (:name dashboard) parent-collection-id)
                     (rand-nth magic.populate/colors)
diff --git a/src/metabase/models/field_values.clj b/src/metabase/models/field_values.clj
index fa2daf6528551137015cd2a7b34bc705a9b21a61..ebf118f44943576099cd58f72f83d4e9c9c72aa5 100644
--- a/src/metabase/models/field_values.clj
+++ b/src/metabase/models/field_values.clj
@@ -114,6 +114,7 @@
                   (trs "Switching Field to use a search widget instead."))
         (db/update! 'Field (u/get-id field) :has_field_values nil)
         (db/delete! FieldValues :field_id (u/get-id field)))
+
       ;; if the FieldValues object already exists then update values in it
       (and field-values values)
       (do
@@ -122,6 +123,7 @@
           :values                values
           :human_readable_values (fixup-human-readable-values field-values values))
         ::fv-updated)
+
       ;; if FieldValues object doesn't exist create one
       values
       (do
@@ -131,6 +133,7 @@
           :values                values
           :human_readable_values human-readable-values)
         ::fv-created)
+
       ;; otherwise this Field isn't eligible, so delete any FieldValues that might exist
       :else
       (do
diff --git a/src/metabase/models/metric.clj b/src/metabase/models/metric.clj
index 7e3e5f1cc327c55a67ca875f8f762a3f5ebb9e98..bcdb2df54c6597d97d00f4ef8bd0102d803640c8 100644
--- a/src/metabase/models/metric.clj
+++ b/src/metabase/models/metric.clj
@@ -72,8 +72,7 @@
   "Calculate any dependent objects for a given Metric."
   [_ _ {:keys [definition]}]
   (when definition
-    {:Segment (set (for [[_ id] (mbql.u/clause-instances :segment definition)]
-                     id))}))
+    {:Segment (set (mbql.u/match definition [:segment id] id))}))
 
 (u/strict-extend (class Metric)
   dependency/IDependent
diff --git a/src/metabase/models/params.clj b/src/metabase/models/params.clj
index c31acbc26fe10b82b5ef19c1985ebac0b7c960d3..ce25a08cb156044404b7c2551ff12715996f4c68 100644
--- a/src/metabase/models/params.clj
+++ b/src/metabase/models/params.clj
@@ -4,12 +4,10 @@
             [metabase
              [db :as mdb]
              [util :as u]]
-            [metabase.query-processor.middleware.expand :as ql]
+            [metabase.mbql.util :as mbql.u]
             [toucan
              [db :as db]
-             [hydrate :refer [hydrate]]]
-            [metabase.mbql.util :as mbql.u])
-  (:import metabase.query_processor.interface.FieldPlaceholder))
+             [hydrate :refer [hydrate]]]))
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                                     SHARED                                                     |
@@ -65,9 +63,9 @@
   "Parse a Card parameter TARGET form, which looks something like `[:dimension [:field-id 100]]`, and return the Field
   ID it references (if any)."
   [target dashcard]
-  (when (ql/is-clause? :dimension target)
+  (when (mbql.u/is-clause? :dimension target)
     (let [[_ dimension] target]
-      (field-form->id (if (ql/is-clause? :template-tag dimension)
+      (field-form->id (if (mbql.u/is-clause? :template-tag dimension)
                         (template-tag->field-form dimension dashcard)
                         dimension)))))
 
@@ -145,7 +143,7 @@
 
 (defn- param-field-ids->fields
   "Get the Fields (as a map of Field ID -> Field) that shoudl be returned for hydrated `:param_fields` for a Card or
-  Dashboard. These only contain the minimal amount of information neccesary needed to power public or embedded
+  Dashboard. These only contain the minimal amount of information necessary needed to power public or embedded
   parameter widgets."
   [field-ids]
   (when (seq field-ids)
diff --git a/src/metabase/models/permissions.clj b/src/metabase/models/permissions.clj
index ad9331b2ce24867a80e1749539eec0d4ec285c80..d086491ee802480342093514f6f24aa4c0217573 100644
--- a/src/metabase/models/permissions.clj
+++ b/src/metabase/models/permissions.clj
@@ -581,7 +581,7 @@
                      (u/pprint-to-str 'blue new))))
 
 (s/defn update-graph!
-  "Update the permissions graph, making any changes neccesary to make it match NEW-GRAPH.
+  "Update the permissions graph, making any changes necessary to make it match NEW-GRAPH.
    This should take in a graph that is exactly the same as the one obtained by `graph` with any changes made as
    needed. The graph is revisioned, so if it has been updated by a third party since you fetched it this function will
    fail and return a 409 (Conflict) exception. If nothing needs to be done, this function returns `nil`; otherwise it
diff --git a/src/metabase/models/query/permissions.clj b/src/metabase/models/query/permissions.clj
index 7b68dc1b750b8406084e4fa3d80a28e95d9bf084..7ca9bb67fdba63e91afdc9fd239772a95c12100a 100644
--- a/src/metabase/models/query/permissions.clj
+++ b/src/metabase/models/query/permissions.clj
@@ -3,6 +3,7 @@
   only thing that is subject to these sorts of checks are *ad-hoc* queries, i.e. queries that have not yet been saved
   as a Card. Saved Cards are subject to the permissions of the Collection to which they belong."
   (:require [clojure.tools.logging :as log]
+            [metabase.api.common :as api]
             [metabase.models
              [interface :as i]
              [permissions :as perms]
@@ -42,11 +43,11 @@
     ;; if we have a source-query just recur until we hit either the native source or the MBQL source
     source-query (recur source-query)
     ;; for root MBQL queries just return source-table + join-tables
-    :else        (cons source-table join-tables)))
+    :else        (cons source-table (map :table-id join-tables))))
 
 (s/defn ^:private tables->permissions-path-set :- #{perms/ObjectPath}
   "Given a sequence of `tables` referenced by a query, return a set of required permissions."
-  [database-or-id tables]
+  [database-or-id :- (s/cond-pre su/IntGreaterThanZero su/Map) tables]
   (let [table-ids        (filter integer? tables)
         table-id->schema (when (seq table-ids)
                            (db/select-id->field :schema Table :id [:in table-ids]))]
@@ -57,6 +58,7 @@
              (perms/adhoc-native-query-path database-or-id)
 
              ;; If Table is an ID then fetch its schema from the DB and require normal table perms
+             ;; TODO - we should check and see if Table is in the QP store here so we don't do the extra fetch
              (integer? table)
              (perms/object-path (u/get-id database-or-id) (table-id->schema table) table)
 
@@ -64,6 +66,7 @@
              :else
              (perms/object-path (u/get-id database-or-id)
                                 (:schema table)
+                                ;; TODO - don't think we use `:table-id` anywhere anymore
                                 (or (:id table) (:table-id table))))))))
 
 (s/defn ^:private source-card-read-perms :- #{perms/ObjectPath}
@@ -74,10 +77,12 @@
                            (throw (Exception. (str (tru "Card {0} does not exist." source-card-id)))))
                        :read))
 
-(defn- expand-query-if-needed [query]
-  (if (map? (:database query))
-    query
-    ((resolve 'metabase.query-processor/expand) query)))
+(defn- preprocess-query [query]
+  ;; ignore the current user for the purposes of calculating the permissions required to run the query. We don't
+  ;; want preprocessing to fail because the current user doesn't have permissions to run the query when we're not
+  ;; actually trying to run it at all
+  (binding [api/*current-user-id* nil]
+    ((resolve 'metabase.query-processor/query->preprocessed) query)))
 
 ;; TODO - not sure how we can prevent circular source Cards if source Cards permissions are just collection perms now???
 (s/defn ^:private mbql-permissions-path-set :- #{perms/ObjectPath}
@@ -87,13 +92,15 @@
   things when a single Card is busted (e.g. API endpoints that filter out unreadable Cards) and instead returns 'only
   admins can see this' permissions -- `#{\"db/0\"}` (DB 0 will never exist, thus normal users will never be able to
   get permissions for it, but admins have root perms and will still get to see (and hopefully fix) it)."
-  [query :- {:query su/Map, s/Keyword s/Any} & [throw-exceptions? :- (s/maybe (s/eq :throw-exceptions))]]
+  [query :- {:query su/Map, s/Keyword s/Any} & [throw-exceptions?     :- (s/maybe (s/eq :throw-exceptions))
+                                                already-preprocessed? :- (s/maybe (s/eq :already-preprocessed))]]
   (try
    ;; if we are using a Card as our source, the perms are that Card's (i.e. that Card's Collection's) read perms
     (if-let [source-card-id (qputil/query->source-card-id query)]
       (source-card-read-perms source-card-id)
       ;; otherwise if there's no source card then calculate perms based on the Tables referenced in the query
-      (let [{:keys [query database]} (expand-query-if-needed query)]
+      (let [{:keys [query database]} (cond-> query
+                                       (not already-preprocessed?) preprocess-query)]
         (tables->permissions-path-set database (query->source-and-join-tables query))))
     ;; if for some reason we can't expand the Card (i.e. it's an invalid legacy card) just return a set of permissions
     ;; that means no one will ever get to see it (except for superusers who get to see everything)
@@ -107,11 +114,12 @@
 
 (s/defn perms-set :- #{perms/ObjectPath}
   "Calculate the set of permissions required to run an ad-hoc `query`."
-  {:arglists '([outer-query & [throw-exceptions?]])}
-  ;; TODO - I think we can remove the two optional params because nothing uses them anymore
-  [{query-type :type, database :database, :as query} & [throw-exceptions? :- (s/maybe (s/eq :throw-exceptions))]]
+  {:arglists '([outer-query & [throw-exceptions? already-preprocessed?]])}
+  ;; TODO - I think we can remove the `throw-exceptions?` optional param because nothing uses it anymore
+  [{query-type :type, database :database, :as query} & [throw-exceptions?     :- (s/maybe (s/eq :throw-exceptions))
+                                                        already-preprocessed? :- (s/maybe (s/eq :already-preprocessed))]]
   (cond
     (empty? query)                   #{}
+    (= (keyword query-type) :query)  (mbql-permissions-path-set query throw-exceptions? already-preprocessed?)
     (= (keyword query-type) :native) #{(perms/adhoc-native-query-path database)}
-    (= (keyword query-type) :query)  (mbql-permissions-path-set query throw-exceptions?)
     :else                            (throw (Exception. (str (tru "Invalid query type: {0}" query-type))))))
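+
+;; e.g. a caller that has already run a query through `query->preprocessed` can skip the extra preprocessing pass
+;; (`preprocessed-query` below stands for whatever preprocessed query map the caller already has):
+;;
+;;    (perms-set preprocessed-query :throw-exceptions :already-preprocessed)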
diff --git a/src/metabase/pulse/render.clj b/src/metabase/pulse/render.clj
index c720b8166fa77d98fb4aa461ae3d21f12397d642..b29a91f16d69f9e86b6544980de328f1467c6713 100644
--- a/src/metabase/pulse/render.clj
+++ b/src/metabase/pulse/render.clj
@@ -10,6 +10,7 @@
             [hiccup
              [core :refer [h html]]
              [util :as hutil]]
+            [metabase.mbql.util :as mbql.u]
             [metabase.pulse.color :as color]
             [metabase.util :as u]
             [metabase.util
@@ -124,11 +125,6 @@
    (or (ui-logic/y-axis-rowfn card data)
        second)])
 
-(defn- datetime-field?
-  [field]
-  (or (isa? (:base_type field)    :type/DateTime)
-      (isa? (:special_type field) :type/DateTime)))
-
 (defn- number-field?
   [field]
   (or (isa? (:base_type field)    :type/Number)
@@ -152,7 +148,7 @@
            (= row-count 1))                                        :scalar
       (and (= col-count 2)
            (> row-count 1)
-           (datetime-field? col-1)
+           (mbql.u/datetime-field? col-1)
            (number-field? col-2))                                  :sparkline
       (and (= col-count 2)
            (number-field? col-2))                                  :bar
@@ -264,9 +260,9 @@
 (defn- format-cell
   [timezone value col]
   (cond
-    (datetime-field? col) (format-timestamp timezone value col)
-    (and (number? value) (not (datetime-field? col))) (format-number value)
-    :else (str value)))
+    (mbql.u/datetime-field? col)                             (format-timestamp timezone value col)
+    (and (number? value) (not (mbql.u/datetime-field? col))) (format-number value)
+    :else                                                    (str value)))
 
 (defn- render-img-data-uri
   "Takes a PNG byte array and returns a Base64 encoded URI"
@@ -653,7 +649,7 @@
 (s/defn ^:private render:sparkline :- RenderedPulseCard
   [render-type timezone card {:keys [rows cols] :as data}]
   (let [[x-axis-rowfn y-axis-rowfn] (graphing-columns card data)
-        ft-row (if (datetime-field? (x-axis-rowfn cols))
+        ft-row (if (mbql.u/datetime-field? (x-axis-rowfn cols))
                  #(.getTime ^Date (du/->Timestamp % timezone))
                  identity)
         rows   (non-nil-rows x-axis-rowfn y-axis-rowfn
@@ -680,7 +676,7 @@
                     (image-bundle->attachment image-bundle))
      :content     [:div
                    [:img {:style (style {:display :block
-                                         :width :100%})
+                                         :width   :100%})
                           :src   (:image-src image-bundle)}]
                    [:table
                     [:tr
@@ -774,11 +770,11 @@
       (:include_xls card)))
 
 (s/defn ^:private render-pulse-card-body :- RenderedPulseCard
-  [render-type timezone card {:keys [data error]}]
+  [render-type timezone card {:keys [data error], :as results}]
   (try
     (when error
       (let [^String msg (str (tru "Card has errors: {0}" error))]
-        (throw (Exception. msg))))
+        (throw (ex-info msg results))))
     (case (detect-pulse-card-type card data)
       :empty     (render:empty     render-type card data)
       :scalar    (render:scalar    timezone card data)
diff --git a/src/metabase/query_processor.clj b/src/metabase/query_processor.clj
index ec4bc013002628c8b51ba1abd62b7c50ce6d2066..4589eaed64c6e1c89b5e331402ac695f303a87cd 100644
--- a/src/metabase/query_processor.clj
+++ b/src/metabase/query_processor.clj
@@ -2,6 +2,7 @@
   "Preprocessor that does simple transformations to all incoming queries, simplifing the driver-specific
   implementations."
   (:require [clojure.tools.logging :as log]
+            [medley.core :as m]
             [metabase
              [driver :as driver]
              [util :as u]]
@@ -12,18 +13,20 @@
             [metabase.query-processor.middleware
              [add-dimension-projections :as add-dim]
              [add-implicit-clauses :as implicit-clauses]
+             [add-query-throttle :as query-throttle]
              [add-row-count-and-status :as row-count-and-status]
              [add-settings :as add-settings]
-             [annotate-and-sort :as annotate-and-sort]
+             [annotate :as annotate]
              [auto-bucket-datetime-breakouts :as bucket-datetime]
              [bind-effective-timezone :as bind-timezone]
              [binning :as binning]
              [cache :as cache]
              [catch-exceptions :as catch-exceptions]
+             [check-features :as check-features]
              [cumulative-aggregations :as cumulative-ags]
+             [desugar :as desugar]
              [dev :as dev]
              [driver-specific :as driver-specific]
-             [expand :as expand]
              [expand-macros :as expand-macros]
              [fetch-source-query :as fetch-source-query]
              [format-rows :as format-rows]
@@ -33,14 +36,16 @@
              [normalize-query :as normalize]
              [parameters :as parameters]
              [permissions :as perms]
-             [add-query-throttle :as query-throttle]
-             [resolve :as resolve]
+             [reconcile-breakout-and-order-by-bucketing :as reconcile-bucketing]
+             [resolve-database :as resolve-database]
              [resolve-driver :as resolve-driver]
              [resolve-fields :as resolve-fields]
+             [resolve-joined-tables :as resolve-joined-tables]
+             [resolve-source-table :as resolve-source-table]
              [results-metadata :as results-metadata]
-             [source-table :as source-table]
              [store :as store]
-             [validate :as validate]]
+             [validate :as validate]
+             [wrap-value-literals :as wrap-value-literals]]
             [metabase.query-processor.util :as qputil]
             [metabase.util
              [date :as du]
@@ -94,30 +99,43 @@
   [f]
   ;; ▼▼▼ POST-PROCESSING ▼▼▼  happens from TOP-TO-BOTTOM, e.g. the results of `f` are (eventually) passed to `limit`
   (-> f
-      dev/guard-multiple-calls
-      mbql-to-native/mbql->native                      ; ▲▲▲ NATIVE-ONLY POINT ▲▲▲ Query converted from MBQL to native here; all functions *above* will only see the native query
-      annotate-and-sort/annotate-and-sort
+      ;; ▲▲▲ NATIVE-ONLY POINT ▲▲▲ Query converted from MBQL to native here; f will see a native query instead of MBQL
+      mbql-to-native/mbql->native
+      ;; TODO - should we log the fully preprocessed query here?
+      check-features/check-features
+      wrap-value-literals/wrap-value-literals
+      annotate/add-column-info
       perms/check-query-permissions
+      resolve-joined-tables/resolve-joined-tables
       dev/check-results-format
       limit/limit
       cumulative-ags/handle-cumulative-aggregations
       results-metadata/record-and-return-metadata!
       format-rows/format-rows
-      resolve/resolve-middleware
-      expand/expand-middleware                         ; ▲▲▲ QUERY EXPANSION POINT  ▲▲▲ All functions *above* will see EXPANDED query during PRE-PROCESSING
+      desugar/desugar
       binning/update-binning-strategy
       resolve-fields/resolve-fields
       add-dim/add-remapping
       implicit-clauses/add-implicit-clauses
+      reconcile-bucketing/reconcile-breakout-and-order-by-bucketing
       bucket-datetime/auto-bucket-datetime-breakouts
-      source-table/resolve-source-table-middleware
-      row-count-and-status/add-row-count-and-status    ; ▼▼▼ RESULTS WRAPPING POINT ▼▼▼ All functions *below* will see results WRAPPED in `:data` during POST-PROCESSING
+      resolve-source-table/resolve-source-table
+      row-count-and-status/add-row-count-and-status
+      ;; ▼▼▼ RESULTS WRAPPING POINT ▼▼▼ All functions *below* will see results WRAPPED in `:data` during POST-PROCESSING
+      ;;
+      ;; TODO - I think we should add row count and status much later, perhaps at the very end right before
+      ;; `catch-exceptions`
       parameters/substitute-parameters
       expand-macros/expand-macros
-      driver-specific/process-query-in-context         ; (drivers can inject custom middleware if they implement IDriver's `process-query-in-context`)
+      ;; (drivers can inject custom middleware if they implement IDriver's `process-query-in-context`)
+      driver-specific/process-query-in-context
       add-settings/add-settings
-      resolve-driver/resolve-driver                    ; ▲▲▲ DRIVER RESOLUTION POINT ▲▲▲ All functions *above* will have access to the driver during PRE- *and* POST-PROCESSING
+      ;; ▲▲▲ DRIVER RESOLUTION POINT ▲▲▲
+      ;; All functions *above* will have access to the driver during PRE- *and* POST-PROCESSING
+      ;; TODO - I think we should do this much earlier
+      resolve-driver/resolve-driver
       bind-timezone/bind-effective-timezone
+      resolve-database/resolve-database
       fetch-source-query/fetch-source-query
       store/initialize-store
       query-throttle/maybe-add-query-throttle
@@ -131,39 +149,68 @@
 ;; ▲▲▲ PRE-PROCESSING ▲▲▲ happens from BOTTOM-TO-TOP, e.g. the results of `expand-macros` are passed to
 ;; `substitute-parameters`
 
+(def ^:private ^{:arglists '([query])} preprocess
+  "Run all the preprocessing steps on a query, returning it in the shape it looks immediately before it would normally
+  get executed by `execute-query`. One important thing to note: if preprocessing fails for some reason, `preprocess`
+  will throw an Exception, unlike `process-query`. Why? Preprocessing is something we use internally, so catching
+  Exceptions and wrapping them in the frontend results format doesn't make sense.
+
+  (NOTE: Don't use this directly. You either want `query->preprocessed` (for the fully preprocessed query) or
+  `query->native` for the native form.)"
+  ;; throwing pre-allocated exceptions can actually get optimized away into long jumps by the JVM, let's give it a
+  ;; chance to happen here
+  (let [quit-early-exception (Exception.)
+        ;; the 'pivoting' function is just one that delivers the query in its current state into the promise we
+        ;; conveniently attached to the query. Then it quits early by throwing our pre-allocated Exception...
+        deliver-native-query
+        (fn [{:keys [results-promise] :as query}]
+          (deliver results-promise (dissoc query :results-promise))
+          (throw quit-early-exception))
+
+        ;; ...which ends up getting caught by the `catch-exceptions` middleware. Add a final post-processing function
+        ;; around that which will return whatever we delivered into the `:results-promise`.
+        receive-native-query
+        (fn [qp]
+          (fn [query]
+            (let [results-promise (promise)
+                  results         (qp (assoc query :results-promise results-promise))]
+              (if (realized? results-promise)
+                @results-promise
+                ;; if the results promise was never delivered, it means we never made it all the way to the
+                ;; `deliver-native-query` portion of the QP pipeline; the results will thus be a failure message from
+                ;; our `catch-exceptions` middleware. In 99.9% of cases we probably want to know right away that the
+                ;; query failed instead of giving people a failure response and trying to get results from that. So do
+                ;; everyone a favor and throw an Exception
+                (let [results (m/dissoc-in results [:query :results-promise])]
+                  (throw (ex-info (str (tru "Error preprocessing query")) results)))))))]
+    (receive-native-query (qp-pipeline deliver-native-query))))
+
+(defn query->preprocessed
+  "Return the fully preprocessed form for `query`, the way it would look immediately before `mbql->native` is called.
+  Especially helpful for debugging or testing driver QP implementations."
+  {:style/indent 0}
+  [query]
+  (-> (update query :middleware assoc :disable-mbql->native? true)
+      preprocess
+      (m/dissoc-in [:middleware :disable-mbql->native?])))
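+
+;; e.g. from a REPL, to inspect what the QP will hand to the driver's `mbql->native` (hypothetical IDs):
+;;
+;;    (query->preprocessed {:database 1, :type :query, :query {:source-table 2}})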
+
 (defn query->native
   "Return the native form for QUERY (e.g. for a MBQL query on Postgres this would return a map containing the compiled
-  SQL form)."
+  SQL form). (Like `preprocess`, this function will throw an Exception if preprocessing was not successful.)"
   {:style/indent 0}
   [query]
-  (let [results ((qp-pipeline identity) query)]
-    (or (get-in results [:data :native_form])
-        (throw (ex-info "No native form returned."
-                 results)))))
+  (let [results (preprocess query)]
+    (or (get results :native)
+        (throw (ex-info (str (tru "No native form returned."))
+                 (or results {}))))))
+
+(def ^:private default-pipeline (qp-pipeline execute-query))
 
 (defn process-query
   "A pipeline of various QP functions (including middleware) that are used to process MB queries."
   {:style/indent 0}
   [query]
-  ((qp-pipeline execute-query) query))
-
-(def ^{:arglists '([query])} expand
-  "Expand a QUERY the same way it would normally be done as part of query processing.
-   This is useful for things that need to look at an expanded query, such as permissions checking for Cards."
-  (->> identity
-       resolve/resolve-middleware
-       expand/expand-middleware
-       source-table/resolve-source-table-middleware
-       parameters/substitute-parameters
-       expand-macros/expand-macros
-       driver-specific/process-query-in-context
-       resolve-driver/resolve-driver
-       fetch-source-query/fetch-source-query
-       bind-timezone/bind-effective-timezone
-       validate/validate-query
-       normalize/normalize))
-;; ▲▲▲ This only does PRE-PROCESSING, so it happens from bottom to top, eventually returning the preprocessed query
-;; instead of running it
+  (default-pipeline query))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -186,21 +233,26 @@
 
 (defn- save-and-return-failed-query!
   "Save QueryExecution state and construct a failed query response"
-  [query-execution error-message]
+  [query-execution, ^Throwable e]
   ;; record our query execution and format response
   (-> query-execution
       (dissoc :start_time_millis)
-      (merge {:error        error-message
+      (merge {:error        (.getMessage e)
               :running_time (- (System/currentTimeMillis) (:start_time_millis query-execution))})
       save-query-execution!
       (dissoc :result_rows :hash :executor_id :native :card_id :dashboard_id :pulse_id)
       ;; this is just for the response for client
       (assoc :status    :failed
-             :error     error-message
+             :error     (.getMessage e)
              :row_count 0
              :data      {:rows    []
                          :cols    []
-                         :columns []})))
+                         :columns []})
+      ;; include the stacktrace and the preprocessed/native stages of the query in the response when they're
+      ;; available, which should make debugging queries a bit easier
+      (merge (some-> (ex-data e)
+                     (select-keys [:stacktrace :preprocessed :native])
+                     (m/dissoc-in [:preprocessed :info])))))
 
 (defn- save-and-return-successful-query!
   "Save QueryExecution state and construct a completed (successful) query response"
@@ -226,10 +278,12 @@
   "Make sure QUERY-RESULT `:status` is something other than `nil`or `:failed`, or throw an Exception."
   [query-result]
   (when-not (contains? query-result :status)
-    (throw (Exception. "invalid response from database driver. no :status provided")))
+    (throw (ex-info (str (tru "Invalid response from database driver. No :status provided."))
+             query-result)))
   (when (= :failed (:status query-result))
     (log/warn (u/pprint-to-str 'red query-result))
-    (throw (Exception. (str (get query-result :error "general error"))))))
+    (throw (ex-info (str (get query-result :error (tru "General error")))
+             query-result))))
 
 (def ^:dynamic ^Boolean *allow-queries-with-no-executor-id*
   "Should we allow running queries (via `dataset-query`) without specifying the `executed-by` User ID?  By default
@@ -271,7 +325,7 @@
             (log/warn (u/format-color 'red "Query failure: %s\n%s"
                                       (.getMessage e)
                                       (u/pprint-to-str (u/filtered-stacktrace e))))
-            (save-and-return-failed-query! query-execution (.getMessage e))))))))
+            (save-and-return-failed-query! query-execution e)))))))
 
 ;; TODO - couldn't saving the query execution be done by MIDDLEWARE?
 (s/defn process-query-and-save-execution!
diff --git a/src/metabase/query_processor/annotate.clj b/src/metabase/query_processor/annotate.clj
deleted file mode 100644
index e844b44d74443024a31476903b1236293c57f26d..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/annotate.clj
+++ /dev/null
@@ -1,394 +0,0 @@
-(ns metabase.query-processor.annotate
-  "Code that analyzes the results of running a query and adds relevant type information about results (including
-  foreign key information). This also does things like taking lisp-case keys used in the QP and converting them back
-  to snake_case ones used in the frontend.
-
-  TODO - The code in this namespace could definitely use a little cleanup to make it a little easier to wrap one's
-         head around :)
-
-  TODO - This namespace should be called something like `metabase.query-processor.middleware.annotate`"
-  (:require [clojure
-             [set :as set]
-             [string :as str]]
-            [clojure.tools.logging :as log]
-            [medley.core :as m]
-            [metabase
-             [driver :as driver]
-             [util :as u]]
-            [metabase.models
-             [field :refer [Field]]
-             [humanization :as humanization]]
-            [metabase.query-processor
-             [interface :as i]
-             [sort :as sort]]
-            [toucan.db :as db])
-  (:import [metabase.query_processor.interface Expression ExpressionRef]))
-
-;;; ## Field Resolution
-
-(defn- valid-collected-field? [keep-date-time-fields? f]
-  (or
-   ;; is `f` an instance of `Field`, `FieldLiteral`, or `ExpressionRef`?
-   (some (u/rpartial instance? f)
-         [metabase.query_processor.interface.Field
-          metabase.query_processor.interface.FieldLiteral
-          metabase.query_processor.interface.ExpressionRef])
-   ;; or if we're keeping DateTimeFields, is is an instance of `DateTimeField`?
-   (when keep-date-time-fields?
-     (instance? metabase.query_processor.interface.DateTimeField f))))
-
-(defn collect-fields
-  "Return a sequence of all the `Fields` inside THIS, recursing as needed for collections.
-   For maps, add or `conj` to property `:path`, recording the keypath used to reach each `Field.`
-
-     (collect-fields {:name \"id\", ...})     -> [{:name \"id\", ...}]
-     (collect-fields [{:name \"id\", ...}])   -> [{:name \"id\", ...}]
-     (collect-fields {:a {:name \"id\", ...}) -> [{:name \"id\", :path [:a], ...}]"
-  {:style/indent 0}
-  [this & [keep-date-time-fields?]]
-  {:post [(every? (partial valid-collected-field? keep-date-time-fields?) %)]}
-  (condp instance? this
-    ;; For a DateTimeField we'll flatten it back into regular Field but include the :unit info for the frontend.
-    ;; Recurse so it is otherwise handled normally
-    metabase.query_processor.interface.DateTimeField
-    (let [{:keys [field unit]} this
-          fields               (collect-fields (assoc field :unit unit) keep-date-time-fields?)]
-      (if keep-date-time-fields?
-        (for [field fields]
-          (i/map->DateTimeField {:field field, :unit unit}))
-        fields))
-
-    metabase.query_processor.interface.BinnedField
-    (let [{:keys [strategy min-value max-value], nested-field :field} this]
-      [(assoc nested-field :binning_info {:binning_strategy strategy
-                                          :bin_width (:bin-width this)
-                                          :num_bins (:num-bins this)
-                                          :min_value min-value
-                                          :max_value max-value})])
-
-    metabase.query_processor.interface.Field
-    (if-let [parent (:parent this)]
-      [this parent]
-      [this])
-
-    metabase.query_processor.interface.FieldLiteral
-    [(assoc this
-       :field-id           [:field-literal (:field-name this) (:base-type this)]
-       :field-display-name (humanization/name->human-readable-name (:field-name this)))]
-
-    metabase.query_processor.interface.ExpressionRef
-    [(assoc this
-       :field-display-name (:expression-name this)
-       :base-type          :type/Float
-       :special-type       :type/Number)]
-
-    ;; for every value in a map in the query we'll descend into the map and find all the fields contained therein and
-    ;; mark the key as each field's source. e.g. if we descend into the `:breakout` columns for a query each field
-    ;; returned will get a `:source` of `:breakout` The source is important since it is used to determine sort order
-    ;; for for columns
-    clojure.lang.IPersistentMap
-    (for [[k v] (seq this)
-          field (collect-fields v keep-date-time-fields?)
-          :when field]
-      (if (= k :source-query)
-        ;; For columns collected from a source query...
-        ;; 1) Make sure they didn't accidentally pick up an integer ID if the fields clause was added implicitly. If
-        ;;     it does the frontend won't know how to use the field since it won't match up with the same field in the
-        ;;     "virtual" table metadata.
-        ;; 2) Keep the original `:source` rather than replacing it with `:source-query` since the frontend doesn't
-        ;;    know what to do with that.
-        (if (= (:unit field) :year)
-          ;; if the field is broken out by year we don't want to advertise it as type/DateTime because you can't do a
-          ;; datetime breakout on the years that come back (they come back as text). So instead just tell people it's
-          ;; a Text column
-          (assoc field
-            :field-id [:field-literal (:field-name field) :type/Text]
-            :base-type :type/Text
-            :unit      nil)
-          (assoc field
-            :field-id [:field-literal (:field-name field) (:base-type field)]))
-        ;; For all other fields just add `:source` as described above
-        (assoc field :source k)))
-
-    clojure.lang.Sequential
-    (for [[i field] (m/indexed (mapcat (u/rpartial collect-fields keep-date-time-fields?) this))]
-      (assoc field :clause-position i))
-
-    nil))
-
-(defn- qualify-field-name
-  "Update the `field-name` to reflect the name we expect to see coming back from the query.
-   (This is for handling Mongo nested Fields, I think (?))"
-  [field]
-  {:post [(keyword? (:field-name %))]}
-  (assoc field :field-name (keyword (str/join \. (rest (i/qualified-name-components field))))))
-
-(defn aggregation-name
-  "Return an appropriate field *and* display name for an `:aggregation` subclause (an aggregation or expression)."
-  ^String [{custom-name :custom-name, aggregation-type :aggregation-type, :as ag}]
-  (when-not i/*driver*
-    (throw (Exception. "metabase.query-processor.interface/*driver* is unbound.")))
-  (cond
-    ;; if a custom name was provided use it
-    custom-name               (driver/format-custom-field-name i/*driver* custom-name)
-    ;; for unnamed expressions, just compute a name like "sum + count"
-    (instance? Expression ag) (let [{:keys [operator args]} ag]
-                                (str/join (str " " (name operator) " ")
-                                          (for [arg args]
-                                            (if (instance? Expression arg)
-                                              (str "(" (aggregation-name arg) ")")
-                                              (aggregation-name arg)))))
-    ;; for unnamed normal aggregations, the column alias is always the same as the ag type except for `:distinct` with
-    ;; is called `:count` (WHY?)
-    aggregation-type          (if (= (keyword aggregation-type) :distinct)
-                                "count"
-                                (name aggregation-type))))
-
-(defn- expression-aggregate-field-info [expression]
-  (let [ag-name (aggregation-name expression)]
-    {:source             :aggregation
-     :field-name         ag-name
-     :field-display-name ag-name
-     :base-type          :type/Number
-     :special-type       :type/Number}))
-
-(defn- aggregate-field-info
-  "Return appropriate column metadata for an `:aggregation` clause."
-  [{ag-type :aggregation-type, ag-field :field, :as ag}]
-  (merge (let [field-name (aggregation-name ag)]
-           {:source             :aggregation
-            :field-name         field-name
-            :field-display-name field-name
-            :base-type          (:base-type ag-field)
-            :special-type       (:special-type ag-field)
-            :settings           (:settings ag-field)})
-         ;; Always treat count or distinct count as an integer even if the DB in question returns it as something
-         ;; wacky like a BigDecimal or Float
-         (when (contains? #{:count :distinct} ag-type)
-           {:base-type    :type/Integer
-            :special-type :type/Number})
-         ;; For the time being every Expression is an arithmetic operator and returns a floating-point number, so
-         ;; hardcoding these types is fine; In the future when we extend Expressions to handle more functionality
-         ;; we'll want to introduce logic that associates a return type with a given expression. But this will work
-         ;; for the purposes of a patch release.
-         (when (or (instance? ExpressionRef ag-field)
-                   (instance? Expression ag-field))
-           {:base-type    :type/Float
-            :special-type :type/Number})))
-
-(defn- has-aggregation?
-  "Does QUERY have an aggregation?"
-  [{aggregations :aggregation}]
-  (or (empty? aggregations)
-      ;; TODO - Not sure this needs to be checked anymore since `:rows` is a legacy way to specifiy "no aggregations"
-      ;; and should be stripped out during preprocessing
-      (= (:aggregation-type (first aggregations)) :rows)))
-
-(defn- add-aggregate-fields-if-needed
-  "Add a Field containing information about an aggregate columns such as `:count` or `:distinct` if needed."
-  [{aggregations :aggregation, :as query} fields]
-  (if (has-aggregation? query)
-    fields
-    (concat fields (for [ag aggregations]
-                     (if (instance? Expression ag)
-                       (expression-aggregate-field-info ag)
-                       (aggregate-field-info ag))))))
-
-(defn- generic-info-for-missing-key
-  "Return a set of bare-bones metadata for a Field named K when all else fails.
-   Scan the INITIAL-VALUES of K in an attempt to determine the `base-type`."
-  [k & [initial-values]]
-  {:base-type          (or (driver/values->base-type initial-values)
-                           :type/*)
-   :preview-display    true
-   :special-type       nil
-   :field-name         k
-   :field-display-name (humanization/name->human-readable-name (name k))})
-
-;; TODO - I'm not 100% sure the code reaches this point any more since the `collect-fields` logic now handles nested
-;; queries maybe this is used for queries where the source query is native?
-(defn- info-for-column-from-source-query
-  "Return information about a column that comes back when we're using a source query.
-   (This is basically the same as the generic information, but we also add `:id` and `:source`
-   columns so drill-through operations can be done on it)."
-  [k & [initial-values]]
-  (let [col (generic-info-for-missing-key k initial-values)]
-    (assoc col
-      :id     [:field-literal k (:base-type col)]
-      :source :fields)))
-
-
-(defn- info-for-duplicate-field
-  "The Clojure JDBC driver automatically appends suffixes like `count_2` to duplicate columns if multiple columns come
-  back with the same name; since at this time we can't resolve those normally (#1786) fall back to using the metadata
-  for the first column (e.g., `count`). This is definitely a HACK, but in most cases this should be correct (or at
-  least better than the generic info) for the important things like type information."
-  [fields k]
-  (when-let [[_ field-name-without-suffix] (re-matches #"^(.*)_\d+$" (name k))]
-    (some (fn [{field-name :field-name, :as field}]
-            (when (= (name field-name) field-name-without-suffix)
-              (merge (generic-info-for-missing-key k)
-                     (select-keys field [:base-type :special-type :source]))))
-          fields)))
-
-(defn- info-for-missing-key
-  "Metadata for a field named K, which we weren't able to resolve normally."
-  [inner-query fields k initial-values]
-  (or (when (:source-query inner-query)
-        (info-for-column-from-source-query k initial-values))
-      (info-for-duplicate-field fields k)
-      (generic-info-for-missing-key k initial-values)))
-
-(defn- add-unknown-fields-if-needed
-  "When create info maps for any fields we didn't expect to come back from the query.
-   Ideally, this should never happen, but on the off chance it does we still want to return it in the results."
-  [inner-query actual-keys initial-rows fields]
-  {:pre [(sequential? actual-keys) (every? keyword? actual-keys)]}
-  (let [expected-keys (u/prog1 (set (map :field-name fields))
-                        (assert (every? keyword? <>)))
-        missing-keys  (set/difference (set actual-keys) expected-keys)]
-    (when (seq missing-keys)
-      (log/warn (u/format-color 'yellow (str "There are fields we (maybe) weren't expecting in the results: %s\n"
-                                             "Expected: %s\nActual: %s")
-                  missing-keys expected-keys (set actual-keys))))
-    (concat fields (for [k     actual-keys
-                         :when (contains? missing-keys k)]
-                     (info-for-missing-key inner-query fields k (map k initial-rows))))))
-
-(defn- fixup-renamed-fields
-  "After executing the query, it's possible that java.jdbc changed the name of the column that was originally in the
-  query. This can happen when java.jdbc finds two columns with the same name, it will append an integer (like _2) on
-  the end. When this is done on an existing column in the query, this function fixes that up, updating the column
-  information we have with the new name that java.jdbc assigned the column. The `add-unknown-fields-if-needed`
-  function above is similar, but is used when we don't have existing information on that column and need to infer it."
-  [query actual-keys]
-  (let [expected-field-names (set (map (comp keyword name) (:fields query)))]
-    (if (= expected-field-names (set actual-keys))
-      query
-      (update query :fields
-              (fn [fields]
-                (mapv (fn [expected-field actual-key]
-                        (if (not= (name expected-field) (name actual-key))
-                          (assoc expected-field :field-name (name actual-key))
-                          expected-field))
-                      fields actual-keys))))))
-
-(defn- convert-field-to-expected-format
-  "Rename keys, provide default values, etc. for FIELD so it is in the format expected by the frontend."
-  [field]
-  {:pre  [field]
-   :post [(keyword? (:name %))]}
-  (let [defaults {:description nil
-                  :id          nil
-                  :table_id    nil}]
-    (-> (merge defaults field)
-        (update :field-display-name #(when % (name %)))
-        (set/rename-keys {:base-type          :base_type
-                          :field-display-name :display_name
-                          :field-id           :id
-                          :field-name         :name
-                          :fk-field-id        :fk_field_id
-                          :preview-display    :preview_display
-                          :schema-name        :schema_name
-                          :special-type       :special_type
-                          :table-id           :table_id
-                          :visibility-type    :visibility_type
-                          :remapped-to        :remapped_to
-                          :remapped-from      :remapped_from})
-        (dissoc :position :clause-position :parent :parent-id :table-name :database-type))))
-
-(defn- fk-field->dest-fn
-  "Fetch fk info and return a function that returns the destination Field of a given Field."
-  ([fields]
-   (or (fk-field->dest-fn fields (for [{:keys [special_type id]} fields
-                                       :when  (and (isa? special_type :type/FK)
-                                                   (integer? id))]
-                                   id))
-       (constantly nil)))
-  ;; Fetch the foreign key fields whose origin is in the returned Fields, create a map of
-  ;; origin-field-id->destination-field-id
-  ([fields fk-ids]
-   (when (seq fk-ids)
-     (fk-field->dest-fn fields fk-ids (db/select-id->field :fk_target_field_id Field
-                                        :id                 [:in fk-ids]
-                                        :fk_target_field_id [:not= nil]))))
-  ;; Fetch the destination Fields referenced by the foreign keys
-  ([fields fk-ids id->dest-id]
-   (when (seq id->dest-id)
-     (fk-field->dest-fn fields fk-ids id->dest-id (u/key-by :id (db/select [Field :id :name :display_name :table_id :description :base_type :special_type :visibility_type :settings]
-                                                                  :id [:in (vals id->dest-id)])))))
-  ;; Return a function that will return the corresponding destination Field for a given Field
-  ([_ _ id->dest-id dest-id->field]
-   (fn [{:keys [id]}]
-     (some-> id id->dest-id dest-id->field))))
-
-(defn- add-extra-info-to-fk-fields
-  "Add `:extra_info` about foreign keys to `Fields` whose `special_type` is a `:type/FK`."
-  [fields]
-  (let [field->dest (fk-field->dest-fn fields)]
-    (for [field fields]
-      (let [{:keys [table_id], :as dest-field} (field->dest field)]
-        (assoc field
-          :target     (when dest-field
-                        (into {} dest-field))
-          :extra_info (if table_id
-                        {:target_table_id table_id}
-                        {}))))))
-
-(defn- resolve-sort-and-format-columns
-  "Collect the Fields referenced in INNER-QUERY, sort them according to the rules at the top of this page, format them
-  as expected by the frontend, and return the results."
-  [inner-query result-keys initial-rows]
-  {:pre [(sequential? result-keys)]}
-  (when (seq result-keys)
-    (let [result-keys-set (set result-keys)
-          query-with-renamed-columns (fixup-renamed-fields inner-query result-keys)]
-      (->> (dissoc query-with-renamed-columns :expressions)
-           collect-fields
-           ;; qualify the field name to make sure it matches what will come back. (For Mongo nested queries only)
-           (map qualify-field-name)
-           ;; add entries for aggregate fields
-           (add-aggregate-fields-if-needed inner-query)
-           ;; make field-name a keyword
-           (map (u/rpartial update :field-name keyword))
-           ;; add entries for fields we weren't expecting
-           (add-unknown-fields-if-needed inner-query result-keys initial-rows)
-           ;; remove expected fields not present in the results, and make sure they're unique
-           (filter (comp (partial contains? (set result-keys)) :field-name))
-           ;; now sort the fields
-           (sort/sort-fields inner-query)
-           ;; remove any duplicate entires
-           (m/distinct-by :field-name)
-           ;; convert them to the format expected by the frontend
-           (map convert-field-to-expected-format)
-           ;; add FK info
-           add-extra-info-to-fk-fields))))
-
-(defn- pre-sort-index->post-sort-index
-  "Return a  mapping of how columns should be sorted:
-   [2 1 0] means the 1st column should be 3rd, 2nd remain 2nd, and 3rd should come 1st."
-  [unsorted-columns sorted-columns]
-  (let [column-index (zipmap unsorted-columns (range))]
-    (map column-index sorted-columns)))
-
-(defn annotate-and-sort
-  "Post-process a structured query to add metadata to the results. This stage:
-
-  1.  Sorts the results according to the rules at the top of this page
-  2.  Resolves the Fields returned in the results and adds information like `:columns` and `:cols` expected by the
-      frontend."
-  [query {:keys [columns rows], :as results}]
-  (let [cols           (resolve-sort-and-format-columns (:query query)
-                                                        (distinct columns)
-                                                        (for [row (take 10 rows)]
-                                                          (zipmap columns row)))
-        sorted-columns (mapv :name cols)]
-    (assoc results
-      :cols    (vec (for [col cols]
-                      (update col :name name)))
-      :columns (mapv name sorted-columns)
-      :rows    (if (not= columns sorted-columns)
-                 (let [sorted-column-ordering (pre-sort-index->post-sort-index columns sorted-columns)]
-                   (for [row rows]
-                     (mapv (partial nth (vec row)) sorted-column-ordering)))
-                 rows))))
diff --git a/src/metabase/query_processor/interface.clj b/src/metabase/query_processor/interface.clj
index 7200cf3b6b18b57a0e6a437222956091c1eab7e3..4f0d078e279d641e74853c10d0bbddac6a628b4b 100644
--- a/src/metabase/query_processor/interface.clj
+++ b/src/metabase/query_processor/interface.clj
@@ -1,21 +1,9 @@
-(ns ^:deprecated metabase.query-processor.interface
-  "Definitions of `Field`, `Value`, and other record types present in an expanded query.
-   This namespace should just contain definitions ^:deprecated of various protocols and record types; associated logic
-  should go in `metabase.query-processor.middleware.expand`."
-  (:require [metabase.config :as config]
-            [metabase.models
-             [dimension :as dim]
-             [field :as field]]
-            [metabase.sync.interface :as i]
-            [metabase.util
-             [date :as du]
-             [schema :as su]]
-            [schema.core :as s])
-  (:import clojure.lang.Keyword
-           [java.sql Time Timestamp]))
-
-;;; --------------------------------------------------- CONSTANTS ----------------------------------------------------
+(ns metabase.query-processor.interface
+  "Dynamic variables, constants, and other things used across the query builder namespaces.")
+;; TODO - Not 100% sure we really need this namespace since it's almost completely empty these days. Seems like the
+;; things here could be moved elsewhere
 
+;; TODO - I think this could go in the `limit` namespace
 (def absolute-max-results
   "Maximum number of rows the QP should ever return.
 
@@ -23,570 +11,16 @@
    https://support.office.com/en-nz/article/Excel-specifications-and-limits-1672b34d-7043-467e-8e27-269d656771c3"
   1048576)
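+;; e.g., a limit middleware can clamp any user-supplied limit against this cap. A minimal
+;; sketch, not the actual `limit` middleware (`query` is a hypothetical MBQL query map):
+(comment
+  (update-in query [:query :limit]
+             #(min (or % absolute-max-results) absolute-max-results)))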
 
-
-;;; -------------------------------------------------- DYNAMIC VARS --------------------------------------------------
-
+;; TODO - maybe we should do this more generally with the help of a macro like `do-with-suppressed-output` from the
+;; test utils, perhaps implemented as separate middleware (and using a `:middleware` option). Or perhaps even make QP
+;; log level an option so you could debug individual queries
 (def ^:dynamic ^Boolean *disable-qp-logging*
   "Should we disable logging for the QP? (e.g., during sync we probably want to turn it off to keep logs less
   cluttered)."
   false)
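+;; e.g., sync code might suppress QP logging with a plain `binding`. A sketch;
+;; `run-sync-queries!` is a hypothetical function:
+(comment
+  (binding [*disable-qp-logging* true]
+    (run-sync-queries!)))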
 
-
 (def ^:dynamic *driver*
   "The driver that will be used to run the query we are currently parsing.
-   Used by `assert-driver-supports` and other places.
    Always bound when running queries the normal way, e.g. via `metabase.driver/process-query`.
   Not necessarily bound when using various functions like `fk->` in the REPL."
   nil)
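+;; e.g., when experimenting in the REPL you can bind it yourself. A sketch; `:h2` is just an
+;; example driver keyword, and `expand-query` / `some-query` are hypothetical:
+(comment
+  (binding [*driver* :h2]
+    (expand-query some-query)))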
-
-
-;;; ------------------------------------------------------ ETC -------------------------------------------------------
-
-(defn ^:deprecated driver-supports?
-  "Does the currently bound `*driver*` support FEATURE?
-   (This returns `nil` if `*driver*` is unbound. `*driver*` is always bound when running queries the normal way,
-   but may not be when calling this function directly from the REPL.)"
-  [feature]
-  (when *driver*
-    ((resolve 'metabase.driver/driver-supports?) *driver* feature)))
-
-;; `assert-driver-supports` doesn't run check when `*driver*` is unbound (e.g., when used in the REPL)
-;; Allows flexibility when composing queries for tests or interactive development
-(defn ^:deprecated assert-driver-supports
-  "When `*driver*` is bound, assert that is supports keyword FEATURE."
-  [feature]
-  (when *driver*
-    (when-not (driver-supports? feature)
-      (throw (Exception. (str (name feature) " is not supported by this driver."))))))
-
-;; Expansion Happens in a Few Stages:
-;; 1. A query dict is parsed via pattern-matching code in the Query Expander.
-;;    field IDs and values are replaced with FieldPlaceholders and ValuePlaceholders, respectively.
-;; 2. Relevant Fields and Tables are fetched from the DB, and the placeholder objects are "resolved"
-;;    and replaced with objects like Field, Value, etc.
-
-;;; ------------------------------------------------ JOINING OBJECTS -------------------------------------------------
-
-;; These are just used by the QueryExpander to record information about how joins should occur.
-
-(s/defrecord ^:deprecated JoinTableField [field-id   :- su/IntGreaterThanZero
-                                          field-name :- su/NonBlankString]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated JoinTable [source-field :- JoinTableField
-                                     pk-field     :- JoinTableField
-                                     table-id     :- su/IntGreaterThanZero
-                                     table-name   :- su/NonBlankString
-                                     schema       :- (s/maybe su/NonBlankString)
-                                     join-alias   :- su/NonBlankString]
-  nil
-  :load-ns true)
-
-(declare Query)
-
-;; Similar to a `JoinTable` but instead of referencing a table, it references a query expression
-(s/defrecord ^:deprecated JoinQuery [source-field :- JoinTableField
-                                     pk-field     :- JoinTableField
-                                     table-id     :- su/IntGreaterThanZero
-                                     schema       :- (s/maybe su/NonBlankString)
-                                     join-alias   :- su/NonBlankString
-                                     query        :- {s/Any  s/Any
-                                                      :query Query}]
-  nil
-  :load-ns true)
-
-;;; --------------------------------------------------- PROTOCOLS ----------------------------------------------------
-
-(defprotocol ^:deprecated IField
-  "Methods specific to the Query Expander `Field` record type."
-  (qualified-name-components [this]
-    "Return a vector of name components of the form `[table-name parent-names... field-name]`
-     (This should always return AT LEAST 2 components. If no table name should be used, return
-     `nil` as the first part.)"))
-;; TODO - Yes, I know, that makes no sense. `annotate/qualify-field-name` expects it that way tho
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                     FIELDS                                                     |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-
-(s/defrecord ^:deprecated FieldValues [field-value-id          :- su/IntGreaterThanZero
-                                       field-id                :- su/IntGreaterThanZero
-                                       values                  :- (s/maybe (s/cond-pre [s/Any] {} []))
-                                       human-readable-values   :- (s/maybe (s/cond-pre [s/Any] {} []))
-                                       created-at              :- java.util.Date
-                                       updated-at              :- java.util.Date]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated Dimensions [dimension-id            :- su/IntGreaterThanZero
-                                      field-id                :- su/IntGreaterThanZero
-                                      dimension-name          :- su/NonBlankString
-                                      human-readable-field-id :- (s/maybe su/IntGreaterThanZero)
-                                      dimension-type          :- (apply s/enum dim/dimension-types)
-                                      created-at              :- java.util.Date
-                                      updated-at              :- java.util.Date]
-  nil
-  :load-ns true)
-
-;; Field is the "expanded" form of a Field ID (field reference) in MBQL
-(s/defrecord ^:deprecated Field [field-id           :- su/IntGreaterThanZero
-                                 field-name         :- su/NonBlankString
-                                 field-display-name :- su/NonBlankString
-                                 database-type      :- su/NonBlankString
-                                 base-type          :- su/FieldType
-                                 special-type       :- (s/maybe su/FieldType)
-                                 visibility-type    :- (apply s/enum field/visibility-types)
-                                 table-id           :- su/IntGreaterThanZero
-                                 schema-name        :- (s/maybe su/NonBlankString)
-                                 table-name         :- (s/maybe su/NonBlankString) ; TODO - Why is this `maybe` ?
-                                 position           :- (s/maybe su/IntGreaterThanZero)
-                                 fk-field-id        :- (s/maybe s/Int)
-                                 description        :- (s/maybe su/NonBlankString)
-                                 parent-id          :- (s/maybe su/IntGreaterThanZero)
-                                 ;; Field once its resolved; FieldPlaceholder before that
-                                 parent             :- s/Any
-                                 remapped-from      :- (s/maybe s/Str)
-                                 remapped-to        :- (s/maybe s/Str)
-                                 dimensions         :- (s/maybe (s/cond-pre Dimensions {} []))
-                                 values             :- (s/maybe (s/cond-pre FieldValues {} []))
-                                 fingerprint        :- (s/maybe i/Fingerprint)
-                                 settings           :- (s/maybe su/Map)]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] field-name)              ; (name <field>) returns the *unqualified* name of the field, #obvi
-
-  IField
-  (qualified-name-components [_]
-    (conj (if parent
-            (qualified-name-components parent)
-            [table-name])
-          field-name)))
-
-;;; DateTimeField
-
-(def ^:deprecated datetime-field-units
-  "Valid units for a `DateTimeField`."
-  #{:default :minute :minute-of-hour :hour :hour-of-day :day :day-of-week :day-of-month :day-of-year
-    :week :week-of-year :month :month-of-year :quarter :quarter-of-year :year})
-
-(def ^:deprecated relative-datetime-value-units
-  "Valid units for a `RelativeDateTimeValue`."
-  #{:minute :hour :day :week :month :quarter :year})
-
-(def ^:deprecated DatetimeFieldUnit
-  "Schema for datetime units that are valid for `DateTimeField` forms."
-  (s/named (apply s/enum datetime-field-units) "Valid datetime unit for a field"))
-
-(def ^:deprecated DatetimeValueUnit
-  "Schema for datetime units that valid for relative datetime values."
-  (s/named (apply s/enum relative-datetime-value-units) "Valid datetime unit for a relative datetime"))
-
-(defn ^:deprecated datetime-field-unit?
-  "Is UNIT a valid datetime unit for a `DateTimeField` form?"
-  [unit]
-  (contains? datetime-field-units (keyword unit)))
-
-(defn ^:deprecated relative-datetime-value-unit?
-  "Is UNIT a valid datetime unit for a `RelativeDateTimeValue` form?"
-  [unit]
-  (contains? relative-datetime-value-units (keyword unit)))
-
-(def ^:deprecated binning-strategies
-  "Valid binning strategies for a `BinnedField`"
-  #{:num-bins :bin-width :default})
-
-;; TODO - maybe we should figure out some way to have the schema validate that the driver supports field literals,
-;; like we do for some of the other clauses. Ideally we'd do that in a more generic way (perhaps in expand, we could
-;; make the clauses specify required feature metadata and have that get checked automatically?)
-(s/defrecord ^:deprecated FieldLiteral [field-name       :- su/NonBlankString
-                                        base-type        :- su/FieldType
-                                        binning-strategy :- (s/maybe (apply s/enum binning-strategies))
-                                        binning-param    :- (s/maybe s/Num)
-                                        binning-opts     :- s/Any
-                                        fingerprint      :- (s/maybe i/Fingerprint)]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] field-name)
-  IField
-  (qualified-name-components [_] [nil field-name]))
-
-;; DateTimeField is just a simple wrapper around Field
-(s/defrecord ^:deprecated DateTimeField [field :- (s/cond-pre Field FieldLiteral)
-                                         unit  :- DatetimeFieldUnit]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] (name field)))
-
-;; TimeField is just a field wrapper that indicates string should be interpretted as a time
-(s/defrecord ^:deprecated TimeField [field :- (s/cond-pre Field FieldLiteral)]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] (name field)))
-
-(s/defrecord ^:deprecated TimeValue [value       :- Time
-                                     field       :- TimeField
-                                     timezone-id :- (s/maybe String)]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated BinnedField [field     :- (s/cond-pre Field FieldLiteral)
-                                       strategy  :- (apply s/enum binning-strategies)
-                                       num-bins  :- s/Int
-                                       min-value :- s/Num
-                                       max-value :- s/Num
-                                       bin-width :- s/Num]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] (name field)))
-
-(s/defrecord ^:deprecated ExpressionRef [expression-name :- su/NonBlankString]
-  nil
-  :load-ns true
-  clojure.lang.Named
-  (getName [_] expression-name)
-  IField
-  (qualified-name-components [_]
-    [nil expression-name]))
-
-
-;;; Placeholder Types. See explaination above RE what these mean
-
-(def ^:deprecated FKFieldID
-  "Schema for an ID for a foreign key Field. If `*driver*` is bound this will throw an Exception if this is non-nil
-  and the driver does not support foreign keys."
-  (s/constrained
-   su/IntGreaterThanZero
-   (fn [_] (or (assert-driver-supports :foreign-keys) true))
-   "foreign-keys is not supported by this driver."))
-
-;; Replace Field IDs with these during first pass
-;; fk-field-id = the ID of the Field we point to (if any). For example if we are 'bird_id` then that is the ID of
-;; bird.id
-(s/defrecord ^:deprecated FieldPlaceholder [field-id            :- su/IntGreaterThanZero
-                                            fk-field-id         :- (s/maybe FKFieldID)
-                                            datetime-unit       :- (s/maybe DatetimeFieldUnit)
-                                            remapped-from       :- (s/maybe s/Str)
-                                            remapped-to         :- (s/maybe s/Str)
-                                            field-display-name  :- (s/maybe s/Str)
-                                            binning-strategy    :- (s/maybe (apply s/enum binning-strategies))
-                                            binning-opts        :- s/Any
-                                            binning-param       :- (s/maybe s/Num)]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated AgFieldRef [index :- s/Int]
-  nil
-  :load-ns true)
-;; TODO - add a method to get matching expression from the query?
-
-(s/defrecord ^:deprecated RelativeDatetime [amount :- s/Int
-                                            unit   :- DatetimeValueUnit]
-  nil
-  :load-ns true)
-
-(declare Aggregation AnyField AnyValueLiteral)
-
-(def ^:deprecated ^:private ExpressionOperator (s/named (s/enum :+ :- :* :/) "Valid expression operator"))
-
-(s/defrecord ^:deprecated Expression [operator   :- ExpressionOperator
-                                      args       :- [(s/cond-pre (s/recursive #'AnyValueLiteral)
-                                                                 (s/recursive #'AnyField)
-                                                                 (s/recursive #'Aggregation))]
-                                      custom-name :- (s/maybe su/NonBlankString)]
-  nil
-  :load-ns true)
-
-
-(def ^:deprecated AnyField
-  "Schema for anything that is considered a valid 'field' including placeholders, expressions, and literals."
-  (s/named (s/cond-pre Field
-                       FieldPlaceholder
-                       DateTimeField
-                       FieldLiteral
-                       AgFieldRef
-                       Expression
-                       ExpressionRef)
-           "AnyField: field, ag field reference, expression, expression reference, or field literal."))
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                     VALUES                                                     |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(def ^:deprecated LiteralDatetimeString
-  "Schema for an MBQL datetime string literal, in ISO-8601 format."
-  (s/constrained su/NonBlankString du/date-string? "Valid ISO-8601 datetime string literal"))
-
-(def ^:deprecated LiteralDatetime
-  "Schema for an MBQL literal datetime value: and ISO-8601 string or `java.sql.Date`."
-  (s/named (s/cond-pre java.sql.Date LiteralDatetimeString)
-           "Valid datetime literal (must be ISO-8601 string or java.sql.Date)"))
-
-(def ^:deprecated Datetime
-  "Schema for an MBQL datetime value: an ISO-8601 string, `java.sql.Date`, or a relative dateitme form."
-  (s/named (s/cond-pre RelativeDatetime LiteralDatetime)
-           "Valid datetime (must ISO-8601 string literal or a relative-datetime form)"))
-
-(def ^:deprecated OrderableValueLiteral
-  "Schema for something that is orderable value in MBQL (either a number or datetime)."
-  (s/named (s/cond-pre s/Num Datetime) "Valid orderable value (must be number or datetime)"))
-
-(def ^:deprecated AnyValueLiteral
-  "Schema for anything that is a considered a valid value literal in MBQL - `nil`, a `Boolean`, `Number`, `String`, or
-  relative datetime form."
-  (s/named (s/maybe (s/cond-pre s/Bool su/NonBlankString OrderableValueLiteral))
-           "Valid value (must be nil, boolean, number, string, or a relative-datetime form)"))
-
-
-;; Value is the expansion of a value within a QL clause
-;; Information about the associated Field is included for convenience
-;; TODO - Value doesn't need the whole field, just the relevant type info / units
-(s/defrecord ^:deprecated Value [value   :- AnyValueLiteral
-                                 field   :- (s/recursive #'AnyField)]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated RelativeDateTimeValue [amount :- s/Int
-                                                 unit   :- DatetimeValueUnit
-                                                 field  :- (s/cond-pre DateTimeField
-                                                                       FieldPlaceholder)]
-  nil
-  :load-ns true)
-
-;; e.g. an absolute point in time (literal)
-(s/defrecord ^:deprecated DateTimeValue [value :- (s/maybe Timestamp)
-                                         field :- DateTimeField]
-  nil
-  :load-ns true)
-
-(def ^:deprecated OrderableValue
-  "Schema for an instance of `Value` whose `:value` property is itself orderable (a datetime or number, i.e. a
-  `OrderableValueLiteral`)."
-  (s/named (s/cond-pre
-            DateTimeValue
-            RelativeDateTimeValue
-            (s/constrained Value (fn [{value :value}]
-                                   (nil? (s/check OrderableValueLiteral value)))))
-           "Value that is orderable (Value whose :value is something orderable, like a datetime or number)"))
-
-(def ^:deprecated StringValue
-  "Schema for an instance of `Value` whose `:value` property is itself a string (a datetime or string, i.e. a
-  `OrderableValueLiteral`)."
-  (s/named (s/constrained Value (comp string? :value))
-           "Value that is a string (Value whose :value is a string)"))
-
-(defprotocol ^:deprecated ^:private IDateTimeValue
-  (unit [this]
-    "Get the `unit` associated with a `DateTimeValue` or `RelativeDateTimeValue`.")
-
-  (add-date-time-units [this n]
-    "Return a new `DateTimeValue` or `RelativeDateTimeValue` with N `units` added to it."))
-
-(extend-protocol IDateTimeValue
-  DateTimeValue
-  (unit                [this]   (:unit (:field this)))
-  (add-date-time-units [this n] (assoc this :value (du/relative-date (unit this) n (:value this))))
-
-  RelativeDateTimeValue
-  (unit                [this]   (:unit this))
-  (add-date-time-units [this n] (update this :amount (partial + n))))
-
-
-;;; Placeholder Types
-
-;; Replace values with these during first pass over Query.
-;; Include associated Field ID so appropriate the info can be found during Field resolution
-(s/defrecord ^:deprecated ValuePlaceholder [field-placeholder :- AnyField
-                                            value             :- AnyValueLiteral]
-  nil
-  :load-ns true)
-
-(def ^:deprecated OrderableValuePlaceholder
-  "`ValuePlaceholder` schema with the additional constraint that the value be orderable (a number or datetime)."
-  (s/constrained
-   ValuePlaceholder
-   (comp (complement (s/checker OrderableValueLiteral)) :value)
-   ":value must be orderable (number or datetime)"))
-
-(def ^:deprecated OrderableValueOrPlaceholder
-  "Schema for an `OrderableValue` (instance of `Value` whose `:value` is orderable) or a placeholder for one."
-  (s/named (s/cond-pre OrderableValue OrderableValuePlaceholder)
-           "Must be an OrderableValue or OrderableValuePlaceholder"))
-
-(def ^:deprecated StringValuePlaceholder
-  "`ValuePlaceholder` schema with the additional constraint that the value be a string/"
-  (s/constrained ValuePlaceholder (comp string? :value) ":value must be a string"))
-
-(def ^:deprecated StringValueOrPlaceholder
-  "Schema for an `StringValue` (instance of `Value` whose `:value` is a string) or a placeholder for one."
-  (s/named (s/cond-pre StringValue StringValuePlaceholder)
-           "Must be an StringValue or StringValuePlaceholder"))
-
-(def ^:deprecated AnyValue
-  "Schema that accepts anything normally considered a value or value placeholder."
-  (s/named (s/cond-pre DateTimeValue RelativeDateTimeValue Value ValuePlaceholder) "Valid value"))
-
-(def ^:deprecated AnyFieldOrValue
-  "Schema that accepts anything normally considered a field or value."
-  (s/named (s/cond-pre AnyField AnyValue) "Field or value"))
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                    CLAUSES                                                     |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-;;; aggregation
-
-(s/defrecord ^:deprecated AggregationWithoutField [aggregation-type :- (s/named (s/enum :count :cumulative-count)
-                                                                                "Valid aggregation type")
-                                                   custom-name      :- (s/maybe su/NonBlankString)]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated AggregationWithField [aggregation-type :- (s/named (s/enum :avg :count :cumulative-count
-                                                                                     :cumulative-sum :distinct :max
-                                                                                     :min :stddev :sum)
-                                                                             "Valid aggregation type")
-                                                field            :- (s/cond-pre AnyField
-                                                                                Expression)
-                                                custom-name      :- (s/maybe su/NonBlankString)]
-  nil
-  :load-ns true)
-
-(defn- ^:deprecated valid-aggregation-for-driver? [{:keys [aggregation-type]}]
-  (when (= aggregation-type :stddev)
-    (assert-driver-supports :standard-deviation-aggregations))
-  true)
-
-(def ^:deprecated Aggregation
-  "Schema for an `aggregation` subclause in an MBQL query."
-  (s/constrained
-   (s/cond-pre AggregationWithField AggregationWithoutField Expression)
-   valid-aggregation-for-driver?
-   "standard-deviation-aggregations is not supported by this driver."))
-
-
-;;; filter
-
-(s/defrecord ^:deprecated EqualityFilter [filter-type :- (s/enum := :!=)
-                                          field       :- AnyField
-                                          value       :- AnyFieldOrValue]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated ComparisonFilter [filter-type :- (s/enum :< :<= :> :>=)
-                                            field       :- AnyField
-                                            value       :- OrderableValueOrPlaceholder]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated BetweenFilter [filter-type  :- (s/eq :between)
-                                         min-val      :- OrderableValueOrPlaceholder
-                                         field        :- AnyField
-                                         max-val      :- OrderableValueOrPlaceholder]
-  nil
-  :load-ns true)
-
-(s/defrecord ^:deprecated StringFilter [filter-type     :- (s/enum :starts-with :contains :ends-with)
-                                        field           :- AnyField
-                                        ;; TODO - not 100% sure why this is also allowed to accept a plain string
-                                        value           :- (s/cond-pre s/Str StringValueOrPlaceholder)
-                                        case-sensitive? :- s/Bool]
-  nil
-  :load-ns true)
-
-(def ^:deprecated SimpleFilterClause
-  "Schema for a non-compound, non-`not` MBQL `filter` clause."
-  (s/named (s/cond-pre EqualityFilter ComparisonFilter BetweenFilter StringFilter)
-           "Simple filter clause"))
-
-(s/defrecord ^:deprecated NotFilter [compound-type :- (s/eq :not)
-                                     subclause     :- SimpleFilterClause]
-  nil
-  :load-ns true)
-
-(declare Filter)
-
-(s/defrecord ^:deprecated CompoundFilter [compound-type :- (s/enum :and :or)
-                                          subclauses    :- [(s/recursive #'Filter)]]
-  nil
-  :load-ns true)
-
-(def ^:deprecated Filter
-  "Schema for top-level `filter` clause in an MBQL query."
-  (s/named (s/cond-pre SimpleFilterClause NotFilter CompoundFilter)
-           "Valid filter clause"))
-
-
-;;; order-by
-
-(def ^:deprecated OrderByDirection
-  "Schema for the direction in an `OrderBy` subclause."
-  (s/named (s/enum :ascending :descending) "Valid order-by direction"))
-
-(def ^:deprecated OrderBy
-  "Schema for top-level `order-by` clause in an MBQL query."
-  (s/named {:field     AnyField
-            :direction OrderByDirection}
-           "Valid order-by subclause"))
-
-
-;;; page
-
-(def ^:deprecated Page
-  "Schema for the top-level `page` clause in a MBQL query."
-  (s/named {:page  su/IntGreaterThanZero
-            :items su/IntGreaterThanZero}
-           "Valid page clause"))
-
-
-;;; source-query
-
-(def ^:deprecated SourceQuery
-  "Schema for a valid value for a `:source-query` clause."
-  (s/if :native
-    {:native                         s/Any
-     (s/optional-key :template-tags) s/Any}
-    (s/recursive #'Query)))
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                     QUERY                                                      |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(def ^:deprecated Query
-  "Schema for an MBQL query."
-  (s/constrained
-   {(s/optional-key :aggregation)  [Aggregation]
-    (s/optional-key :breakout)     [AnyField]
-    (s/optional-key :fields)       [AnyField]
-    (s/optional-key :filter)       Filter
-    (s/optional-key :limit)        su/IntGreaterThanZero
-    (s/optional-key :order-by)     [OrderBy]
-    (s/optional-key :page)         Page
-    (s/optional-key :expressions)  {s/Keyword Expression}
-    (s/optional-key :source-table) su/IntGreaterThanZero
-    (s/optional-key :source-query) SourceQuery}
-   (fn [{:keys [source-table source-query native-source-query]}]
-     (and (or source-table
-              source-query
-              native-source-query)
-          (not (and source-table
-                    source-query
-                    native-source-query))))
-   "Query must specify either `:source-table` or `:source-query`, but not both."))
-
-;; Go ahead and mark all the `->Record` and `map->Record` functions as deprecated too! Just so they show up in red in
-;; Emacs
-(when config/is-dev?
-  (doseq [[_ varr] (ns-publics *ns*)
-          :when (fn? (var-get varr))]
-    (alter-meta! varr assoc :deprecated true)))
diff --git a/src/metabase/query_processor/middleware/add_dimension_projections.clj b/src/metabase/query_processor/middleware/add_dimension_projections.clj
index ca45d1d5e27a00752dadee81e12e747c4707b187..40885cdce8a9139b7d19ae14f3e5a1a5b311b1af 100644
--- a/src/metabase/query_processor/middleware/add_dimension_projections.clj
+++ b/src/metabase/query_processor/middleware/add_dimension_projections.clj
@@ -30,7 +30,9 @@
             [metabase.util :as u]
             [metabase.util.schema :as su]
             [schema.core :as s]
-            [toucan.db :as db]))
+            [toucan
+             [db :as db]
+             [hydrate :refer [hydrate]]]))
 
 (def ^:private ExternalRemappingDimension
   "Schema for the info we fetch about `external` type Dimensions that will be used for remappings in this Query. Fetched
@@ -60,13 +62,16 @@
   get hidden when displayed anyway?)"
   [fields :- [mbql.s/Field]]
   (when-let [field-id->remapping-dimension (fields->field-id->remapping-dimension fields)]
-    (vec (for [field fields
-               :when (mbql.u/is-clause? :field-id field)
-               :let  [dimension (field-id->remapping-dimension (second field))]
-               :when dimension]
-           [field
-            [:fk-> field [:field-id (:human_readable_field_id dimension)]]
-            dimension]))))
+    (vec
+     (mbql.u/match fields
+       ;; don't match Field IDs nested in other clauses
+       [(_ :guard keyword?) [:field-id _] & _] nil
+
+       [:field-id (id :guard field-id->remapping-dimension)]
+       (let [dimension (field-id->remapping-dimension id)]
+         [&match
+          [:fk-> &match [:field-id (:human_readable_field_id dimension)]]
+          dimension])))))
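+;; e.g., if Field 1 has an external remapping whose human-readable Field is 100, the tuple
+;; generated for [:field-id 1] looks roughly like this (the IDs are hypothetical):
+;;
+;;   [[:field-id 1] [:fk-> [:field-id 1] [:field-id 100]] <the-remapping-dimension>]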
 
 (s/defn ^:private update-remapped-order-by :- [mbql.s/OrderBy]
   "Order by clauses that include an external remapped column should be replace that original column in the order by with
@@ -84,6 +89,7 @@
   "Add any Fields needed for `:external` remappings to the `:fields` clause of the query, and update `:order-by`
   clause as needed. Returns a pair like `[external-remapping-dimensions updated-query]`."
   [{{:keys [fields order-by]} :query, :as query} :- mbql.s/Query]
+  ;; TODO - I think we need to handle Fields in `:breakout` here as well...
   ;; fetch remapping column pairs if any exist...
   (if-let [remap-col-tuples (seq (create-remap-col-tuples fields))]
     ;; if they do, update `:fields` and `:order-by` clauses accordingly and add to the query
@@ -107,8 +113,8 @@
   To get this critical information, this uses the `remapping-dimensions` info saved by the pre-processing portion of
   this middleware for external remappings, and the internal-only remapped columns handled by post-processing
   middleware below for internal columns."
-  [remapping-dimensions   :- (s/maybe [ExternalRemappingDimension])
-   columns                :- [su/Map]
+  [columns                :- [su/Map]
+   remapping-dimensions   :- (s/maybe [ExternalRemappingDimension])
    internal-remap-columns :- (s/maybe [su/Map])]
   (let [column-id->column              (u/key-by :id columns)
         name->internal-remapped-to-col (u/key-by :remapped_from internal-remap-columns)
@@ -133,17 +139,14 @@
           :remapped_from (:name (get column-id->column remapped-from-id))})))))
 
 (defn- create-remapped-col [col-name remapped-from]
-  {:description     nil
-   :id              nil
-   :table_id        nil
-   :expression-name col-name
-   :source          :fields
-   :name            col-name
-   :display_name    col-name
-   :target          nil
-   :extra_info      {}
-   :remapped_from   remapped-from
-   :remapped_to     nil})
+  {:description   nil
+   :id            nil
+   :table_id      nil
+   :name          col-name
+   :display_name  col-name
+   :target        nil
+   :remapped_from remapped-from
+   :remapped_to   nil})
 
 (defn- transform-values-for-col
   "Converts `values` to a type compatible with the base_type found for `col`. These values should be directly comparable
@@ -158,23 +161,40 @@
          identity)
        values))
 
-(defn- col->dim-map
-  [idx {{remap-to :dimension-name, remap-type :dimension-type, field-id :field-id} :dimensions, :as col}]
-  (when field-id
+(def ^:private InternalDimensionInfo
+  {;; index of original column
+   :col-index       s/Int
+   ;; names
+   :from            su/NonBlankString
+   :to              su/NonBlankString
+   ;; map of original value -> human readable value
+   :value->readable su/Map
+   ;; Info about the new column we will tack on to the end of `:cols`
+   :new-column      su/Map})
+
+(s/defn ^:private col->dim-map :- (s/maybe InternalDimensionInfo)
+  "Given a `:col` map from the results, return a map of information about the `internal` dimension used for remapping
+  it."
+  [idx {{remap-to :name, remap-type :type, field-id :field_id}         :dimensions
+        {values :values, human-readable-values :human_readable_values} :values
+        :as                                                            col}]
+  (when (and field-id
+             (= remap-type :internal))
     (let [remap-from (:name col)]
-      {:col-index      idx
-       :from           remap-from
-       :to             remap-to
-       :xform-fn       (zipmap (transform-values-for-col col (get-in col [:values :values]))
-                               (get-in col [:values :human-readable-values]))
-       :new-column     (create-remapped-col remap-to remap-from)
-       :dimension-type remap-type})))
-
-(defn- row-map-fn [dim-seq]
+      {:col-index       idx
+       :from            remap-from
+       :to              remap-to
+       :value->readable (zipmap (transform-values-for-col col values)
+                                human-readable-values)
+       :new-column      (create-remapped-col remap-to remap-from)})))
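+;; e.g., for a col at index 0 named "category_id" with an `internal` Dimension named
+;; "Category" and FieldValues mapping [1 2] to ["Doohickey" "Gadget"], this returns roughly
+;; (all values illustrative):
+;;
+;;   {:col-index       0
+;;    :from            "category_id"
+;;    :to              "Category"
+;;    :value->readable {1 "Doohickey", 2 "Gadget"}
+;;    :new-column      {:name "Category", :remapped_from "category_id", ...}}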
+
+(s/defn ^:private make-row-map-fn :- (s/pred fn? "function")
+  "Return a function that will add internally-remapped values to each row in the results."
+  [dim-seq :- [InternalDimensionInfo]]
   (fn [row]
-    (concat row (map (fn [{:keys [col-index xform-fn]}]
-                          (xform-fn (nth row col-index)))
-                        dim-seq))))
+    (concat row (map (fn [{:keys [col-index value->readable]}]
+                       (value->readable (nth row col-index)))
+                     dim-seq))))
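+;; e.g., given one dim whose :col-index is 0 and whose :value->readable is {1 "Low", 2 "High"},
+;; the returned fn maps a row [1 "foo"] to (1 "foo" "Low"): the readable value is appended and
+;; the original value stays put (values illustrative).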
 
 (s/defn ^:private remap-results
   "Munges results for remapping after the query has been executed. For internal remappings, a new column needs to be
@@ -182,16 +202,24 @@
   the column information needs to be updated with what it's being remapped from and the user specified name for the
   remapped column."
   [remapping-dimensions :- (s/maybe [ExternalRemappingDimension]), results]
-  (let [indexed-dims       (keep-indexed col->dim-map (:cols results))
-        internal-only-dims (filter #(= :internal (:dimension-type %)) indexed-dims)
-        remap-fn           (row-map-fn internal-only-dims)
+  (let [ ;; hydrate Dimensions and FieldValues for all of the columns in the results, then make a map of dimension info
+        ;; for each one that is `internal` type
+        internal-only-dims (->> (hydrate (:cols results) :values :dimensions)
+                                (keep-indexed col->dim-map)
+                                (filter identity))
+        ;; now use `internal-only-dims` to create a function that will add internally remapped values to each row in the results
+        remap-fn           (make-row-map-fn internal-only-dims)
+        ;; Get the entries we're going to add to `:cols` for each of the remapped values we add
         internal-only-cols (map :new-column internal-only-dims)]
     (-> results
-        (update :columns into (map :to internal-only-dims))
-        (assoc  :cols    (map #(dissoc % :dimensions :values)
-                              (concat (add-remapping-info remapping-dimensions (:cols results) internal-only-cols)
-                                      internal-only-cols)))
-        (update :rows    #(map remap-fn %)))))
+        ;; add the names of each newly added column to the end of `:columns`
+        (update :columns concat (map :to internal-only-dims))
+        ;; add remapping info `:remapped_from` and `:remapped_to` to each existing `:col`
+        (update :cols add-remapping-info remapping-dimensions internal-only-cols)
+        ;; now add the entries for each newly added column to the end of `:cols`
+        (update :cols concat internal-only-cols)
+        ;; Call our `remap-fn` on each row to add the new values to the end
+        (update :rows (partial map remap-fn)))))
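+;; e.g., with a single internal remapping of "category_id" to "Category", results like
+;;   {:columns ["category_id"], :rows [[1]]}
+;; come back roughly as (illustrative; `:cols` grows by the new column as well):
+;;   {:columns ("category_id" "Category"), :rows ((1 "Doohickey"))}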
 
 
 ;;; --------------------------------------------------- middleware ---------------------------------------------------
@@ -201,7 +229,9 @@
   `add-fk-remaps` for making remapping changes to the query (before executing the query). Then delegates to
   `remap-results` to munge the results after query execution."
   [qp]
-  (fn [query]
-    (let [[remapping-dimensions query] (add-fk-remaps query)
-          results                      (qp query)]
-      (remap-results remapping-dimensions results))))
+  (fn [{query-type :type, :as query}]
+    (if (= query-type :native)
+      (qp query)
+      (let [[remapping-dimensions query] (add-fk-remaps query)
+            results                      (qp query)]
+        (remap-results remapping-dimensions results)))))
diff --git a/src/metabase/query_processor/middleware/add_implicit_clauses.clj b/src/metabase/query_processor/middleware/add_implicit_clauses.clj
index 82bf61c4242b34abe2197a30a64f885aacc8567f..09532f5fcd98348e8791955d222ab7929d55f0a3 100644
--- a/src/metabase/query_processor/middleware/add_implicit_clauses.clj
+++ b/src/metabase/query_processor/middleware/add_implicit_clauses.clj
@@ -1,7 +1,6 @@
 (ns metabase.query-processor.middleware.add-implicit-clauses
   "Middlware for adding an implicit `:fields` and `:order-by` clauses to certain queries."
-  (:require [clojure.tools.logging :as log]
-            [honeysql.core :as hsql]
+  (:require [honeysql.core :as hsql]
             [metabase
              [db :as mdb]
              [util :as u]]
@@ -20,35 +19,34 @@
 ;;; |                                              Add Implicit Fields                                               |
 ;;; +----------------------------------------------------------------------------------------------------------------+
 
-(defn- datetime-field? [{:keys [base_type special_type]}]
-  (or (isa? base_type :type/DateTime)
-      (isa? special_type :type/DateTime)))
+;; this is a fn because we don't want to call mdb/isa before the type hierarchy is loaded!
+(defn- default-sort-rules []
+  [ ;; sort first by position,
+   [:position :asc]
+   ;; or if that's the same, sort PKs first, followed by names, followed by everything else
+   [(hsql/call :case
+      (mdb/isa :special_type :type/PK)   0
+      (mdb/isa :special_type :type/Name) 1
+      :else                              2)
+    :asc]
+   ;; finally, sort by name (case-insensitive)
+   [:%lower.name :asc]])
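+;; e.g., against a SQL application database these rules compile to an ORDER BY roughly like
+;; the following (a sketch; `mdb/isa` actually expands to match descendant types too):
+;;
+;;   ORDER BY position ASC,
+;;            CASE WHEN special_type = 'type/PK'   THEN 0
+;;                 WHEN special_type = 'type/Name' THEN 1
+;;                 ELSE 2 END ASC,
+;;            lower(name) ASC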
+
+(defn- table->sorted-fields [table-or-id]
+  (db/select [Field :id :base_type :special_type]
+    :table_id        (u/get-id table-or-id)
+    :active          true
+    :visibility_type [:not-in ["sensitive" "retired"]]
+    :parent_id       nil
+    ;; I suppose if we wanted to we could make the `order-by` rules swappable with some other set of rules
+    {:order-by (default-sort-rules)}))
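+;; e.g., (table->sorted-fields 1) returns the active, non-sensitive Fields of Table 1 in the
+;; order described above (Table ID 1 is hypothetical).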
 
 (s/defn ^:private sorted-implicit-fields-for-table :- [mbql.s/Field]
   "For use when adding implicit Field IDs to a query. Return a sequence of field clauses, sorted by the rules listed
   in `metabase.query-processor.sort`, for all the Fields in a given Table."
   [table-id :- su/IntGreaterThanZero]
-  (for [field (db/select [Field :id :base_type :special_type]
-                :table_id        table-id
-                :active          true
-                :visibility_type [:not-in ["sensitive" "retired"]]
-                :parent_id       nil
-                {:order-by [
-                            ;; we can skip 1-3 because queries w/ implicit Field IDs queries won't have
-                            ;; breakouts or fields clauses, and aggregation isn't an actual Field in the DB
-                            ;; anyway
-                            ;;
-                            ;; 4A. position
-                            [:position :asc]
-                            ;; 4B. special_type: :type/PK, :type/Name, then others
-                            [(hsql/call :case
-                               (mdb/isa :special_type :type/PK)   0
-                               (mdb/isa :special_type :type/Name) 1
-                               :else                              2)
-                             :asc]
-                            ;; 4C. name
-                            [:%lower.name :asc]]})]
-    (if (datetime-field? field)
+  (for [field (table->sorted-fields table-id)]
+    (if (mbql.u/datetime-field? field)
       ;; implicit datetime Fields get bucketing of `:default`. This is so other middleware doesn't try to give it
       ;; default bucketing of `:day`
       [:datetime-field [:field-id (u/get-id field)] :default]
@@ -78,9 +76,10 @@
                         ;; TODO - we need to wrap this in `u/keyword->qualified-name` because `:expressions` uses
                         ;; keywords as keys. We can remove this call once we fix that.
                         [:expression (u/keyword->qualified-name expression-name)])]
-      ;; if the Table has no Fields, log a warning.
+      ;; if the Table has no Fields, throw an Exception, because there is no way for us to proceed
       (when-not (seq fields)
-        (log/warn (tru "Table ''{0}'' has no Fields associated with it." (:name (qp.store/table source-table-id)))))
+        (throw (Exception. (str (tru "Table ''{0}'' has no Fields associated with it."
+                                     (:name (qp.store/table source-table-id)))))))
       ;; add the fields & expressions under the `:fields` clause
       (assoc-in query [:query :fields] (vec (concat fields expressions))))))
 
diff --git a/src/metabase/query_processor/middleware/annotate.clj b/src/metabase/query_processor/middleware/annotate.clj
new file mode 100644
index 0000000000000000000000000000000000000000..724dd03d9012aaca107b017ddd482cea0d5de1a2
--- /dev/null
+++ b/src/metabase/query_processor/middleware/annotate.clj
@@ -0,0 +1,298 @@
+(ns metabase.query-processor.middleware.annotate
+  "Middleware for annotating (adding type information to) the results of a query, under the `:cols` column."
+  (:require [clojure.string :as str]
+            [metabase
+             [driver :as driver]
+             [util :as u]]
+            [metabase.mbql
+             [predicates :as mbql.preds]
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models.humanization :as humanization]
+            [metabase.query-processor
+             [interface :as i]
+             [store :as qp.store]]
+            [metabase.util
+             [i18n :refer [tru]]
+             [schema :as su]]
+            [schema.core :as s]))
+
+(def ^:private Col
+  "Schema for a valid map of column info as found in the `:cols` key of the results after this namespace has ran."
+  ;; name and display name can be blank because some wacko DBMSes like SQL Server return blank column names for
+  ;; unaliased aggregations like COUNT(*) (this only applies to native queries, since we determine our own names for
+  ;; MBQL.)
+  {:name                          s/Str
+   :display_name                  s/Str
+   ;; type of the Field. For Native queries we look at the values in the first 100 rows to make an educated guess
+   :base_type                     su/FieldType
+   (s/optional-key :special_type) (s/maybe su/FieldType)
+   ;; where this column came from in the original query.
+   :source                        (s/enum :aggregation :fields :breakout :native)
+   ;; various other stuff from the original Field can and should be included such as `:settings`
+   s/Any                          s/Any})
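+;; e.g., a minimal map that satisfies this schema (values illustrative):
+(comment
+  (s/validate Col {:name         "count"
+                   :display_name "Count"
+                   :base_type    :type/Integer
+                   :source       :aggregation}))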
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                      Adding :cols info for native queries                                      |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+(defn- native-cols
+  "Infer the types of columns by looking at the first value for each in the results, which will be added to the results
+  as `:cols`. This is used for native queries, which don't have the type information from the original `Field` objects
+  used in the query."
+  [{:keys [columns rows]}]
+  (vec (for [i    (range (count columns))
+             :let [col (nth columns i)]]
+         {:name         (name col)
+          :display_name (or (humanization/name->human-readable-name (u/keyword->qualified-name col))
+                            (u/keyword->qualified-name col))
+          :base_type    (or (driver/values->base-type (for [row rows]
+                                                        (nth row i)))
+                            :type/*)
+          :source       :native})))
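+;; e.g., for results like {:columns [:id :name], :rows [[1 "Lucky"] [2 "Rasta"]]} this infers
+;; something along these lines (exact base types come from `driver/values->base-type`):
+;;
+;;   [{:name "id",   :display_name "ID",   :base_type :type/Integer, :source :native}
+;;    {:name "name", :display_name "Name", :base_type :type/Text,    :source :native}]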
+
+(defn- add-native-column-info
+  [{:keys [columns], :as results}]
+  (assoc results
+    :columns (mapv name columns)
+    :cols    (native-cols results)))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                       Adding :cols info for MBQL queries                                       |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;;; --------------------------------------------------- Field Info ---------------------------------------------------
+
+(s/defn ^:private col-info-for-field-clause :- su/Map
+  [clause :- mbql.s/Field]
+  ;; for various things that can wrap Field clauses, recurse on the wrapped Field but include a little bit of info
+  ;; about the clause doing the wrapping
+  (mbql.u/match-one clause
+    [:binning-strategy field strategy _ resolved-options]
+    (assoc (col-info-for-field-clause field) :binning_info (assoc (u/snake-keys resolved-options)
+                                                             :binning_strategy strategy))
+
+    [:datetime-field field unit]
+    (assoc (col-info-for-field-clause field) :unit unit)
+
+    [:fk-> [:field-id source-field-id] field]
+    (assoc (col-info-for-field-clause field) :fk_field_id source-field-id)
+
+    ;; for FKs where source is a :field-literal don't include `:fk_field_id`
+    [:fk-> _ field]
+    (recur field)
+
+    [:field-literal field-name field-type]
+    {:name         field-name
+     :base_type    field-type
+     :display_name (humanization/name->human-readable-name field-name)}
+
+    [:expression expression-name]
+    {:name         expression-name
+     :display_name expression-name
+     :base_type    :type/Float
+     :special_type :type/Number}
+
+    [:field-id id]
+    (dissoc (qp.store/field id) :database_type)
+
+    ;; we should never reach this if our patterns are written right so this is more to catch code mistakes than
+    ;; something the user should expect to see
+    _ (throw (Exception. (str (tru "Don't know how to get information about Field:") " " &match)))))
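+;; e.g., a wrapped clause like [:datetime-field [:field-id 10] :month] resolves to the stored
+;; metadata for Field 10 (from the QP store) with {:unit :month} merged in. (Field ID 10 is
+;; hypothetical here.)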
+
+
+;;; ---------------------------------------------- Aggregate Field Info ----------------------------------------------
+
+(def ^:private arithmetic-op->text
+  {:+ "add"
+   :- "sub"
+   :/ "div"
+   :* "mul"})
+
+(declare aggregation-name)
+
+(defn- expression-ag-arg->name
+  "Generate an appropriate name for an `arg` in an expression aggregation."
+  [arg]
+  (mbql.u/match-one arg
+    ;; if the arg itself is a nested expression, recursively find a name for it, and wrap in parens
+    [(_ :guard #{:+ :- :/ :*}) & _]
+    (str "(" (aggregation-name &match) ")")
+
+    ;; if the arg is another aggregation, recurse to get its name. (Only aggregations, nested expressions, or numbers
+    ;; are allowed as args to expression aggregations; thus anything that's an MBQL clause, but not a nested
+    ;; expression, is an ag clause.)
+    [(_ :guard keyword?) & _]
+    (aggregation-name &match)
+
+    ;; otherwise for things like numbers just use that directly
+    _ &match))
+
+(s/defn aggregation-name :- su/NonBlankString
+  "Return an appropriate field *and* display name for an `:aggregation` subclause (an aggregation or
+  expression). Takes an options map, since schema doesn't support passing keypairs directly as varargs. `{:top-level?
+  true}` will cause a name to be generated that will appear in the results; other names with a leading __ will be
+  trimmed on some backends."
+  [ag-clause :- mbql.s/Aggregation & [{:keys [top-level?]}]]
+  (when-not i/*driver*
+    (throw (Exception. (str (tru "metabase.query-processor.interface/*driver* is unbound.")))))
+  (mbql.u/match-one ag-clause
+    ;; if a custom name was provided use it
+    [:named _ ag-name]
+    (driver/format-custom-field-name i/*driver* ag-name)
+
+    ;; For unnamed expressions, just compute a name like "sum + count"
+    ;; Top level expressions need a name without a leading __ as those are automatically removed from the results
+    [(operator :guard #{:+ :- :/ :*}) & args]
+    (str (when top-level?
+           (str (arithmetic-op->text operator)
+                "__"))
+         (str/join (str " " (name operator) " ")
+                   (map expression-ag-arg->name args)))
+
+    ;; for unnamed normal aggregations, the column alias is always the same as the ag type, except for `:distinct`,
+    ;; which is called `:count` (WHY?)
+    [:distinct _]
+    "count"
+
+    ;; for any other aggregation just use the name of the clause e.g. `sum`
+    [clause-name & _]
+    (name clause-name)))
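+;; e.g., assuming `i/*driver*` is bound and the driver returns custom names unchanged
+;; (results illustrative):
+(comment
+  (aggregation-name [:count])                                         ;; => "count"
+  (aggregation-name [:named [:sum [:field-id 1]] "Total"])            ;; => "Total"
+  (aggregation-name [:+ [:sum [:field-id 1]] 1] {:top-level? true}))  ;; => "add__sum + 1"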
+
+(defn- ag->name-info [ag]
+  (let [ag-name (aggregation-name ag)]
+    {:name         ag-name
+     :display_name ag-name}))
+
+(defn- col-info-for-aggregation-clause
+  "Return appropriate column metadata for an `:aggregation` clause."
+  [aggregation-clause]
+  (mbql.u/match-one aggregation-clause
+    ;; ok, if this is a named aggregation recurse so we can get information about the ag we are naming
+    [:named ag _]
+    (merge (col-info-for-aggregation-clause ag)
+           (ag->name-info &match))
+
+    ;; Always treat count or distinct count as an integer even if the DB in question returns it as something
+    ;; wacky like a BigDecimal or Float
+    [(_ :guard #{:count :distinct}) & args]
+    (merge (col-info-for-aggregation-clause args)
+           {:base_type    :type/Integer
+            :special_type :type/Number}
+           (ag->name-info &match))
+
+    ;; get info from a Field if we can (these Fields are matched when ag clauses recursively call
+    ;; `col-info-for-aggregation-clause`, and this info is added into the results)
+    [(_ :guard #{:field-id :field-literal :fk-> :datetime-field :expression :binning-strategy}) & _]
+    (select-keys (col-info-for-field-clause &match) [:base_type :special_type :settings])
+
+    ;; For the time being every Expression is an arithmetic operator and returns a floating-point number, so
+    ;; hardcoding these types is fine; In the future when we extend Expressions to handle more functionality
+    ;; we'll want to introduce logic that associates a return type with a given expression. But this will work
+    ;; for the purposes of a patch release.
+    [(_ :guard #{:expression :+ :- :/ :*}) & _]
+    (merge {:base_type    :type/Float
+            :special_type :type/Number}
+           (when (mbql.preds/Aggregation? &match)
+             (ag->name-info &match)))
+
+    ;; get name/display-name of this ag
+    [(_ :guard keyword?) arg]
+    (merge (col-info-for-aggregation-clause arg)
+           (ag->name-info &match))))
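+
+;; Example (illustrative, assuming `i/*driver*` is bound):
+;;
+;;    (col-info-for-aggregation-clause [:count])
+;;    ;; -> {:base_type :type/Integer, :special_type :type/Number, :name "count", :display_name "count"}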
+
+
+;;; ----------------------------------------- Putting it all together (MBQL) -----------------------------------------
+
+(defn- check-correct-number-of-columns-returned [mbql-cols results]
+  (let [expected-count (count mbql-cols)
+        actual-count   (count (:columns results))]
+    (when (seq (:rows results))
+      (when-not (= expected-count actual-count)
+        (throw
+         (Exception.
+          (str (tru "Query processor error: mismatched number of columns in query and results.")
+               " "
+               (tru "Expected {0} fields, got {1}" expected-count actual-count)
+               "\n"
+               (tru "Expected: {0}" (mapv :name mbql-cols))
+               "\n"
+               (tru "Actual: {0}" (vec (:columns results))))))))))
+
+(defn- cols-for-fields [{{fields-clause :fields} :query, :as query}]
+  (for [field fields-clause]
+    (assoc (col-info-for-field-clause field) :source :fields)))
+
+(defn- cols-for-ags-and-breakouts [{{aggregations :aggregation, breakouts :breakout} :query, :as query}]
+  (concat
+   (for [breakout breakouts]
+     (assoc (col-info-for-field-clause breakout) :source :breakout))
+   (for [aggregation aggregations]
+     (assoc (col-info-for-aggregation-clause aggregation) :source :aggregation))))
+
+(declare mbql-cols)
+
+(defn- cols-for-source-query [{native-source-query :native, :as source-query} results]
+  (if native-source-query
+    (native-cols results)
+    (mbql-cols {:query source-query} results)))
+
+(defn- mbql-cols [{{:keys [source-query]} :query, :as query}, results]
+  (let [cols (concat
+              (cols-for-ags-and-breakouts query)
+              (cols-for-fields query))]
+    (if (and (empty? cols) source-query)
+      (cols-for-source-query source-query results)
+      cols)))
+
+(defn- add-mbql-column-info [query results]
+  (let [cols (mbql-cols query results)]
+    (check-correct-number-of-columns-returned cols results)
+    (assoc results :cols cols)))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                              Deduplicating names                                               |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+(def ^:private ColsWithUniqueNames
+  ;; note that `distinct?` must be applied to the names themselves; called with a single collection argument it
+  ;; always returns true
+  (s/constrained [Col] #(or (empty? %) (apply distinct? (map :name %))) ":cols with unique names"))
+
+(s/defn ^:private deduplicate-cols-names :- ColsWithUniqueNames
+  [cols :- [Col]]
+  (map (fn [col unique-name]
+         (assoc col :name unique-name))
+       cols
+       (mbql.u/uniquify-names (map :name cols))))
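+
+;; Illustrative example of the underlying name uniquification (the exact suffix scheme is up to
+;; `mbql.u/uniquify-names`):
+;;
+;;    (mbql.u/uniquify-names ["count" "sum" "count"]) ;; -> ("count" "sum" "count_2")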
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                               GENERAL MIDDLEWARE                                               |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private add-column-info* :- {:cols ColsWithUniqueNames, s/Keyword s/Any}
+  [{query-type :type, :as query} {cols-returned-by-driver :cols, :as results}]
+  (->
+   ;; add `:cols` info to the query, using the appropriate function based on query type
+   (if-not (= query-type :query)
+     (add-native-column-info results)
+     (add-mbql-column-info query results))
+   ;; If the driver returned a `:cols` map with its results, which is completely optional, merge our `:cols` derived
+   ;; from logic above with theirs. We'll prefer the values in theirs to ours. This is important for wacky drivers
+   ;; like GA that use things like native metrics, which we have no information about.
+   ;;
+   ;; It's the responsibility of the driver to make sure the `:cols` are returned in the correct number and order.
+   (update :cols (if (seq cols-returned-by-driver)
+                   #(map merge % cols-returned-by-driver)
+                   identity))
+   ;; Finally, make sure the `:name` of each map in `:cols` is unique, since the FE uses it as a key for stuff like
+   ;; column settings
+   (update :cols deduplicate-cols-names)))
+
+(defn add-column-info
+  "Middleware for adding type information about the columns in the query results (the `:cols` key)."
+  [qp]
+  (fn [query]
+    (add-column-info* query (qp query))))
diff --git a/src/metabase/query_processor/middleware/annotate_and_sort.clj b/src/metabase/query_processor/middleware/annotate_and_sort.clj
deleted file mode 100644
index 2c63eccfa7b922c5d9153a3a8ce97cebd5b118ef..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/middleware/annotate_and_sort.clj
+++ /dev/null
@@ -1,39 +0,0 @@
-(ns metabase.query-processor.middleware.annotate-and-sort
-  "Middleware for annotating (adding type information to) the results of a query and sorting the columns in the results."
-  (:require [metabase.driver :as driver]
-            [metabase.models.humanization :as humanization]
-            [metabase.query-processor.annotate :as annotate]))
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                            NATIVE QUERY ANNOTATION                                             |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(defn- infer-column-types
-  "Infer the types of columns by looking at the first value for each in the results, and add the relevant information in
-  `:cols`. This is used for native queries, which don't have the type information from the original `Field` objects
-  used in the query, which is added to the results by `annotate`."
-  [{:keys [columns rows], :as results}]
-  (assoc results
-    :columns (mapv name columns)
-    :cols    (vec (for [i    (range (count columns))
-                        :let [col (nth columns i)]]
-                    {:name         (name col)
-                     :display_name (humanization/name->human-readable-name (name col))
-                     :base_type    (or (driver/values->base-type (for [row rows]
-                                                                   (nth row i)))
-                                       :type/*)}))))
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                               GENERAL MIDDLEWARE                                               |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(defn annotate-and-sort
-  "Middleware for adding type information to columns returned by running a query, and sorting the columns in the results."
-  [qp]
-  (fn [{query-type :type, :as query}]
-    (let [results (qp query)]
-      (-> (if-not (or (= query-type :query)
-                      (:annotate? results))
-            (infer-column-types results)
-            (annotate/annotate-and-sort query results))
-          (dissoc :annotate?)))))
diff --git a/src/metabase/query_processor/middleware/auto_bucket_datetime_breakouts.clj b/src/metabase/query_processor/middleware/auto_bucket_datetime_breakouts.clj
index 72967669538cec0a59337e77269201053044fbdf..ece7b78f98d815e2f1a34f638a3126893670ddad 100644
--- a/src/metabase/query_processor/middleware/auto_bucket_datetime_breakouts.clj
+++ b/src/metabase/query_processor/middleware/auto_bucket_datetime_breakouts.clj
@@ -14,13 +14,11 @@
    :special_type (s/maybe su/FieldType)
    s/Keyword     s/Any})
 
-(s/defn ^:private is-datetime-field?
-  [{base-type :base_type, special-type :special_type} :- (s/maybe FieldTypeInfo)]
-  (or (isa? base-type :type/DateTime)
-      (isa? special-type :type/DateTime)))
-
-;; TODO - we should check the store to see if these Fields have already been resolved! And if they haven't, we should
-;; go ahead and resolve them, and save them in the store...
+;; Unfortunately these Fields won't be in the store yet since Field resolution can't happen before we add the implicit
+;; `:fields` clause, which happens after this
+;;
+;; TODO - What we could do, though, is fetch all the stuff we need for the Store and then save these Fields in the store,
+;; which would save a bit of time when we do resolve them
 (s/defn ^:private unbucketed-breakouts->field-id->type-info :- {su/IntGreaterThanZero (s/maybe FieldTypeInfo)}
   "Fetch a map of Field ID -> type information for the Fields referred to by the `unbucketed-breakouts`."
   [unbucketed-breakouts :- (su/non-empty [mbql.s/field-id])]
@@ -31,16 +29,18 @@
   "Wrap each breakout in `breakouts` in a `:datetime-field` clause if appropriate; look at corresponing type
   information in `field-id->type-inf` to see if we should do so."
   [breakouts :- [mbql.s/Field], field-id->type-info :- {su/IntGreaterThanZero (s/maybe FieldTypeInfo)}]
-  (for [breakout breakouts]
-    (if (and (mbql.u/is-clause? :field-id breakout)
-             (is-datetime-field? (field-id->type-info (second breakout))))
-      [:datetime-field breakout :day]
-      breakout)))
+  (mbql.u/replace breakouts
+    ;; don't replace anything that's already wrapping a `field-id`
+    [(_ :guard keyword?) [:field-id _] & _]
+    &match
+
+    [:field-id (_ :guard (comp mbql.u/datetime-field? field-id->type-info))]
+    [:datetime-field &match :day]))
 
 (s/defn ^:private auto-bucket-datetime-breakouts* :- mbql.s/Query
   [{{breakouts :breakout} :query, :as query} :- mbql.s/Query]
   ;; find any breakouts in the query that are just plain `[:field-id ...]` clauses
-  (if-let [unbucketed-breakouts (seq (filter (partial mbql.u/is-clause? :field-id) breakouts))]
+  (if-let [unbucketed-breakouts (mbql.u/match breakouts, [(_ :guard keyword?) [:field-id _] & _] nil, [:field-id _] &match)]
     ;; if we found some unbucketed breakouts, fetch the Fields & type info that are referred to by those breakouts...
     (let [field-id->type-info (unbucketed-breakouts->field-id->type-info unbucketed-breakouts)]
       ;; ...and then update each breakout by wrapping it if appropriate
diff --git a/src/metabase/query_processor/middleware/bind_effective_timezone.clj b/src/metabase/query_processor/middleware/bind_effective_timezone.clj
index fa91205a5842a3b3cc5a135f4acebb757f6b3ba8..e06bc363d3c016cada910a968ff55c3f2c2bea2e 100644
--- a/src/metabase/query_processor/middleware/bind_effective_timezone.clj
+++ b/src/metabase/query_processor/middleware/bind_effective_timezone.clj
@@ -1,9 +1,9 @@
 (ns metabase.query-processor.middleware.bind-effective-timezone
-  (:require [metabase.util.date :as ud]))
+  (:require [metabase.util.date :as du]))
 
 (defn bind-effective-timezone
   "Middlware that ensures the report-timezone and data-timezone are bound based on the database being queried against"
   [qp]
   (fn [query]
-    (ud/with-effective-timezone (:database query)
+    (du/with-effective-timezone (:database query)
       (qp query))))
diff --git a/src/metabase/query_processor/middleware/binning.clj b/src/metabase/query_processor/middleware/binning.clj
index 21a0284ea0564f995f30a8593f194c6e3b03e0ef..b2fdd743c7df8405f0a660ffeb8f433066050209 100644
--- a/src/metabase/query_processor/middleware/binning.clj
+++ b/src/metabase/query_processor/middleware/binning.clj
@@ -25,9 +25,9 @@
   (reduce
    (partial merge-with concat)
    {}
-   (for [filter-clause (mbql.u/clause-instances #{:between :< :<= :> :>=} filter-clause)
-         [_ field-id]  (mbql.u/clause-instances #{:field-id} filter-clause)]
-     {field-id [filter-clause]})))
+   (for [subclause (mbql.u/match filter-clause #{:between :< :<= :> :>=})
+         field-id  (mbql.u/match subclause [:field-id field-id] field-id)]
+     {field-id [subclause]})))
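+
+;; Example (illustrative): a filter like [:and [:between [:field-id 1] 0 10] [:> [:field-id 2] 5]] yields
+;; {1 [[:between [:field-id 1] 0 10]], 2 [[:> [:field-id 2] 5]]}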
 
 (s/defn ^:private extract-bounds :- {:min-value s/Num, :max-value s/Num}
   "Given query criteria, find a min/max value for the binning strategy using the greatest user specified min value and
@@ -37,14 +37,10 @@
   (let [{global-min :min, global-max :max} (get-in fingerprint [:type :type/Number])
         filter-clauses                     (get field-id->filters field-id)
         ;; [:between <field> <min> <max>] or [:< <field> <x>]
-        user-maxes                         (for [[clause-name :as filter-clause] filter-clauses
-                                                 :when                           (#{:< :<= :between} clause-name)]
-                                             (last filter-clause))
-        user-mins                          (for [[clause-name :as filter-clause] filter-clauses
-                                                 :when                           (#{:> :>= :between} clause-name)]
-                                             (if (= :between clause-name)
-                                               (nth filter-clause 2)
-                                               (last filter-clause)))
+        user-maxes                         (mbql.u/match filter-clauses
+                                             [(_ :guard #{:< :<= :between}) & args] (last args))
+        user-mins                          (mbql.u/match filter-clauses
+                                             [(_ :guard #{:> :>= :between}) _ min-val & _] min-val)
         min-value                          (or (when (seq user-mins)
                                                  (apply max user-mins))
                                                global-min)
@@ -185,9 +181,13 @@
                                                                     resolved-options)]))
 
 
-(defn- update-binning-strategy* [query]
-  (let [field-id->filters (filter->field-map (get-in query [:query :filter]))]
-    (mbql.u/replace-clauses-in query [:query] :binning-strategy (partial update-binned-field query field-id->filters))))
+(defn- update-binning-strategy* [{query-type :type, :as query}]
+  (if (= query-type :native)
+    query
+    (let [field-id->filters (filter->field-map (get-in query [:query :filter]))]
+      (mbql.u/replace-in query [:query]
+        :binning-strategy
+        (update-binned-field query field-id->filters &match)))))
 
 (defn update-binning-strategy
   "When a binned field is found, it might need to be updated if a relevant query criteria affects the min/max value of
diff --git a/src/metabase/query_processor/middleware/cache.clj b/src/metabase/query_processor/middleware/cache.clj
index 471040a384a71c5d646d7c8c27ddfd0c85ef310b..6589b27db0d27c149b0107025058ab1370ea8303 100644
--- a/src/metabase/query_processor/middleware/cache.clj
+++ b/src/metabase/query_processor/middleware/cache.clj
@@ -141,11 +141,13 @@
         running the query, satisfying this requirement.)
      *  The result *rows* of the query must be less than `query-caching-max-kb` when serialized (before compression)."
   [qp]
-  ;; choose the caching backend if needed
-  (when-not @backend-instance
-    (set-backend!))
-  ;; ok, now do the normal middleware thing
   (fn [query]
     (if-not (is-cacheable? query)
       (qp query)
-      (run-query-with-cache qp query))))
+      ;; wait until we're actually going to use the cache before initializing the backend. We don't want to initialize
+      ;; it when the files get compiled, because that would give it the wrong version of the
+      ;; `IQueryProcessorCacheBackend` protocol
+      (do
+        (when-not @backend-instance
+          (set-backend!))
+        (run-query-with-cache qp query)))))
diff --git a/src/metabase/query_processor/middleware/catch_exceptions.clj b/src/metabase/query_processor/middleware/catch_exceptions.clj
index 09e903ca9caf7756a9760810e8edce82f9280c44..4927fd93000f516f679ebf2dcd94fa3d8aef6751 100644
--- a/src/metabase/query_processor/middleware/catch_exceptions.clj
+++ b/src/metabase/query_processor/middleware/catch_exceptions.clj
@@ -1,28 +1,30 @@
 (ns metabase.query-processor.middleware.catch-exceptions
   "Middleware for catching exceptions thrown by the query processor and returning them in a friendlier format."
-  (:require [metabase.query-processor.middleware
-             [add-query-throttle :as query-throttle]
-             [expand :as expand]
-             [resolve :as resolve]
-             [source-table :as source-table]]
-            [metabase.query-processor.util :as qputil]
+  (:require [metabase.query-processor.middleware.add-query-throttle :as query-throttle]
             [metabase.util :as u]
             schema.utils)
   (:import [schema.utils NamedError ValidationError]))
 
-(defn- fail [query, ^Throwable e, & [additional-info]]
-  (merge {:status         :failed
-          :class          (class e)
-          :error          (or (.getMessage e) (str e))
-          :stacktrace     (u/filtered-stacktrace e)
-          :query          (dissoc query :database :driver)
-          :expanded-query (when (qputil/mbql-query? query)
-                            (-> query
-                                expand/expand
-                                source-table/resolve-source-table-middleware
-                                resolve/resolve
-                                (dissoc :database :driver)
-                                u/ignore-exceptions))}
+(def ^:dynamic ^:private *add-preprocessed-queries?* true)
+
+(defn- fail [{query-type :type, :as query}, ^Throwable e, & [additional-info]]
+  (merge {:status       :failed
+          :class        (class e)
+          :error        (or (.getMessage e) (str e))
+          :stacktrace   (u/filtered-stacktrace e)
+          ;; TODO - removing this stuff is not really needed anymore since `:database` is just the ID and not the
+          ;; entire map including `:details`
+          :query        (dissoc query :database :driver)}
+         ;; add the fully-preprocessed and native forms to the error message for MBQL queries, since they're extremely
+         ;; useful for debugging purposes. Since generating them requires us to recursively run the query processor,
+         ;; make sure we can skip adding them if we end up back here so we don't recurse forever
+         (when (and (= (keyword query-type) :query)
+                    *add-preprocessed-queries?*)
+           (binding [*add-preprocessed-queries?* false]
+             {:preprocessed (u/ignore-exceptions
+                              ((resolve 'metabase.query-processor/query->preprocessed) query))
+              :native       (u/ignore-exceptions
+                              ((resolve 'metabase.query-processor/query->native) query))}))
          (when-let [data (ex-data e)]
            {:ex-data (dissoc data :schema)})
          additional-info))
@@ -31,7 +33,9 @@
   "Return a nice error message to explain the schema validation error."
   [error]
   (cond
-    (instance? NamedError error)      (let [nested-error (.error ^NamedError error)] ; recurse until we find the innermost nested named error, which is the reason we actually failed
+    (instance? NamedError error)      (let [nested-error (.error ^NamedError error)]
+                                        ;; recurse until we find the innermost nested named error, which is the reason
+                                        ;; we actually failed
                                         (if (instance? NamedError nested-error)
                                           (recur nested-error)
                                           (or (when (map? nested-error)
@@ -43,7 +47,8 @@
                                                    :let  [explanation (explain-schema-validation-error e)]
                                                    :when explanation]
                                                explanation))
-    ;; When an exception is thrown, a ValidationError comes back like (throws? ("foreign-keys is not supported by this driver." 10))
+    ;; When an exception is thrown, a ValidationError comes back like
+    ;;    (throws? ("foreign-keys is not supported by this driver." 10))
     ;; Extract the message if applicable
     (instance? ValidationError error) (let [explanation (schema.utils/validation-error-explain error)]
                                         (or (when (list? explanation)
diff --git a/src/metabase/query_processor/middleware/check_features.clj b/src/metabase/query_processor/middleware/check_features.clj
new file mode 100644
index 0000000000000000000000000000000000000000..234b26993775b666fe2cc1b92fef2af324f3f551
--- /dev/null
+++ b/src/metabase/query_processor/middleware/check_features.clj
@@ -0,0 +1,43 @@
+(ns metabase.query-processor.middleware.check-features
+  (:require [metabase
+             [driver :as driver]
+             [util :as u]]
+            [metabase.mbql.util :as mbql.u]
+            [metabase.query-processor.interface :as qp.i]
+            [metabase.util.i18n :refer [tru]]))
+
+(defn- driver-supports?
+  "Does the currently bound `*driver*` support `feature`?
+   (This returns `nil` if `*driver*` is unbound. `*driver*` is always bound when running queries the normal way,
+   but may not be when calling this function directly from the REPL.)"
+  [feature]
+  (when qp.i/*driver*
+    (driver/driver-supports? qp.i/*driver* feature)))
+
+;; `assert-driver-supports` doesn't run the check when `*driver*` is unbound (e.g., when used in the REPL). This
+;; allows flexibility when composing queries for tests or interactive development
+(defn assert-driver-supports
+  "When `*driver*` is bound, assert that is supports keyword FEATURE."
+  [feature]
+  (when qp.i/*driver*
+    (when-not (driver-supports? feature)
+      (throw (Exception. (str (tru "{0} is not supported by this driver." (name feature))))))))
+
+;; TODO - definitely a little incomplete. It would be cool if we could look at the metadata in the schema namespace
+;; and auto-generate this logic
+(defn- query->required-features [query]
+  (mbql.u/match (:query query)
+    [:stddev _] :standard-deviation-aggregations
+    [:fk-> _ _] :foreign-keys))
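+
+;; Example (illustrative):
+;;
+;;    (query->required-features {:query {:filter [:fk-> [:field-id 1] [:field-id 2]]}})
+;;    ;; -> (:foreign-keys)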
+
+(defn- check-features* [{query-type :type, :as query}]
+  (if-not (= query-type :query)
+    query
+    (u/prog1 query
+      (doseq [required-feature (query->required-features query)]
+        (assert-driver-supports required-feature)))))
+
+(defn check-features
+  "Middleware that checks that drivers support the `:features` required to use certain clauses, like `:stddev`."
+  [qp]
+  (comp qp check-features*))
diff --git a/src/metabase/query_processor/middleware/cumulative_aggregations.clj b/src/metabase/query_processor/middleware/cumulative_aggregations.clj
index 5aac274eab2ba6555909741daff11e2ca32bc665..923325f3c37a599734347f11c0a1bbfd4ed0be67 100644
--- a/src/metabase/query_processor/middleware/cumulative_aggregations.clj
+++ b/src/metabase/query_processor/middleware/cumulative_aggregations.clj
@@ -1,74 +1,59 @@
 (ns metabase.query-processor.middleware.cumulative-aggregations
-  "Middlware for handling `cumulative-count` and `cumulative-sum` aggregations."
-  (:require [metabase.query-processor.util :as qputil]
-            [metabase.util :as u]))
-
-(defn- cumulative-aggregation-clause
-  "Does QUERY have any aggregations of AGGREGATION-TYPE?"
-  [aggregation-type {{aggregations :aggregation} :query, :as query}]
-  (when (qputil/mbql-query? query)
-    (some (fn [{ag-type :aggregation-type, :as ag}]
-            (when (= ag-type aggregation-type)
-              ag))
-          aggregations)))
-
-(defn- pre-cumulative-aggregation
-  "Rewrite queries containing a cumulative aggregation (e.g. `:cumulative-count`) as a different 'basic' aggregation
-  (e.g. `:count`). This lets various drivers handle the aggregation normallly; we implement actual behavior here in
-  post-processing."
-  [cumlative-ag-type basic-ag-type ag-field {{aggregations :aggregation, breakout-fields :breakout} :query, :as query}]
-  (update-in query [:query :aggregation] (fn [aggregations]
-                                           (for [{ag-type :aggregation-type, :as ag} aggregations]
-                                             (if-not (= ag-type cumlative-ag-type)
-                                               ag
-                                               {:aggregation-type basic-ag-type, :field ag-field})))))
-
-(defn- first-index-satisfying
-  "Return the index of the first item in COLL where `(pred item)` is logically `true`.
-
-     (first-index-satisfying keyword? ['a 'b :c 3 \"e\"]) -> 2"
-  {:style/indent 1}
-  [pred coll]
-  (loop [i 0, [item & more] coll]
-    (cond
-      (pred item) i
-      (seq more)  (recur (inc i) more))))
-
-(defn- post-cumulative-aggregation [basic-ag-type {rows :rows, cols :cols, :as results}]
-  (let [ ;; Determine the index of the field we need to cumulative sum
-        field-index (u/prog1 (first-index-satisfying (comp (partial = (name basic-ag-type)) :name)
-                               cols)
-                      (assert (integer? <>)))
-        ;; Now make a sequence of cumulative sum values for each row
-        values      (reductions + (for [row rows]
-                                    (nth row field-index)))
-        ;; Update the values in each row
-        rows        (map (fn [row value]
-                           (assoc (vec row) field-index value))
-                         rows values)]
-    (assoc results :rows rows)))
-
-(defn- cumulative-aggregation [cumulative-ag-type basic-ag-type qp]
-  (let [cumulative-ag-clause (partial cumulative-aggregation-clause cumulative-ag-type)
-        pre-cumulative-ag    (partial pre-cumulative-aggregation cumulative-ag-type basic-ag-type)
-        post-cumulative-ag   (partial post-cumulative-aggregation basic-ag-type)]
-    (fn [query]
-      (if-let [{ag-field :field} (cumulative-ag-clause query)]
-        (post-cumulative-ag (qp (pre-cumulative-ag ag-field query)))
-        (qp query)))))
-
-
-(def ^:private ^{:arglists '([qp])} cumulative-sum
-  "Handle `cumulative-sum` aggregations, which is done by rewriting the aggregation as a `:sum` in pre-processing and
-  acculumlating the results in post-processing."
-  (partial cumulative-aggregation :cumulative-sum :sum))
-
-(def ^:private ^{:arglists '([qp])} cumulative-count
-  "Handle `cumulative-count` aggregations, which is done by rewriting the aggregation as a `:count` in pre-processing
-  and acculumlating the results in post-processing."
-  (partial cumulative-aggregation :cumulative-count :count))
-
-(def ^{:arglists '([qp])} handle-cumulative-aggregations
-  "Handle `cumulative-sum` and `cumulative-count` aggregations by rewriting the aggregations appropriately in
-  pre-processing and accumulating the results in post-processing."
-  (comp cumulative-sum cumulative-count))
+  "Middlware for handling cumulative count and cumulative sum aggregations."
+  (:require [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [schema.core :as s]))
+
+(defn- diff-indecies
+  "Given two sequential collections, return indecies that are different between the two."
+  [coll-1 coll-2]
+  (->> (map not= coll-1 coll-2)
+       (map-indexed (fn [i transformed?]
+                      (when transformed?
+                        i)))
+       (filter identity)
+       set))
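+
+;; e.g. (diff-indecies [[:cum-sum [:field-id 1]] [:count]]
+;;                     [[:sum     [:field-id 1]] [:count]]) ;; -> #{0}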
+
+(s/defn ^:private replace-cumulative-ags :- mbql.s/Query
+  "Replace `cum-count` and `cum-sum` aggregations in `query` with `count` and `sum` aggregations, respectively."
+  [query]
+  (mbql.u/replace-in query [:query :aggregation]
+    ;; cumulative count doesn't necessarily have a field-id arg
+    [:cum-count]       [:count]
+    [:cum-count field] [:count field]
+    [:cum-sum field]   [:sum field]))
+
+(defn- add-rows
+  "Update values in `row` by adding values from `last-row` for a set of specified indexes.
+
+    (add-rows #{0} [100 200] [50 60]) ; -> [150 60]"
+  [[index & more] last-row row]
+  (if-not index
+    row
+    (recur more last-row (update row index (partial + (nth last-row index))))))
+
+(defn- sum-rows
+  "Sum the values in `rows` at `indexes-to-sum`.
+
+    (sum-rows #{0} [[1] [2] [3]]) ; -> [[1] [3] [6]]"
+  [indexes-to-sum rows]
+  (reductions (partial add-rows indexes-to-sum)
+              (first rows)
+              (rest rows)))
+
+(defn handle-cumulative-aggregations
+  "Middleware that implements `cum-count` and `cum-sum` aggregations. These clauses are replaced with `count` and `sum`
+  clauses respectively and summation is performed on results in Clojure-land."
+  [qp]
+  (fn [{{aggregations :aggregation, breakouts :breakout} :query, :as query}]
+    (if (mbql.u/match aggregations #{:cum-count :cum-sum})
+      (let [new-query        (replace-cumulative-ags query)
+            ;; figure out which indexes are being changed in the results. Since breakouts always get included in the
+            ;; results first, we need to offset the indexes to change by the number of breakouts
+            replaced-indexes (set (for [i (diff-indecies (->     query :query :aggregation)
+                                                         (-> new-query :query :aggregation))]
+                                    (+ (count breakouts) i)))
+            results          (qp new-query)]
+        (update results :rows (partial sum-rows replaced-indexes)))
+      (qp query))))
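+
+;; Example (illustrative): for a query with one breakout and aggregations [[:cum-sum [:field-id 1]]], the aggregation
+;; is rewritten to [:sum [:field-id 1]] (changed index 0); offset by the single breakout column, column 1 of the
+;; results then gets summed cumulatively by `sum-rows`.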
diff --git a/src/metabase/query_processor/middleware/desugar.clj b/src/metabase/query_processor/middleware/desugar.clj
new file mode 100644
index 0000000000000000000000000000000000000000..07f231529af9925726f8739789bd6df608c3761b
--- /dev/null
+++ b/src/metabase/query_processor/middleware/desugar.clj
@@ -0,0 +1,91 @@
+(ns metabase.query-processor.middleware.desugar
+  (:require [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [schema.core :as s]))
+
+(defn- desugar-inside [query]
+  (mbql.u/replace-in query [:query :filter]
+    [:inside lat-field lon-field lat-max lon-min lat-min lon-max]
+    [:and
+     [:between lat-field lat-min lat-max]
+     [:between lon-field lon-min lon-max]]))
+
+(defn- desugar-is-null-and-not-null [query]
+  (mbql.u/replace-in query [:query :filter]
+    [:is-null field]  [:=  field nil]
+    [:not-null field] [:!= field nil]))
+
+(defn- desugar-time-interval [query]
+  (mbql.u/replace-in query [:query :filter]
+    [:time-interval field n unit] (recur [:time-interval field n unit nil])
+
+    ;; replace current/last/next with corresponding value of n and recur
+    [:time-interval field :current unit options] (recur [:time-interval field  0 unit options])
+    [:time-interval field :last    unit options] (recur [:time-interval field -1 unit options])
+    [:time-interval field :next    unit options] (recur [:time-interval field  1 unit options])
+
+    [:time-interval field (n :guard #{-1 0 1}) unit _]
+    [:= [:datetime-field field unit] [:relative-datetime n unit]]
+
+    [:time-interval field (n :guard neg?) unit (_ :guard :include-current)]
+    [:between [:datetime-field field unit] [:relative-datetime n unit] [:relative-datetime 0 unit]]
+
+    [:time-interval field (n :guard neg?) unit _]
+    [:between [:datetime-field field unit] [:relative-datetime n unit] [:relative-datetime -1 unit]]
+
+    [:time-interval field n unit (_ :guard :include-current)]
+    [:between [:datetime-field field unit] [:relative-datetime 0 unit] [:relative-datetime n unit]]
+
+    [:time-interval field n unit _]
+    [:between [:datetime-field field unit] [:relative-datetime 1 unit] [:relative-datetime n unit]]))
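+
+;; Example (illustrative): "last 30 days", excluding today:
+;;
+;;    [:time-interval [:field-id 1] -30 :day]
+;;    ;; -> [:between [:datetime-field [:field-id 1] :day]
+;;    ;;              [:relative-datetime -30 :day]
+;;    ;;              [:relative-datetime -1 :day]]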
+
+(defn- desugar-does-not-contain [query]
+  (mbql.u/replace-in query [:query :filter] [:does-not-contain & args]
+    [:not (vec (cons :contains args))]))
+
+(defn- desugar-equals-and-not-equals-with-extra-args
+  "`:=` and `!=` clauses with more than 2 args automatically get rewritten as compound filters.
+
+     [:= field x y]  -> [:or  [:=  field x] [:=  field y]]
+     [:!= field x y] -> [:and [:!= field x] [:!= field y]]"
+  [query]
+  (mbql.u/replace-in query [:query :filter]
+    [:= field x y & more]
+    (apply vector :or (for [x (concat [x y] more)]
+                        [:= field x]))
+
+    [:!= field x y & more]
+    (apply vector :and (for [x (concat [x y] more)]
+                         [:!= field x]))))
+
+(defn- desugar-current-relative-datetime
+  "Replace `relative-datetime` clauses like `[:relative-datetime :current]` with `[:relative-datetime 0 <unit>]`.
+  `<unit>` is inferred from the `:datetime-field` the clause is being compared to (if any); otherwise it falls back
+  to `:default`."
+  [query]
+  (mbql.u/replace-in query [:query :filter]
+    [clause field [:relative-datetime :current & _]]
+    [clause field [:relative-datetime 0 (or (mbql.u/match-one field [:datetime-field _ unit] unit)
+                                            :default)]]))
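+
+;; Example (illustrative): the unit is inferred from the `:datetime-field` being compared against:
+;;
+;;    [:= [:datetime-field [:field-id 1] :month] [:relative-datetime :current]]
+;;    ;; -> [:= [:datetime-field [:field-id 1] :month] [:relative-datetime 0 :month]]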
+
+
+(s/defn ^:private desugar* :- mbql.s/Query
+  [{{filter-clause :filter} :query, :as query}]
+  (if-not (seq filter-clause)
+    query
+    (-> query
+        desugar-inside
+        desugar-is-null-and-not-null
+        desugar-time-interval
+        desugar-does-not-contain
+        desugar-equals-and-not-equals-with-extra-args
+        desugar-current-relative-datetime
+        (update-in [:query :filter] mbql.u/simplify-compound-filter))))
+
+(defn desugar
+  "Middleware that replaces high-level 'syntactic sugar' clauses with lower-level clauses. This is done to minimize the
+  amount of MBQL individual drivers need to support. For your convenience, clauses replaced by this middleware are
+  marked `^:sugar` in the MBQL schema."
+  [qp]
+  (comp qp desugar*))
diff --git a/src/metabase/query_processor/middleware/dev.clj b/src/metabase/query_processor/middleware/dev.clj
index aa9651b08c8620f7ab7711d5a913e0a69029bf00..0a1a244a22fb49cc969711a2bc714a2e7e0ef625 100644
--- a/src/metabase/query_processor/middleware/dev.clj
+++ b/src/metabase/query_processor/middleware/dev.clj
@@ -1,9 +1,7 @@
 (ns metabase.query-processor.middleware.dev
-  "Middleware that's only active in dev and test scenarios. These middleware functions do additional checks
-   of query processor behavior that are undesirable in normal production use."
-  (:require [metabase
-             [config :as config]
-             [util :as u]]
+  "Middleware that's only active in dev and test scenarios. These middleware functions do additional checks of query
+  processor behavior that are undesirable in normal production use."
+  (:require [metabase.config :as config]
             [schema.core :as s]))
 
 ;; The following are just assertions that check the behavior of the QP. It doesn't make sense to run them on prod
@@ -13,33 +11,22 @@
   "Schema for the expected format of results returned by a query processor."
   {:columns               [(s/cond-pre s/Keyword s/Str)]
   ;; This is optional because QPs don't necessarily have to add it themselves; annotate will take care of that
+   ;; If QPs do add it, those will be merged in with what annotate adds
+   ;;
+   ;; A more complete schema is used to check this in `annotate`
    (s/optional-key :cols) [{s/Keyword s/Any}]
    :rows                  s/Any
    s/Keyword              s/Any})
 
 (def ^{:arglists '([results])} validate-results
-  "Validate that the RESULTS of executing a query match the `QPResultsFormat` schema.
-   Throws an `Exception` if they are not; returns RESULTS as-is if they are."
+  "Validate that the RESULTS of executing a query match the `QPResultsFormat` schema. Throws an `Exception` if they are
+  not; returns RESULTS as-is if they are."
   (s/validator QPResultsFormat))
 
 (def ^{:arglists '([qp])} check-results-format
-  "Make sure the results of a QP execution are in the expected format.
-   This takes place *after* the 'annotation' stage of post-processing.
-   This check is skipped in prod to avoid wasting CPU cycles."
+  "Make sure the results of a QP execution are in the expected format. This takes place *after* the 'annotation' stage
+  of post-processing. This check is skipped in prod to avoid wasting CPU cycles."
   (if config/is-prod?
     identity
     (fn [qp]
       (comp validate-results qp))))
-
-
-(def ^{:arglists '([qp])} guard-multiple-calls
-  "Throw an exception if a QP function accidentally calls (QP QUERY) more than once.
-   This test is skipped in prod to avoid wasting CPU cycles."
-  (if config/is-prod?
-    identity
-    (fn [qp]
-      (comp qp (let [called? (atom false)]
-                 (fn [query]
-                   (u/prog1 query
-                     (assert (not @called?) "(QP QUERY) IS BEING CALLED MORE THAN ONCE!")
-                     (reset! called? true))))))))
diff --git a/src/metabase/query_processor/middleware/expand.clj b/src/metabase/query_processor/middleware/expand.clj
deleted file mode 100644
index b7ba2be89ebc99d7c222d2fc8a09e8ff2e6ba64c..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/middleware/expand.clj
+++ /dev/null
@@ -1,613 +0,0 @@
-(ns ^:deprecated metabase.query-processor.middleware.expand
-  "Converts a Query Dict as received by the API into an *expanded* one that contains extra information that will be
-  needed to construct the appropriate native Query, and perform various post-processing steps such as Field ordering."
-  (:refer-clojure :exclude [< <= > >= = != and or not filter count distinct sum min max + - / *])
-  (:require [clojure.core :as core]
-            [clojure.tools.logging :as log]
-            [metabase.driver :as driver]
-            [metabase.query-processor
-             [interface :as i]
-             [util :as qputil]]
-            [metabase.util :as u]
-            [metabase.util
-             [date :as du]
-             [schema :as su]]
-            [schema.core :as s])
-  (:import [metabase.query_processor.interface AgFieldRef BetweenFilter ComparisonFilter CompoundFilter DateTimeValue
-            DateTimeField Expression ExpressionRef FieldLiteral FieldPlaceholder RelativeDatetime
-            RelativeDateTimeValue StringFilter Value ValuePlaceholder]))
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                CLAUSE HANDLERS                                                 |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(s/defn ^:deprecated ^:ql field-id :- i/AnyField
-  "Create a generic reference to a `Field` with ID."
-  [id]
-  ;; If for some reason we were passed a field literal (e.g. [field-id [field-literal ...]])
-  ;; we should technically barf but since we know what people meant we'll be nice for once and fix it for them :D
-  (if (instance? FieldLiteral id)
-    (do
-      (log/warn (u/format-color 'yellow (str "It doesn't make sense to use `field-literal` forms inside `field-id` forms.\n"
-                                             "Instead of [field-id [field-literal ...]], just do [field-literal ...].")))
-      id)
-    (i/map->FieldPlaceholder {:field-id id})))
-
-(s/defn ^:deprecated ^:private field :- i/AnyField
-  "Generic reference to a `Field`. F can be an integer Field ID, or various other forms like `fk->` or `aggregation`."
-  [f]
-  (if (integer? f)
-    (do (log/warn (u/format-color 'yellow "Referring to fields by their bare ID (%d) is deprecated in MBQL '98. Please use [:field-id %d] instead." f f))
-        (field-id f))
-    f))
-
-(s/defn ^:deprecated ^:ql field-literal :- FieldLiteral
-  "Generic reference to a Field by FIELD-NAME. This is intended for use when using nested queries so as to allow one
-   to refer to the fields coming back from the source query."
-  [field-name :- su/KeywordOrString, field-type :- su/KeywordOrString]
-  (i/map->FieldLiteral {:field-name (u/keyword->qualified-name field-name), :base-type (keyword field-type)}))
-
-(s/defn ^:deprecated ^:ql named :- i/Aggregation
-  "Specify a CUSTOM-NAME to use for a top-level AGGREGATION-OR-EXPRESSION in the results.
-   (This will probably be extended to support Fields in the future, but for now, only the `:aggregation` clause is
-   supported.)"
-  {:added "0.22.0"}
-  [aggregation-or-expression :- i/Aggregation, custom-name :- su/NonBlankString]
-  (assoc aggregation-or-expression :custom-name custom-name))
-
-(s/defn ^:deprecated ^:ql datetime-field :- i/AnyField
-  "Reference to a `DateTimeField`. This is just a `Field` reference with an associated datetime UNIT."
-  ([f _ unit]
-   (log/warn (u/format-color 'yellow (str "The syntax for datetime-field has changed in MBQL '98. "
-                                          "[:datetime-field <field> :as <unit>] is deprecated. "
-                                          "Prefer [:datetime-field <field> <unit>] instead.")))
-   (datetime-field f unit))
-  ([f unit]
-   (cond
-     (instance? DateTimeField f) f
-     (instance? FieldLiteral f)  (i/map->DateTimeField {:field f, :unit (qputil/normalize-token unit)})
-     ;; if it already has a datetime unit don't replace it with a new one (?)
-     ;; (:datetime-unit f)          f
-     :else                       (assoc (field f) :datetime-unit (qputil/normalize-token unit)))))
-
-(s/defn ^:deprecated ^:ql fk-> :- FieldPlaceholder
-  "Reference to a `Field` that belongs to another `Table`. DEST-FIELD-ID is the ID of this Field, and FK-FIELD-ID is
-   the ID of the foreign key field belonging to the *source table* we should use to perform the join.
-
-   `fk->` is so named because you can think of it as \"going through\" the FK Field to get to the dest Field:
-
-     (fk-> 100 200) ; refer to Field 200, which is part of another Table; join to the other table via our foreign key 100"
-  [fk-field-id, dest-field-id]
-  (i/assert-driver-supports :foreign-keys)
-  (i/map->FieldPlaceholder {:fk-field-id (if (instance? FieldPlaceholder fk-field-id)
-                                           (:field-id fk-field-id)
-                                           fk-field-id)
-                            :field-id    (if (instance? FieldPlaceholder dest-field-id)
-                                           (:field-id dest-field-id)
-                                           dest-field-id)}))
-
-(defn- ^:deprecated datetime-unit
-  "Determine the appropriate datetime unit that should be used for a field F and a value V.
-
-  (Sometimes the value may already have a 'default' value that should be replaced with the value from the field it is
-  being used with, e.g. in a filter clause.)
-
-  For example when filtering by minute it is important both F and V are bucketed as minutes, and thus both most have
-  the same unit."
-  [f v]
-  (qputil/normalize-token (core/or (:datetime-unit f)
-                                   (:unit f)
-                                   (:unit v))))
-
-(s/defn ^:deprecated ^:private value :- i/AnyValue
-  "Literal value. F is the `Field` it relates to, and V is `nil`, or a boolean, string, numerical, or datetime value."
-  [f v]
-  (cond
-    (instance? ValuePlaceholder v)      v
-    (instance? Value v)                 v
-    (instance? RelativeDateTimeValue v) v
-    (instance? DateTimeValue v)         v
-    (instance? RelativeDatetime v)      (i/map->RelativeDateTimeValue (assoc v :unit (datetime-unit f v), :field (datetime-field f (datetime-unit f v))))
-    (instance? DateTimeField f)         (i/map->DateTimeValue {:value (du/->Timestamp v), :field f})
-    (instance? FieldLiteral f)          (if (isa? (:base-type f) :type/DateTime)
-                                          (i/map->DateTimeValue {:value (du/->Timestamp v)
-                                                                 :field (i/map->DateTimeField {:field f :unit :default})})
-                                          (i/map->Value {:value v, :field f}))
-    :else                               (i/map->ValuePlaceholder {:field-placeholder (field f), :value v})))
-
-(s/defn ^:deprecated ^:private field-or-value
-  "Use instead of `value` when something may be either a field or a value."
-  [f v]
-
-  (if (core/or (instance? FieldPlaceholder v)
-               (instance? ExpressionRef v))
-    v
-    (value f v)))
-
-(s/defn ^:deprecated ^:ql relative-datetime :- RelativeDatetime
-  "Value that represents a point in time relative to each moment the query is ran, e.g. \"today\" or \"1 year ago\".
-
-   With `:current` as the only arg, refer to the current point in time; otherwise N is some number and UNIT is a unit
-   like `:day` or `:year`.
-
-     (relative-datetime :current)
-     (relative-datetime -31 :day)"
-  ([n]                (s/validate (s/eq :current) (qputil/normalize-token n))
-                      (relative-datetime 0 nil))
-  ([n :- s/Int, unit] (i/map->RelativeDatetime {:amount n, :unit (if (nil? unit)
-                                                                   :day                        ; give :unit a default value so we can simplify the schema a bit and require a :unit
-                                                                   (qputil/normalize-token unit))})))
-
-(s/defn ^:deprecated ^:ql expression :- ExpressionRef
-  {:added "0.17.0"}
-  [expression-name :- su/KeywordOrString]
-  (i/strict-map->ExpressionRef {:expression-name (name expression-name)}))
-
-
-;;; ## aggregation
-
-(defn- ^:deprecated field-or-expression [f]
-  (if (instance? Expression f)
-    ;; recursively call field-or-expression on all the args inside the expression unless they're numbers
-    ;; plain numbers are always assumed to be numeric literals here; you must use MBQL '98 `:field-id` syntax to refer
-    ;; to Fields inside an expression <3
-    (update f :args #(for [arg %]
-                       (if (number? arg)
-                         arg
-                         (field-or-expression arg))))
-    ;; otherwise if it's not an Expression it's a Field
-    (field f)))
-
-(s/defn ^:deprecated ^:private ag-with-field :- i/Aggregation [ag-type f]
-  (i/map->AggregationWithField {:aggregation-type ag-type, :field (field-or-expression f)}))
-
-(def ^:ql ^:deprecated ^{:arglists '([f])} avg      "Aggregation clause. Return the average value of F."                (partial ag-with-field :avg))
-(def ^:ql ^:deprecated ^{:arglists '([f])} distinct "Aggregation clause. Return the number of distinct values of F."    (partial ag-with-field :distinct))
-(def ^:ql ^:deprecated ^{:arglists '([f])} sum      "Aggregation clause. Return the sum of the values of F."            (partial ag-with-field :sum))
-(def ^:ql ^:deprecated ^{:arglists '([f])} cum-sum  "Aggregation clause. Return the cumulative sum of the values of F." (partial ag-with-field :cumulative-sum))
-(def ^:ql ^:deprecated ^{:arglists '([f])} min      "Aggregation clause. Return the minimum value of F."                (partial ag-with-field :min))
-(def ^:ql ^:deprecated ^{:arglists '([f])} max      "Aggregation clause. Return the maximum value of F."                (partial ag-with-field :max))
-
-(defn ^:deprecated ^:ql stddev
-  "Aggregation clause. Return the standard deviation of values of F.
-   Requires the feature `:standard-deviation-aggregations`."
-  [f]
-  (i/assert-driver-supports :standard-deviation-aggregations)
-  (ag-with-field :stddev f))
-
-(s/defn ^:deprecated ^:ql count :- i/Aggregation
-  "Aggregation clause. Return total row count (e.g., `COUNT(*)`). If F is specified, only count rows where F is non-null (e.g. `COUNT(f)`)."
-  ([]  (i/map->AggregationWithoutField {:aggregation-type :count}))
-  ([f] (ag-with-field :count f)))
-
-(s/defn ^:deprecated ^:ql cum-count :- i/Aggregation
-  "Aggregation clause. Return the cumulative row count (presumably broken out in some way)."
-  ([]
-   (i/map->AggregationWithoutField {:aggregation-type :cumulative-count}))
-  ([f] (ag-with-field :cumulative-count f)))
-
-(s/defn ^:deprecated ^:ql aggregation
-  "Specify the aggregation to be performed for this query.
-
-     (aggregation {} (count 100))
-     (aggregation {} :count 100))"
-  ;; Handle ag field references like [:aggregation 0] (deprecated)
-  ([index :- s/Int]
-   (i/map->AgFieldRef {:index index}))
-
-  ;; Handle :aggregation top-level clauses. This is either a single map (single aggregation) or a vector of maps
-  ;; (multiple aggregations)
-  ([query ag-or-ags :- (s/maybe (s/cond-pre su/Map [su/Map]))]
-   (cond
-     (map? ag-or-ags)  (recur query [ag-or-ags])
-     (empty? ag-or-ags) query
-     :else              (assoc query :aggregation (vec (for [ag ag-or-ags]
-                                                         ;; make sure the ag map is still typed correctly
-                                                         (u/prog1 (cond
-                                                                    (:operator ag) (i/map->Expression ag)
-                                                                    (:field ag)    (i/map->AggregationWithField    (update ag :aggregation-type qputil/normalize-token))
-                                                                    :else          (i/map->AggregationWithoutField (update ag :aggregation-type qputil/normalize-token)))
-                                                           (s/validate i/Aggregation <>)))))))
-
-  ;; also handle varargs for convenience
-  ([query ag & more]
-   (aggregation query (cons ag more))))
-
-
-;;; ## breakout & fields
-
-(s/defn ^:deprecated ^:ql binning-strategy :- (s/cond-pre FieldPlaceholder FieldLiteral)
-  "Reference to a `BinnedField`. This is just a `Field` reference with an associated `STRATEGY-NAME` and
-  `STRATEGY-PARAM`"
-  ([f strategy-name & [strategy-param resolved-options]]
-   (let [strategy (qputil/normalize-token strategy-name)
-         field (field f)]
-     (assoc field :binning-strategy strategy, :binning-param strategy-param, :binning-opts resolved-options))))
-
-(defn- ^:deprecated fields-list-clause
-  ([_ query] query)
-  ([k query & fields] (assoc query k (mapv field fields))))
-
-(def ^:ql ^:deprecated ^{:arglists '([query & fields])} breakout "Specify which fields to breakout by." (partial fields-list-clause :breakout))
-(def ^:ql ^:deprecated ^{:arglists '([query & fields])} fields   "Specify which fields to return."      (partial fields-list-clause :fields))
-
-;;; ## filter
-
-(s/defn ^:deprecated ^:private compound-filter :- i/Filter
-  ([compound-type, subclause :- i/Filter]
-   (log/warn (u/format-color 'yellow "You shouldn't specify an %s filter with only one subclause." compound-type))
-   subclause)
-
-  ([compound-type, subclause :- i/Filter, & more :- [i/Filter]]
-   (i/map->CompoundFilter {:compound-type compound-type, :subclauses (vec (cons subclause more))})))
-
-(def ^:ql ^:deprecated ^{:arglists '([& subclauses])} and "Filter subclause. Return results that satisfy *all* SUBCLAUSES." (partial compound-filter :and))
-(def ^:ql ^:deprecated ^{:arglists '([& subclauses])} or  "Filter subclause. Return results that satisfy *any* of the SUBCLAUSES." (partial compound-filter :or))
-
-(s/defn ^:deprecated ^:private equality-filter :- i/Filter
-  ([filter-type _ f v]
-   (i/map->EqualityFilter {:filter-type filter-type, :field (field f), :value (field-or-value f v)}))
-  ([filter-type compound-fn f v & more]
-   (apply compound-fn (for [v (cons v more)]
-                        (equality-filter filter-type compound-fn f v)))))
-
-(def ^:ql ^:deprecated ^{:arglists '([f v & more])} =
-  "Filter subclause. With a single value, return results where F == V. With two or more values, return results where F
-  matches *any* of the values (i.e.`IN`)
-
-     (= f v)
-     (= f v1 v2) ; same as (or (= f v1) (= f v2))"
-  (partial equality-filter := or))
-
-(def ^:ql ^:deprecated ^{:arglists '([f v & more])} !=
-  "Filter subclause. With a single value, return results where F != V. With two or more values, return results where F
-  does not match *any* of the values (i.e. `NOT IN`)
-
-     (!= f v)
-     (!= f v1 v2) ; same as (and (!= f v1) (!= f v2))"
-  (partial equality-filter :!= and))
-
-(defn ^:deprecated ^:ql is-null  "Filter subclause. Return results where F is `nil`."     [f] (=  f nil)) ; TODO - Should we deprecate these? They're syntactic sugar, and not particualarly useful.
-(defn ^:deprecated ^:ql not-null "Filter subclause. Return results where F is not `nil`." [f] (!= f nil)) ; not-null is doubly unnecessary since you could just use `not` instead.
-
-(s/defn ^:deprecated ^:private comparison-filter :- ComparisonFilter [filter-type f v]
-  (i/map->ComparisonFilter {:filter-type filter-type, :field (field f), :value (value f v)}))
-
-(def ^:ql ^:deprecated ^{:arglists '([f v])} <  "Filter subclause. Return results where F is less than V. V must be orderable, i.e. a number or datetime."                (partial comparison-filter :<))
-(def ^:ql ^:deprecated ^{:arglists '([f v])} <= "Filter subclause. Return results where F is less than or equal to V. V must be orderable, i.e. a number or datetime."    (partial comparison-filter :<=))
-(def ^:ql ^:deprecated ^{:arglists '([f v])} >  "Filter subclause. Return results where F is greater than V. V must be orderable, i.e. a number or datetime."             (partial comparison-filter :>))
-(def ^:ql ^:deprecated ^{:arglists '([f v])} >= "Filter subclause. Return results where F is greater than or equal to V. V must be orderable, i.e. a number or datetime." (partial comparison-filter :>=))
-
-(s/defn ^:deprecated ^:ql between :- BetweenFilter
-  "Filter subclause. Return results where F is between MIN and MAX. MIN and MAX must be orderable, i.e. numbers or datetimes.
-   This behaves like SQL `BETWEEN`, i.e. MIN and MAX are inclusive."
-  [f min-val max-val]
-  (i/map->BetweenFilter {:filter-type :between, :field (field f), :min-val (value f min-val), :max-val (value f max-val)}))
-
-(s/defn ^:deprecated ^:ql inside :- CompoundFilter
-  "Filter subclause for geo bounding. Return results where LAT-FIELD and LON-FIELD are between some set of bounding values."
-  [lat-field lon-field lat-max lon-min lat-min lon-max]
-  (and (between lat-field lat-min lat-max)
-       (between lon-field lon-min lon-max)))
-
-
-(s/defn ^:deprecated ^:private string-filter :- StringFilter
-  "String search filter clauses: `contains`, `starts-with`, and `ends-with`. First shipped in `0.11.0` (before initial
-  public release) but only supported case-sensitive searches. In `0.29.0` support for case-insensitive searches was
-  added. For backwards-compatibility, and to avoid possible performance implications, case-sensitive is the default
-  option if no `options-maps` is specified for all drivers except GA. Whether we should default to case-sensitive can
-  be specified by the `IDriver` method `default-to-case-sensitive?`."
-  ([filter-type f s]
-   (string-filter filter-type f s {:case-sensitive (if i/*driver*
-                                                     (driver/default-to-case-sensitive? i/*driver*)
-                                                     ;; if *driver* isn't bound then just assume `true`
-                                                     true)}))
-  ([filter-type f s options-map]
-   (i/strict-map->StringFilter
-    {:filter-type     filter-type
-     :field           (field f)
-     :value           (value f s)
-     :case-sensitive? (get options-map :case-sensitive true)})))
-
-(def ^:ql ^:deprecated ^{:arglists '([f s] [f s options-map])} starts-with
-  "Filter subclause. Return results where F starts with the string S. By default, is case-sensitive, but you may pass an
-  `options-map` with `{:case-sensitive false}` for case-insensitive searches."
-  (partial string-filter :starts-with))
-
-(def ^:ql ^:deprecated ^{:arglists '([f s] [f s options-map])} contains
-  "Filter subclause. Return results where F contains the string S. By default, is case-sensitive, but you may pass an
-  `options-map` with `{:case-sensitive false}` for case-insensitive searches."
-  (partial string-filter :contains))
-
-(def ^:ql ^:deprecated ^{:arglists '([f s] [f s options-map])} ends-with
-  "Filter subclause. Return results where F ends with with the string S. By default, is case-sensitive, but you may pass
-  an `options-map` with `{:case-sensitive false}` for case-insensitive searches."
-  (partial string-filter :ends-with))
-
-
-(s/defn ^:deprecated ^:ql not :- i/Filter
-  "Filter subclause. Return results that do *not* satisfy SUBCLAUSE.
-
-   For the sake of simplifying driver implementation, `not` automatically translates its argument to a simpler,
-   logically equivalent form whenever possible:
-
-     (not (and x y)) -> (or (not x) (not y))
-     (not (not x))   -> x
-     (not (= x y))   -> (!= x y)"
-  {:added "0.15.0"}
-  [{:keys [compound-type subclause subclauses], :as clause} :- i/Filter]
-  (case compound-type
-    :and (apply or  (mapv not subclauses))
-    :or  (apply and (mapv not subclauses))
-    :not subclause
-    nil  (let [{:keys [field value filter-type]} clause]
-           (case filter-type
-             :=       (!= field value)
-             :!=      (=  field value)
-             :<       (>= field value)
-             :>       (<= field value)
-             :<=      (>  field value)
-             :>=      (<  field value)
-             :between (let [{:keys [min-val max-val]} clause]
-                        (or (< field min-val)
-                            (> field max-val)))
-             (i/strict-map->NotFilter {:compound-type :not, :subclause clause})))))
-
-(def ^:ql ^:deprecated ^{:arglists '([f s]), :added "0.15.0"} does-not-contain
-  "Filter subclause. Return results where F does not start with the string S."
-  (comp not contains))
-
-(s/defn ^:deprecated ^:ql time-interval :- i/Filter
-  "Filter subclause. Syntactic sugar for specifying a specific time interval.
-
- Optionally accepts a map of `options`. The following options are currently implemented:
-
- *  `:include-current` Should we include partial results for the current day/month/etc? Defaults to `false`; set
-     this to `true` to include them.
-
-     ;; return rows where datetime Field 100's value is in the current month
-     (filter {} (time-interval (field-id 100) :current :month))
-
-     ;; return rows where datetime Field 100's value is in the current month, including partial results for the
-     ;; current day
-     (filter {} (time-interval (field-id 100) :current :month {:include-current true}))"
-  [f n unit & [options]]
-  (if-not (integer? n)
-    (case (qputil/normalize-token n)
-      :current (recur f  0 unit options)
-      :last    (recur f -1 unit options)
-      :next    (recur f  1 unit options))
-    (let [f                (datetime-field f unit)
-          include-current? (:include-current options)]
-      (cond
-        (core/= n  0) (= f (value f (relative-datetime  0 unit)))
-        (core/= n -1) (= f (value f (relative-datetime -1 unit)))
-        (core/= n  1) (= f (value f (relative-datetime  1 unit)))
-        (core/< n -1) (between f (value f (relative-datetime                          n unit))
-                                 (value f (relative-datetime (if include-current? 0 -1) unit)))
-        (core/> n  1) (between f (value f (relative-datetime (if include-current? 0  1) unit))
-                                 (value f (relative-datetime                          n unit)))))))
-
-(s/defn ^:deprecated ^:ql filter
-  "Filter the results returned by the query.
-
-     (filter {} (= 100 true)) ; return rows where Field 100 == true"
-  [query, filter-map :- (s/maybe i/Filter)]
-  (if filter-map
-    (assoc query :filter filter-map)
-    query))
-
-(s/defn ^:deprecated ^:ql limit
-  "Limit the number of results returned by the query.
-
-     (limit {} 10)"
-  [query, limit :- (s/maybe s/Int)]
-  (if limit
-    (assoc query :limit limit)
-    query))
-
-
-;;; ## order-by
-
-(s/defn ^:deprecated ^:private order-by-subclause :- i/OrderBy
-  [direction :- i/OrderByDirection, f]
-  ;; it's not particularly useful to sort datetime fields with the default `:day` bucketing,
-  ;; so specify `:default` bucketing to prevent the default of `:day` from being set during resolution.
-  ;; This won't affect fields that aren't `DateTimeFields`.
-  {:direction direction
-   :field     (let [f (field f)]
-                (if-not (instance? FieldPlaceholder f)
-                  f
-                  (update f :datetime-unit (fn [unit]
-                                             (core/or unit :default)))))})
-
-(def ^:ql ^:deprecated ^{:arglists '([field])} asc
-  "`order-by` subclause. Specify that results should be returned in ascending order for Field or AgRef F.
-
-     (order-by {} (asc 100))"
-  (partial order-by-subclause :ascending))
-
-(def ^:ql ^:deprecated ^{:arglists '([field])} desc
-  "`order-by` subclause. Specify that results should be returned in ascending order for Field or AgRef F.
-
-     (order-by {} (desc 100))"
-  (partial order-by-subclause :descending))
-
-(s/defn ^:deprecated ^:private maybe-parse-order-by-subclause :- i/OrderBy
-  [subclause]
-  (cond
-    (map? subclause)    subclause ; already parsed by `asc` or `desc`
-    (vector? subclause) (let [[f direction] subclause]
-                          (log/warn (u/format-color 'yellow "The syntax for order-by has changed in MBQL '98. [<field> :ascending/:descending] is deprecated. Prefer [:asc/:desc <field>] instead."))
-                          (order-by-subclause (qputil/normalize-token direction) f))))
-
-(defn ^:deprecated ^:ql order-by
-  "Specify how ordering should be done for this query.
-
-     (order-by {} (asc 20))        ; order by field 20
-     (order-by {} [20 :ascending]) ; order by field 20 (deprecated/legacy syntax)
-     (order-by {} [(aggregation 0) :descending]) ; order by the aggregate field (e.g. :count)"
-  ([query] query)
-  ([query & subclauses]
-   (assoc query :order-by (mapv maybe-parse-order-by-subclause subclauses))))
-
-
-;;; ## page
-
-(s/defn ^:deprecated ^:ql page
-  "Specify which 'page' of results to fetch (offset and limit the results).
-
-     (page {} {:page 1, :items 20}) ; fetch first 20 rows"
-  [query page-clause :- (s/maybe i/Page)]
-  (if page-clause
-    (assoc query :page page-clause)
-    query))
-
-;;; ## source-table
-
-(s/defn ^:deprecated ^:ql source-table
-  "Specify the ID of the table to query.
-   Queries must specify *either* `:source-table` or `:source-query`.
-
-     (source-table {} 100)"
-  [query, table-id :- s/Int]
-  (assoc query :source-table table-id))
-
-(declare expand-inner)
-
-(s/defn ^:deprecated ^:ql source-query
-  "Specify a query to use as the source for this query (e.g., as a `SUBSELECT`).
-   Queries must specify *either* `:source-table` or `:source-query`.
-
-     (source-query {} (-> (source-table {} 100)
-                          (limit 10)))"
-  {:added "0.25.0"}
-  [query, source-query :- su/Map]
-  (assoc query :source-query (if (:native source-query)
-                               source-query
-                               (expand-inner source-query))))
-
-
-;;; ## calculated columns
-
-(s/defn ^:deprecated ^:ql expressions
-  "Top-level clause. Add additional calculated fields to a query."
-  {:added "0.17.0"}
-  [query, m :- {s/Keyword Expression}]
-  (assoc query :expressions m))
-
-(s/defn ^:deprecated ^:private expression-fn :- Expression
-  [k :- s/Keyword, & args]
-  (i/map->Expression {:operator k, :args (vec (for [arg args]
-                                                (if (number? arg)
-                                                  (float arg) ; convert args to floats so things like 5 / 10 -> 0.5 instead of 0
-                                                  arg)))}))
-
-(def ^:ql ^:deprecated ^{:arglists '([rvalue1 rvalue2 & more]), :added "0.17.0"} + "Arithmetic addition function."       (partial expression-fn :+))
-(def ^:ql ^:deprecated ^{:arglists '([rvalue1 rvalue2 & more]), :added "0.17.0"} - "Arithmetic subtraction function."    (partial expression-fn :-))
-(def ^:ql ^:deprecated ^{:arglists '([rvalue1 rvalue2 & more]), :added "0.17.0"} * "Arithmetic multiplication function." (partial expression-fn :*))
-(def ^:ql ^:deprecated ^{:arglists '([rvalue1 rvalue2 & more]), :added "0.17.0"} / "Arithmetic division function."       (partial expression-fn :/))
-
-;;; Metric & Segment handlers
-
-;; These *do not* expand the normal Metric and Segment macros used in normal queries; that's handled in
-;; `metabase.query-processor.macros` before this namespace ever even sees the query. But since the GA driver's queries
-;; consist of custom `metric` and `segment` clauses we need to at least accept them without barfing so we can expand a
-;; query in order to check what permissions it requires.  TODO - in the future, we should just make these functions
-;; expand Metric and Segment macros for consistency with the rest of the MBQL clauses
-(defn ^:deprecated ^:ql metric  "Placeholder expansion function for GA metric clauses. (This does not expand normal Metric macros; that is done in `metabase.query-processor.macros`.)"   [& _])
-(defn ^:deprecated ^:ql segment "Placeholder expansion function for GA segment clauses. (This does not expand normal Segment macros; that is done in `metabase.query-processor.macros`.)" [& _])
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                   EXPANSION                                                    |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-;; QL functions are any public function in this namespace marked with `^:ql`.
-(def ^:private token->ql-fn
-  "A map of keywords (e.g., `:=`), to the matching vars (e.g., `#'=`)."
-  (into {} (for [[symb varr] (ns-publics *ns*)
-                 :when       (:ql (meta varr))]
-             {(keyword symb) varr})))
-
-(defn- ^:deprecated fn-for-token
-  "Return fn var that matches a token, or throw an exception.
-
-     (fn-for-token :starts-with) -> #'starts-with"
-  [token]
-  (let [token (qputil/normalize-token token)]
-    (core/or (token->ql-fn token)
-             (throw (Exception. (str "Illegal clause (no matching fn found): " token))))))
-
-(s/defn ^:deprecated expand-ql-sexpr
-  "Expand a QL bracketed S-expression by dispatching to the appropriate `^:ql` function. If SEXPR is not a QL
-   S-expression (the first item isn't a token), it is returned as-is.
-
-     (expand-ql-sexpr [:field-id 10]) -> (field-id 10) -> {:field-id 10, :fk-field-id nil, :datetime-unit nil}"
-  [[token & args :as sexpr] :- (s/pred vector?)]
-  (if (core/or (keyword? token)
-               (string?  token))
-    (apply (fn-for-token token) args)
-    sexpr))
-
-(defn ^:deprecated walk-expand-ql-sexprs
-  "Walk QUERY depth-first and expand QL bracketed S-expressions."
-  [x]
-  (cond (map? x)        (into x (for [[k v] x]                    ; do `into x` instead of `into {}` so we can keep the original class,
-                                  [k (walk-expand-ql-sexprs v)])) ; e.g. FieldPlaceholder
-        (sequential? x) (expand-ql-sexpr (mapv walk-expand-ql-sexprs x))
-        :else           x))
-
-
-(s/defn ^:deprecated expand-inner :- i/Query
-  "Expand an inner query map."
-  [inner-query :- (s/pred map?)]
-  (loop [query {}, [[clause-name arg] & more] (seq inner-query)]
-    (let [arg   (walk-expand-ql-sexprs arg)
-          args  (cond
-                  (sequential? arg) arg
-                  arg               [arg])
-          query (if (seq args)
-                  (apply (fn-for-token clause-name) query args)
-                  query)]
-      (if (seq more)
-        (recur query more)
-        query))))
-
-(defn ^:deprecated expand
-  "Expand a query dictionary as it comes in from the API and return an \"expanded\" form, (almost) ready for use by
-   the Query Processor. This includes steps like token normalization and function dispatch.
-
-     (expand {:query {\"SOURCE_TABLE\" 10, \"FILTER\" [\"=\" 100 200]}})
-
-       -> {:query {:source-table 10
-                   :filter       {:filter-type :=
-                                  :field       {:field-id 100}
-                                  :value       {:field-placeholder {:field-id 100}
-                                                :value 200}}}}
-
-   The \"placeholder\" objects above are fetched from the DB and replaced in the next QP step, in
-   `metabase.query-processor.middleware.resolve`."
-  [outer-query]
-  (update outer-query :query expand-inner))
-
-(defn ^:deprecated expand-middleware
-  "Wraps `expand` in a query-processor middleware function"
-  [qp]
-  (fn [query]
-    (qp (if (qputil/mbql-query? query)
-          (expand query)
-          query))))
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                                OTHER HELPER FNS                                                |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-(defn ^:deprecated is-clause?
-  "Check to see whether CLAUSE is an instance of the clause named by normalized CLAUSE-KEYWORD.
-
-     (is-clause? :field-id [\"FIELD-ID\" 2000]) ; -> true"
-  [clause-keyword clause]
-  (core/and (sequential? clause)
-            (core/= (qputil/normalize-token (first clause)) clause-keyword)))
diff --git a/src/metabase/query_processor/middleware/expand_macros.clj b/src/metabase/query_processor/middleware/expand_macros.clj
index 50f24c251701b8275a368ecbfd7c4e973997f14e..a51212a39020e8f0714b490ce18421485a8cba6e 100644
--- a/src/metabase/query_processor/middleware/expand_macros.clj
+++ b/src/metabase/query_processor/middleware/expand_macros.clj
@@ -19,15 +19,6 @@
             [schema.core :as s]
             [toucan.db :as db]))
 
-(defn ga-metric-or-segment?
-  "Is this metric or segment clause not a Metabase Metric or Segment, but rather a GA one? E.g. something like `[:metric
-  ga:users]`. We want to ignore those because they're not the same thing at all as MB Metrics/Segments and don't
-  correspond to objects in our application DB."
-  [[_ id]]
-  (boolean
-   (when ((some-fn string? keyword?) id)
-     (re-find #"^ga(id)?:" (name id)))))
-
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                                    SEGMENTS                                                    |
@@ -38,16 +29,14 @@
     (db/select-id->field :definition Segment, :id [:in (set segment-ids)])))
 
 (defn- replace-segment-clauses [outer-query segment-id->definition]
-  (mbql.u/replace-clauses-in outer-query [:query] :segment
-    (fn [[_ segment-id, :as segment]]
-      (if (ga-metric-or-segment? segment)
-        segment
-        (or (:filter (segment-id->definition segment-id))
-            (throw (IllegalArgumentException. (str (tru "Segment {0} does not exist, or is invalid." segment-id)))))))))
+  (mbql.u/replace-in outer-query [:query]
+    [:segment (segment-id :guard (complement mbql.u/ga-id?))]
+    (or (:filter (segment-id->definition segment-id))
+        (throw (IllegalArgumentException. (str (tru "Segment {0} does not exist, or is invalid." segment-id)))))))
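+
+;; A rough sketch of the rewrite above (IDs and definitions are illustrative,
+;; not real application-DB entries): given a segment-id->definition map like
+;;
+;;    {10 {:filter [:= [:field-id 1] "active"]}}
+;;
+;; a query containing [:segment 10] in its :filter has that clause replaced by
+;; [:= [:field-id 1] "active"], while a GA clause such as [:segment "gaid:-4"]
+;; is left untouched thanks to the :guard.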
 
 (s/defn ^:private expand-segments :- mbql.s/Query
   [{inner-query :query, :as outer-query} :- mbql.s/Query]
-  (if-let [segments (mbql.u/clause-instances :segment inner-query)]
+  (if-let [segments (mbql.u/match inner-query :segment)]
     (replace-segment-clauses outer-query (segment-clauses->id->definition segments))
     outer-query))
 
@@ -58,8 +47,9 @@
 
 (defn- metrics
   "Return a sequence of any (non-GA) `:metric` MBQL clauses in `query`."
-  [{inner-query :query}] ; metrics won't be in a native query but they could be in source-query or aggregation clause
-  (seq (filter (complement ga-metric-or-segment?) (mbql.u/clause-instances :metric inner-query))))
+  [query]
+  ;; metrics won't be in a native query but they could be in source-query or aggregation clause
+  (mbql.u/match query [:metric (_ :guard (complement mbql.u/ga-id?))]))
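+
+;; A quick sketch of the guard's behavior (IDs are illustrative):
+;; (metrics {:query {:aggregation [[:metric 10]]}}) should return the
+;; [:metric 10] clause, while [:metric "ga:users"] clauses are skipped because
+;; mbql.u/ga-id? recognizes the "ga:" prefix.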
 
 (defn- metric-clauses->id->definition [metric-clauses]
   (db/select-id->field :definition Metric, :id [:in (set (map second metric-clauses))]))
@@ -69,13 +59,11 @@
     (reduce mbql.u/add-filter-clause query filters)))
 
 (defn- replace-metrics-aggregations [query metric-id->definition]
-  (mbql.u/replace-clauses-in query [:query] :metric
-    (fn [[_ metric-id, :as metric]]
-      (if (ga-metric-or-segment? metric)
-        metric
-        (or (first (:aggregation (metric-id->definition metric-id)))
-            (throw (IllegalArgumentException.
-                    (str (tru "Metric {0} does not exist, or is invalid." metric-id)))))))))
+  (mbql.u/replace-in query [:query]
+    [:metric (metric-id :guard (complement mbql.u/ga-id?))]
+    (or (first (:aggregation (metric-id->definition metric-id)))
+        (throw (IllegalArgumentException.
+                (str (tru "Metric {0} does not exist, or is invalid." metric-id)))))))
 
 (defn- add-metrics-clauses
   "Add appropriate `filter` and `aggregation` clauses for a sequence of Metrics.
diff --git a/src/metabase/query_processor/middleware/limit.clj b/src/metabase/query_processor/middleware/limit.clj
index d0637ded3d9ee6e7ff485abd606dad3795dcaa66..c18c87beb803d0e853c1619d11dd970e85bef869 100644
--- a/src/metabase/query_processor/middleware/limit.clj
+++ b/src/metabase/query_processor/middleware/limit.clj
@@ -7,11 +7,13 @@
   "Add an implicit `limit` clause to MBQL queries without any aggregations, and limit the maximum number of rows that
   can be returned in post-processing."
   [qp]
-  (fn [{{:keys [max-results max-results-bare-rows]} :constraints, :as query}]
+  (fn [{{:keys [max-results max-results-bare-rows]} :constraints, query-type :type, :as query}]
     (let [query   (cond-> query
-                    (qputil/query-without-aggregations-or-limits? query) (assoc-in [:query :limit] (or max-results-bare-rows
-                                                                                                       max-results
-                                                                                                       i/absolute-max-results)))
+                    (and (= query-type :query)
+                         (qputil/query-without-aggregations-or-limits? query))
+                    (assoc-in [:query :limit] (or max-results-bare-rows
+                                                  max-results
+                                                  i/absolute-max-results)))
           results (qp query)]
       (update results :rows (partial take (or max-results
                                               i/absolute-max-results))))))
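+
+;; A minimal sketch of the new behavior (constraint values are illustrative):
+;; an MBQL query with no aggregations and no :limit, run with
+;; {:constraints {:max-results-bare-rows 2000}}, picks up {:query {:limit 2000}};
+;; a native query (:type :native) is now left alone thanks to the
+;; (= query-type :query) check.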
diff --git a/src/metabase/query_processor/middleware/mbql_to_native.clj b/src/metabase/query_processor/middleware/mbql_to_native.clj
index 31270a4fc4fd57bff4c84ef9934b6d60d5af6c7c..f300d394af6b190b3e70c0cffa611d954828652f 100644
--- a/src/metabase/query_processor/middleware/mbql_to_native.clj
+++ b/src/metabase/query_processor/middleware/mbql_to_native.clj
@@ -5,14 +5,18 @@
             [metabase
              [driver :as driver]
              [util :as u]]
-            [metabase.query-processor.interface :as i]))
+            [metabase.query-processor.interface :as i]
+            [metabase.util.i18n :refer [tru]]))
 
 (defn- query->native-form
   "Return a `:native` query form for QUERY, converting it from MBQL if needed."
   [{query-type :type, :as query}]
   (u/prog1 (if-not (= :query query-type)
              (:native query)
-             (driver/mbql->native (:driver query) query))
+             (try (driver/mbql->native (:driver query) query)
+                  (catch Throwable e
+                    (log/error (tru "Error transforming MBQL query to native:") "\n" (u/pprint-to-str query))
+                    (throw e))))
     (when-not i/*disable-qp-logging*
       (log/debug (u/format-color 'green "NATIVE FORM: %s\n%s\n" (u/emoji "😳") (u/pprint-to-str <>))))))
 
@@ -20,10 +24,14 @@
   "Middleware that handles conversion of MBQL queries to native (by calling driver QP methods) so the queries
    can be executed. For queries that are already native, this function is effectively a no-op."
   [qp]
-  (fn [{query-type :type, :as query}]
-    (let [native-form  (query->native-form query)
-          native-query (if-not (= query-type :query)
-                         query
-                         (assoc query :native native-form))
-          results      (qp native-query)]
-      (assoc results :native_form native-form))))
+  (fn [{query-type :type, {:keys [disable-mbql->native?]} :middleware, :as query}]
+    ;; disabling mbql->native is only used by the `qp/query->preprocessed` function so we can get the fully
+    ;; pre-processed query *before* we convert it to native, which might fail for one reason or another
+    (if disable-mbql->native?
+      (qp query)
+      (let [native-form  (query->native-form query)
+            native-query (if-not (= query-type :query)
+                           query
+                           (assoc query :native native-form))
+            results      (qp native-query)]
+        (assoc results :native_form native-form)))))
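+
+;; A rough usage sketch: running a query with
+;; {:middleware {:disable-mbql->native? true}} skips driver/mbql->native
+;; entirely and hands the query to the rest of the pipeline, which is what
+;; qp/query->preprocessed relies on to fetch the pre-processed query.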
diff --git a/src/metabase/query_processor/middleware/parameters.clj b/src/metabase/query_processor/middleware/parameters.clj
index 10f17a6fed99dd74dc4455a82806a1927beddfc8..ca1e0c9cc7272877a9b8aa14e28d6accd549d24b 100644
--- a/src/metabase/query_processor/middleware/parameters.clj
+++ b/src/metabase/query_processor/middleware/parameters.clj
@@ -3,9 +3,7 @@
   (:require [clojure.data :as data]
             [clojure.tools.logging :as log]
             [metabase.driver.generic-sql.util.unprepare :as unprepare]
-            [metabase.query-processor
-             [interface :as i]
-             [util :as qputil]]
+            [metabase.query-processor.interface :as i]
             [metabase.query-processor.middleware.parameters
              [mbql :as mbql-params]
              [sql :as sql-params]]
@@ -14,9 +12,9 @@
 (defn- expand-parameters*
   "Expand any `:parameters` set on the `query-dict` and apply them to the query definition. This function removes
   the `:parameters` attribute from the `query-dict` as part of its execution."
-  [{:keys [parameters], :as query-dict}]
+  [{:keys [parameters], query-type :type, :as query-dict}]
   ;; params in native queries are currently only supported for SQL drivers
-  (if (qputil/mbql-query? query-dict)
+  (if (= query-type :query)
     (mbql-params/expand (dissoc query-dict :parameters) parameters)
     (sql-params/expand query-dict)))
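+
+;; Dispatch sketch (query shapes are illustrative): {:type :query, :parameters [...]}
+;; now goes through mbql-params/expand, while {:type :native, ...} goes through
+;; sql-params/expand; this replaces the old qputil/mbql-query? check.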
 
diff --git a/src/metabase/query_processor/middleware/parameters/dates.clj b/src/metabase/query_processor/middleware/parameters/dates.clj
index 9dd1799ee86baf9c43f96514678aa09ad5375cf2..3c9187e801ebb12afde46cb045fce39b648ef7b1 100644
--- a/src/metabase/query_processor/middleware/parameters/dates.clj
+++ b/src/metabase/query_processor/middleware/parameters/dates.clj
@@ -111,13 +111,13 @@
     :range  (fn [_ dt]
               {:start dt,
                :end   dt})
-    :filter (fn [_ field] [:= field [:relative-datetime :current]])}
+    :filter (fn [_ field] [:= [:datetime-field field :day] [:relative-datetime :current]])}
 
    {:parser #(= % "yesterday")
     :range  (fn [_ dt]
               {:start (t/minus dt (t/days 1))
                :end   (t/minus dt (t/days 1))})
-    :filter (fn [_ field] [:= field [:relative-datetime -1 :day]])}
+    :filter (fn [_ field] [:= [:datetime-field field :day] [:relative-datetime -1 :day]])}
 
    ;; adding a tilde (~) at the end of a past<n><unit> filter means we should include the current day/etc.
    ;; e.g. past30days  = past 30 days, not including partial data for today ({:include-current false})
@@ -152,9 +152,10 @@
 (defn- day->iso8601 [date]
   (tf/unparse (tf/formatters :year-month-day) date))
 
+;; TODO - using `range->filter` so much below seems silly. Why can't we just bucket the field and use `:=` clauses?
 (defn- range->filter
   [{:keys [start end]} field]
-  [:between field (day->iso8601 start) (day->iso8601 end)])
+  [:between [:datetime-field field :day] (day->iso8601 start) (day->iso8601 end)])
 
 (def ^:private absolute-date-string-decoders
   ;; year and month
@@ -176,25 +177,25 @@
               {:start date, :end date})
     :filter (fn [{:keys [date]} field-id-clause]
               (let [iso8601date (day->iso8601 date)]
-                [:between field-id-clause iso8601date iso8601date]))}
+                [:= [:datetime-field field-id-clause :day] iso8601date]))}
    ;; day range
    {:parser (regex->parser #"([0-9-T:]+)~([0-9-T:]+)" [:date-1 :date-2])
     :range  (fn [{:keys [date-1 date-2]} _]
               {:start date-1, :end date-2})
     :filter (fn [{:keys [date-1 date-2]} field-id-clause]
-              [:between field-id-clause (day->iso8601 date-1) (day->iso8601 date-2)])}
+              [:between [:datetime-field field-id-clause :day] (day->iso8601 date-1) (day->iso8601 date-2)])}
    ;; before day
    {:parser (regex->parser #"~([0-9-T:]+)" [:date])
     :range  (fn [{:keys [date]} _]
               {:end date})
     :filter (fn [{:keys [date]} field-id-clause]
-              [:< field-id-clause (day->iso8601 date)])}
+              [:< [:datetime-field field-id-clause :day] (day->iso8601 date)])}
    ;; after day
    {:parser (regex->parser #"([0-9-T:]+)~" [:date])
     :range  (fn [{:keys [date]} _]
               {:start date})
     :filter (fn [{:keys [date]} field-id-clause]
-              [:> field-id-clause (day->iso8601 date)])}])
+              [:> [:datetime-field field-id-clause :day] (day->iso8601 date)])}])
 
 (def ^:private all-date-string-decoders
   (concat relative-date-string-decoders absolute-date-string-decoders))
@@ -226,7 +227,7 @@
              (m/map-vals (partial tf/unparse formatter-no-tz))))))
 
 (s/defn date-string->filter :- mbql.s/Filter
-  "Takes a string description of a date range such as 'lastmonth' or '2016-07-15~2016-08-6' and returns a
-   corresponding MBQL filter clause for a given field reference."
+  "Takes a string description of a *date* (not datetime) range such as 'lastmonth' or '2016-07-15~2016-08-6' and
+   returns a corresponding MBQL filter clause for a given field reference."
   [date-string :- s/Str, field :- (s/cond-pre su/IntGreaterThanZero mbql.s/Field)]
   (execute-decoders all-date-string-decoders :filter (params/wrap-field-id-if-needed field) date-string))
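+
+;; A few illustrative examples of the bucketed filters these decoders now
+;; produce (Field ID 100 is hypothetical):
+;;
+;;   (date-string->filter "2016-07-15" 100)
+;;   ;; -> [:= [:datetime-field [:field-id 100] :day] "2016-07-15"]
+;;
+;;   (date-string->filter "2016-07-15~2016-08-06" 100)
+;;   ;; -> [:between [:datetime-field [:field-id 100] :day] "2016-07-15" "2016-08-06"]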
diff --git a/src/metabase/query_processor/middleware/parameters/sql.clj b/src/metabase/query_processor/middleware/parameters/sql.clj
index 38280370a44ee828c4fe1f7619e39c38d1b82af0..3763d0b6b07da3c07e74976803854c65fc0fc612 100644
--- a/src/metabase/query_processor/middleware/parameters/sql.clj
+++ b/src/metabase/query_processor/middleware/parameters/sql.clj
@@ -10,7 +10,6 @@
             [metabase.driver.generic-sql :as sql]
             [metabase.models.field :as field :refer [Field]]
             [metabase.query-processor.interface :as qp.i]
-            [metabase.query-processor.middleware.expand :as ql]
             [metabase.query-processor.middleware.parameters.dates :as date-params]
             [metabase.util
              [date :as du]
@@ -155,6 +154,8 @@
 (s/defn ^:private default-value-for-dimension :- (s/maybe DimensionValue)
   "Return the default value for a Dimension (Field Filter) param defined by the map TAG, if one is set."
   [tag :- TagParam]
+  (when (and (:required tag) (not (:default tag)))
+    (throw (Exception. (str (tru "''{0}'' is a required param." (:display-name tag))))))
   (when-let [default (:default tag)]
     {:type   (:widget-type tag :dimension)             ; widget-type is the actual type of the default value if set
      :target [:dimension [:template-tag (:name tag)]]
@@ -162,7 +163,7 @@
 
 (s/defn ^:private dimension->field-id :- su/IntGreaterThanZero
   [dimension]
-  (:field-id (ql/expand-ql-sexpr dimension)))
+  (second dimension))
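+;; dimensions now arrive as plain MBQL clauses; e.g. (dimension->field-id [:field-id 100]) => 100 (illustrative)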
 
 (s/defn ^:private dimension-value-for-tag :- (s/maybe Dimension)
   "Return the \"Dimension\" value of a param, if applicable. \"Dimension\" here means what is called a \"Field
@@ -355,7 +356,7 @@
 (s/defn ^:private honeysql->replacement-snippet-info :- ParamSnippetInfo
   "Convert X to a replacement snippet info map by passing it to HoneySQL's `format` function."
   [x]
-  (let [[snippet & args] (hsql/format x, :quoting (sql/quote-style qp.i/*driver*))]
+  (let [[snippet & args] (hsql/format x, :quoting (sql/quote-style qp.i/*driver*), :allow-dashed-names? true)]
     {:replacement-snippet     snippet
      :prepared-statement-args args}))
 
diff --git a/src/metabase/query_processor/middleware/permissions.clj b/src/metabase/query_processor/middleware/permissions.clj
index 3fc51fb0f1ee42ff7cb7b02a3d7889e07c066c7b..9845f62b7ef096f2116710ba0f63edb45c5159bf 100644
--- a/src/metabase/query_processor/middleware/permissions.clj
+++ b/src/metabase/query_processor/middleware/permissions.clj
@@ -22,7 +22,7 @@
 (s/defn ^:private check-ad-hoc-query-perms
   [outer-query]
   (when-not (perms/set-has-full-permissions-for-set? @*current-user-permissions-set*
-              (query-perms/perms-set outer-query :throw-exceptions))
+              (query-perms/perms-set outer-query :throw-exceptions :already-preprocessed))
     (throw (Exception. (str (tru "You do not have permissions to run this query."))))))
 
 (s/defn ^:private check-query-permissions*
diff --git a/src/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing.clj b/src/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing.clj
new file mode 100644
index 0000000000000000000000000000000000000000..a1d799fa0703e7365be3c4f9498e5b58a1045834
--- /dev/null
+++ b/src/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing.clj
@@ -0,0 +1,97 @@
+(ns metabase.query-processor.middleware.reconcile-breakout-and-order-by-bucketing
+  "SQL places restrictions when using a `GROUP BY` clause (MBQL `:breakout`) in combination with an `ORDER BY`
+  clause (MBQL `:order-by`) -- columns that appear in the `ORDER BY` must appear in the `GROUP BY`. When we apply
+  datetime or binning bucketing in a breakout, for example `cast(x AS DATE)` (MBQL `:datetime-field` clause), we need
+  to apply the same bucketing to instances of that Field in the `order-by` clause. In other words:
+
+  Bad:
+
+    SELECT count(*)
+    FROM table
+    GROUP BY CAST(x AS date)
+    ORDER BY x ASC
+
+  (MBQL)
+
+     {:source-table 1
+      :breakout     [[:datetime-field [:field-id 1] :day]]
+      :order-by     [[:asc [:field-id 1]]]}
+
+  Good:
+
+    SELECT count(*)
+    FROM table
+    GROUP BY CAST(x AS date)
+    ORDER BY CAST(x AS date) ASC
+
+  (MBQL)
+
+    {:source-table 1
+     :breakout     [[:datetime-field [:field-id 1] :day]]
+     :order-by     [[:asc [:datetime-field [:field-id 1] :day]]]}
+
+  The frontend, on the rare occasion it generates a query that explicitly specifies an `order-by` clause, will usually
+  generate one that directly corresponds to the bad example above. This middleware finds these cases and rewrites the
+  query to look like the good example."
+  (:require [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [schema.core :as s]))
+
+(s/defn ^:private reconcile-bucketing :- mbql.s/Query
+  [{{breakouts :breakout, order-bys :order-by} :query, :as query}]
+  ;; Look for bucketed fields in the `breakout` clause and build a map of unbucketed reference -> bucketed reference,
+  ;; like:
+  ;;
+  ;;    {[:field-id 1] [:datetime-field [:field-id 1] :day]}
+  ;;
+  ;; In cases where a Field is broken out more than once, prefer the bucketing used by the first breakout; accomplish
+  ;; this by reversing the sequence of matches below, meaning the first match will get merged into the map last,
+  ;; overwriting later matches
+  (let [unbucketed-ref->bucketed-ref (into {} (reverse (mbql.u/match breakouts
+                                                         [:datetime-field field _]     [field &match]
+                                                         [:binning-strategy field & _] [field &match])))]
+    ;; rewrite order-by clauses as needed...
+    (-> (mbql.u/replace-in query [:query :order-by]
+          ;; if order by is already bucketed, nothing to do
+          [:datetime-field _ _]   &match
+          [:binning-strategy & _] &match
+
+          ;; if we run into a field reference that wasn't matched by the patterns above, see if there's an unbucketed
+          ;; reference -> bucketed reference mapping from earlier
+          #{:field-id :fk-> :field-literal}
+          (if-let [bucketed-reference (unbucketed-ref->bucketed-ref &match)]
+            ;; if there is, replace it with the bucketed reference
+            bucketed-reference
+            ;; if there's not, again nothing to do.
+            &match))
+        ;; now remove any duplicate order-by clauses we may have introduced, as those are illegal in MBQL 2000
+        (update-in [:query :order-by] (comp vec distinct)))))
+
+(defn- reconcile-bucketing-if-needed
+  "Check if there's a chance we need to rewrite anything. If not, return query as is."
+  [{{breakouts :breakout, order-bys :order-by} :query, :as query}]
+  (if (or
+       ;; if there are no breakouts bucketed by a datetime-field or binning-strategy...
+       (not (mbql.u/match breakouts #{:datetime-field :binning-strategy}))
+       ;; or if there are no order-bys that are *not* bucketed...
+       (not (mbql.u/match order-bys
+              #{:datetime-field :binning-strategy} nil
+              #{:field-id :fk-> :field-literal}    &match)))
+    ;; return query as is
+    query
+    ;; otherwise, time to bucket
+    (reconcile-bucketing query)))
+
+(defn reconcile-breakout-and-order-by-bucketing
+  "Replace any unwrapped Field clauses (`:field-id`, `:field-literal`, or `:fk->`) in the `order-by` clause with
+  corresponding wrapped clauses (`:datetime-field` or `:binning-strategy`) used for the same Field in the `breakout`
+  clause.
+
+   {:query {:breakout [[:datetime-field [:field-id 1] :day]]
+            :order-by [[:asc [:field-id 1]]]}}
+   ->
+   {:query {:breakout [[:datetime-field [:field-id 1] :day]]
+            :order-by [[:asc [:datetime-field [:field-id 1] :day]]]}}"
+  [qp]
+  (comp qp reconcile-bucketing-if-needed))
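+
+;; A minimal sketch of the middleware's effect (query shape is illustrative; a
+;; real query would also carry keys like :database):
+(comment
+  (reconcile-bucketing-if-needed
+   {:type  :query
+    :query {:source-table 1
+            :breakout     [[:datetime-field [:field-id 1] :day]]
+            :order-by     [[:asc [:field-id 1]]]}})
+  ;; -> the same query, but with :order-by rewritten to
+  ;;    [[:asc [:datetime-field [:field-id 1] :day]]]
+  )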
diff --git a/src/metabase/query_processor/middleware/resolve.clj b/src/metabase/query_processor/middleware/resolve.clj
deleted file mode 100644
index ca331d0b4d5b19c45acfbf615cabac024636d2e8..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/middleware/resolve.clj
+++ /dev/null
@@ -1,482 +0,0 @@
-(ns ^:deprecated metabase.query-processor.middleware.resolve
-  "Resolve references to `Fields`, `Tables`, and `Databases` in an expanded query dictionary. During the `expand`
-  phase of the Query Processor, forms like `[:field-id 10]` are replaced with placeholder objects of types like
-  `FieldPlaceholder` or similar. During this phase, we'll take those placeholder objects and fetch information from
-  the DB and replace them with actual objects like `Field`."
-  (:refer-clojure :exclude [resolve])
-  (:require [clojure
-             [set :as set]
-             [walk :as walk]]
-            [metabase
-             [db :as mdb]
-             [util :as u]]
-            [metabase.models
-             [database :refer [Database]]
-             [field :as field]
-             [setting :as setting]
-             [table :refer [Table]]]
-            [metabase.query-processor
-             [interface :as i]
-             [store :as qp.store]
-             [util :as qputil]]
-            [metabase.util
-             [date :as du]
-             [schema :as su]]
-            [schema.core :as s]
-            [toucan
-             [db :as db]
-             [hydrate :refer [hydrate]]])
-  (:import java.util.TimeZone
-           [metabase.query_processor.interface DateTimeField DateTimeValue ExpressionRef Field FieldLiteral
-            FieldPlaceholder RelativeDatetime RelativeDateTimeValue TimeField TimeValue Value ValuePlaceholder]))
-
-;;; ---------------------------------------------------- UTIL FNS ----------------------------------------------------
-
-(defn rename-mb-field-keys
-  "Rename the keys in a Metabase `Field` to match the format of those in Query Expander `Fields`."
-  [field]
-  (set/rename-keys (into {} field) {:id              :field-id
-                                    :name            :field-name
-                                    :display_name    :field-display-name
-                                    :special_type    :special-type
-                                    :visibility_type :visibility-type
-                                    :database_type   :database-type
-                                    :base_type       :base-type
-                                    :table_id        :table-id
-                                    :parent_id       :parent-id}))
-
-(defn- rename-dimension-keys
-  [dimension]
-  (set/rename-keys (into {} dimension)
-                   {:id                      :dimension-id
-                    :name                    :dimension-name
-                    :type                    :dimension-type
-                    :field_id                :field-id
-                    :human_readable_field_id :human-readable-field-id
-                    :created_at              :created-at
-                    :updated_at              :updated-at}))
-
-(defn- rename-field-value-keys
-  [field-values]
-  (set/rename-keys (into {} field-values)
-                   {:id                    :field-value-id
-                    :field_id              :field-id
-                    :human_readable_values :human-readable-values
-                    :updated_at            :updated-at
-                    :created_at            :created-at}))
-
-(defn convert-db-field
-  "Converts a field map from that database to a Field instance"
-  [db-field]
-  (-> db-field
-      rename-mb-field-keys
-      i/map->Field
-      (update :values (fn [vals]
-                        (if (seq vals)
-                          (-> vals rename-field-value-keys i/map->FieldValues)
-                          vals)))
-      (update :dimensions (fn [dims]
-                            (if (seq dims)
-                              (-> dims rename-dimension-keys i/map->Dimensions)
-                              dims)))))
-
-;;; ----------------------------------------------- IRESOLVE PROTOCOL ------------------------------------------------
-
-(defprotocol ^:private IResolve
-  (^:private unresolved-field-id ^Integer [this]
-   "Return the unresolved Field ID associated with this object, if any.")
-
-  (^:private fk-field-id ^Integer [this]
-   "Return a the FK Field ID (for joining) associated with this object, if any.")
-
-  (^:private resolve-field [this, ^clojure.lang.IPersistentMap field-id->field]
-   "This method is called when walking the Query after fetching `Fields`.
-    Placeholder objects should lookup the relevant Field in FIELD-ID->FIELDS and return their expanded form. Other
-    objects should just return themselves.")
-
-  (resolve-table [this, ^clojure.lang.IPersistentMap fk-id+table-id->tables]
-   "Called when walking the Query after `Fields` have been resolved and `Tables` have been fetched.
-    Objects like `Fields` can add relevant information like the name of their `Table`."))
-
-(def ^:private IResolveDefaults
-  {:unresolved-field-id (constantly nil)
-   :fk-field-id         (constantly nil)
-   :resolve-field       (fn [this _] this)
-   :resolve-table       (fn [this _] this)})
-
-(u/strict-extend Object IResolve IResolveDefaults)
-(u/strict-extend nil    IResolve IResolveDefaults)
-
-
-;;; ----------------------------------------------------- FIELD ------------------------------------------------------
-
-(defn- field-unresolved-field-id
-  "Return the ID of a unresolved Fields belonging to a Field. (This means we'll just return the ID of the parent if
-  it's not yet resolved.)"
-  [{:keys [parent parent-id]}]
-  (or (unresolved-field-id parent)
-      (when (instance? FieldPlaceholder parent)
-        parent-id)))
-
-(defn- field-resolve-field
-  "Attempt to resolve the `:parent` of a `Field`, if there is one, if it's a `FieldPlaceholder`."
-  [{:keys [parent parent-id], :as this} field-id->field]
-  (cond
-    parent    (or (when (instance? FieldPlaceholder parent)
-                    (when-let [resolved (resolve-field parent field-id->field)]
-                      (assoc this :parent resolved)))
-                  this)
-    parent-id (assoc this :parent (or (field-id->field parent-id)
-                                      (i/map->FieldPlaceholder {:field-id parent-id})))
-    :else     this))
-
-(defn- field-resolve-table
-  "Resolve the `Table` belonging to a resolved `Field`. (This is only used for resolving Tables referred to via
-  foreign keys.)"
-  [{:keys [table-id fk-field-id field-id], :as this} fk-id+table-id->table]
-  ;; TODO - use schema for this instead of preconditions :D
-  {:pre [(map? fk-id+table-id->table) (every? vector? (keys fk-id+table-id->table))]}
-  (let [table (or (fk-id+table-id->table [fk-field-id table-id])
-                  ;; if we didn't find a matching table check and see whether we're trying to use a field from another
-                  ;; table without wrapping it in an fk-> form
-                  (doseq [[fk table] (keys fk-id+table-id->table)
-                          :when      (and fk (= table table-id))]
-                    (throw (Exception. (format (str "Invalid query: Field %d belongs to table %d. Since %d is not "
-                                                    "the source table, it must be wrapped in a fk-> form, e.g. "
-                                                    "[fk-> %d %d].")
-                                               field-id table-id table-id fk field-id))))
-                  ;; Otherwise, we're using what is most likely an invalid Field ID; complain about it and give a list
-                  ;; of tables that are valid
-                  (throw (Exception. (format (str "Query expansion failed: could not find table %d (FK ID = %d). "
-                                                  "Resolved tables ([fk-id table-id]): %s")
-                                             table-id fk-field-id (keys fk-id+table-id->table)))))]
-    (assoc this
-      :table-name  (:name table)
-      :schema-name (:schema table))))
-
-(u/strict-extend Field
-  IResolve (merge IResolveDefaults
-                  {:unresolved-field-id field-unresolved-field-id
-                   :resolve-field       field-resolve-field
-                   :resolve-table       field-resolve-table}))
-
-
-;;; ----------------------------------------------- FIELD PLACEHOLDER ------------------------------------------------
-
-(defn- resolve-binned-field [binning-strategy binning-param binning-opts field]
-  (i/map->BinnedField (merge {:field    field
-                              :strategy binning-strategy}
-                             binning-opts)))
-
-(defn- merge-non-nils
-  "Like `clojure.core/merge` but only merges non-nil values"
-  [& maps]
-  (apply merge-with #(or %2 %1) maps))
-
-(defn- field-ph-resolve-field
-  "Attempt to resolve the `Field` for a `FieldPlaceholder`. Return a resolved `Field` or `DateTimeField`."
-  [{:keys [field-id datetime-unit binning-strategy binning-param binning-opts], :as this} field-id->field]
-  (if-let [{:keys [base-type special-type], :as field} (some-> (field-id->field field-id)
-                                                               convert-db-field
-                                                               (merge-non-nils (select-keys this [:fk-field-id :remapped-from :remapped-to :field-display-name])))]
-    ;; try to resolve the Field with the ones available in field-id->field
-    (cond
-      (and (or (isa? base-type :type/DateTime)
-               (isa? special-type :type/DateTime))
-           (not (isa? base-type :type/Time)))
-      (i/map->DateTimeField {:field field
-                             :unit  (or datetime-unit :day)}) ; default to `:day` if a unit wasn't specified
-
-      (isa? base-type :type/Time)
-      (i/map->TimeField {:field field})
-
-      binning-strategy
-      (resolve-binned-field binning-strategy binning-param binning-opts field)
-
-      :else field)
-    ;; If that fails just return ourselves as-is
-    this))
-
-(u/strict-extend FieldPlaceholder
-  IResolve (merge IResolveDefaults
-                  {:unresolved-field-id :field-id
-                   :fk-field-id         :fk-field-id
-                   :resolve-field       field-ph-resolve-field}))
-
-
-;;; ----------------------------------------------- VALUE PLACEHOLDER ------------------------------------------------
-
-(defprotocol ^:private IParseValueForField
-  (^:private parse-value [this value]
-   "Parse a value for a given type of `Field`."))
-
-(extend-protocol IParseValueForField
-  Field
-  (parse-value [this value]
-    (s/validate Value (i/map->Value {:field this, :value value})))
-
-  ExpressionRef
-  (parse-value [this value]
-    (s/validate Value (i/map->Value {:field this, :value value})))
-
-  DateTimeField
-  (parse-value [this value]
-    (let [parsed-string-date (some-> value du/->Timestamp)]
-      (cond
-        parsed-string-date
-        (s/validate DateTimeValue (i/map->DateTimeValue {:field this, :value parsed-string-date}))
-
-        (instance? RelativeDatetime value)
-        (do (s/validate RelativeDatetime value)
-            (s/validate RelativeDateTimeValue (i/map->RelativeDateTimeValue
-                                               {:field this, :amount (:amount value), :unit (:unit value)})))
-
-        (nil? value)
-        nil
-
-        :else
-        (throw (Exception. (format "Invalid value '%s': expected a DateTime." value))))))
-
-  TimeField
-  (parse-value [this value]
-    (let [tz-id              ^String (setting/get :report-timezone)
-          tz                 (when tz-id
-                               (TimeZone/getTimeZone tz-id))
-          parsed-string-time (some-> value
-                                     (du/str->time tz))]
-      (cond
-        parsed-string-time
-        (s/validate TimeValue (i/map->TimeValue {:field this, :value parsed-string-time :timezone-id tz-id}))
-
-        (nil? value)
-        nil
-
-        :else
-        (throw (Exception. (format "Invalid value '%s': expected a Time." value)))))))
-
-(defn- value-ph-resolve-field
-  "Attempt to resolve the `Field` for a `ValuePlaceholder`. Return a resolved `Value` or `DateTimeValue`."
-  [{:keys [field-placeholder value]} field-id->field]
-  (let [resolved-field (resolve-field field-placeholder field-id->field)]
-    (when-not resolved-field
-      (throw (Exception. (format "Unable to resolve field: %s" field-placeholder))))
-    (parse-value resolved-field value)))
-
-(u/strict-extend ValuePlaceholder
-  IResolve (merge IResolveDefaults
-                  {:resolve-field value-ph-resolve-field}))
-
-
-;;; ------------------------------------------------------ IMPL ------------------------------------------------------
-
-(defn- collect-ids-with [f expanded-query-dict]
-  (let [ids (transient #{})]
-    (walk/postwalk (fn [form]
-                     (when-let [id (f form)]
-                       (conj! ids id)))
-                   expanded-query-dict)
-    (persistent! ids)))
-
-(def ^:private collect-unresolved-field-ids (partial collect-ids-with unresolved-field-id))
-(def ^:private collect-fk-field-ids         (partial collect-ids-with fk-field-id))
-
-
-(defn- record-fk-field-ids
-  "Record `:fk-field-id` referenced in the Query."
-  [expanded-query-dict]
-  (assoc expanded-query-dict :fk-field-ids (collect-fk-field-ids expanded-query-dict)))
-
-(defn- add-parent-placeholder-if-needed
-  "If `field` has a `:parent-id` add a `FieldPlaceholder` for its parent."
-  [field]
-  (assoc field :parent (when-let [parent-id (:parent-id field)]
-                         (i/map->FieldPlaceholder {:field-id parent-id}))))
-
-(defn- fetch-fields
-  "Fetch a sequence of Fields with appropriate information from the database for the IDs in `field-ids`, excluding
-  'sensitive' Fields. Then hydrate information that will be used by the QP and transform the keys so they're
-  Clojure-style as expected by the rest of the QP code."
-  [field-ids]
-  (as-> (db/select [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id
-                    :description :id :fingerprint :database_type :settings]
-          :visibility_type [:not= "sensitive"]
-          :id              [:in field-ids]) fields
-    ;; hydrate values & dimensions for the `fields` we just fetched from the DB
-    (hydrate fields :values :dimensions)
-    ;; now for each Field call `rename-mb-field-keys` on it to take underscored DB-style names and replace them with
-    ;; hyphenated Clojure-style names. (I know, this is a questionable step!)
-    (map rename-mb-field-keys fields)
-    ;; now add a FieldPlaceholder for its parent if the Field has a parent-id so it can get resolved on the next pass
-    (map add-parent-placeholder-if-needed fields)))
-
-(defn- resolve-fields
-  "Resolve the `Fields` in an EXPANDED-QUERY-DICT. Record `:table-ids` referenced in the Query."
-  [expanded-query-dict]
-  (loop [max-iterations 5, expanded-query-dict expanded-query-dict]
-    (when (neg? max-iterations)
-      (throw (Exception. "Failed to resolve fields: too many iterations.")))
-    (let [field-ids (collect-unresolved-field-ids expanded-query-dict)]
-      (if-not (seq field-ids)
-        ;; If there are no more Field IDs to resolve we're done.
-        expanded-query-dict
-        ;; Otherwise fetch + resolve the Fields in question
-        (let [fields (fetch-fields field-ids)]
-          (->>
-           ;; Now record the IDs of Tables these fields reference in the :table-ids property of the expanded query
-           ;; dict. Those will be used for Table resolution in the next step.
-           (update expanded-query-dict :table-ids set/union (set (map :table-id fields)))
-           ;; Walk the query and resolve all fields
-           (walk/postwalk (u/rpartial resolve-field (u/key-by :field-id fields)))
-           ;; Recurse in case any new (nested) unresolved fields were found.
-           (recur (dec max-iterations))))))))
-
-(defn- fk-field-ids->info
-  "Given a SOURCE-TABLE-ID and collection of FK-FIELD-IDS, return a sequence of maps containing IDs and identifiers
-  for those FK fields and their target tables and fields. FK-FIELD-IDS are IDs of fields that belong to the source
-  table. For example, SOURCE-TABLE-ID might be 'checkins' and FK-FIELD-IDS might have the IDs for 'checkins.user_id'
-  and the like."
-  [source-table-id fk-field-ids]
-  (when (seq fk-field-ids)
-    (db/query {:select    [[:source-fk.name      :source-field-name]
-                           [:source-fk.id        :source-field-id]
-                           [:target-pk.id        :target-field-id]
-                           [:target-pk.name      :target-field-name]
-                           [:target-table.id     :target-table-id]
-                           [:target-table.name   :target-table-name]
-                           [:target-table.schema :target-table-schema]]
-               :from      [[field/Field :source-fk]]
-               :left-join [[field/Field :target-pk] [:= :source-fk.fk_target_field_id :target-pk.id]
-                           [Table :target-table]    [:= :target-pk.table_id :target-table.id]]
-               :where     [:and [:in :source-fk.id       (set fk-field-ids)]
-                                [:=  :source-fk.table_id source-table-id]
-                                (mdb/isa :source-fk.special_type :type/FK)]})))
-
-(defn- fk-field-ids->joined-tables
-  "Fetch info for PK/FK `Fields` for the JOIN-TABLES referenced in a Query."
-  [source-table-id fk-field-ids]
-  (when (seq fk-field-ids)
-    (vec (for [{:keys [source-field-name source-field-id target-field-id target-field-name target-table-id
-                       target-table-name target-table-schema]} (fk-field-ids->info source-table-id fk-field-ids)]
-           (i/map->JoinTable {:table-id     target-table-id
-                              :table-name   target-table-name
-                              :schema       target-table-schema
-                              :pk-field     (i/map->JoinTableField {:field-id   target-field-id
-                                                                    :field-name target-field-name})
-                              :source-field (i/map->JoinTableField {:field-id   source-field-id
-                                                                    :field-name source-field-name})
-                              ;; some DBs like Oracle limit the length of identifiers to 30 characters so only take
-                              ;; the first 30 here
-                              :join-alias  (apply str (take 30 (str target-table-name "__via__" source-field-name)))})))))
-
-(defn- create-fk-id+table-id->table
-  "Create the `fk-id+table-id->table` map used in resolving table names in `resolve-table` calls"
-  [{source-table-id :id :as source-table} joined-tables]
-  (into {[nil source-table-id] source-table}
-        (for [{:keys [source-field table-id join-alias]} joined-tables]
-          {[(:field-id source-field) table-id] {:name join-alias
-                                                :id   table-id}})))
-
-(defn- append-new-fields
-  "Returns a vector fields that have all `existing-fields` and any field in `new-fields` not already found in
-  `existing-fields`"
-  [existing-fields new-fields]
-  (let [existing-field-names (set (map name existing-fields))]
-    (vec (concat existing-fields
-                 (remove (comp existing-field-names name) new-fields)))))
-
-;; Needed as `resolve-tables-in-nested-query` and `resolve-tables` are mutually recursive
-(declare resolve-tables)
-
-(defn- resolve-tables-in-nested-query
-  "This function is pull up a nested query found in `expanded-query-dict` and run it through
-  `resolve-tables`. Unfortunately our work isn't done there. If `expanded-query-dict` has a breakout that refers to a
-  column from the nested query we will need to resolve the fields in that breakout after the nested query has been
-  resolved. More comments in-line that breakout the work for that."
-  [{{:keys [source-query]} :query, :as expanded-query-dict}]
-  ;; No need to try and resolve a nested native query
-  (if (:native source-query)
-    expanded-query-dict
-    (let [ ;; Resolve the nested query as if it were a top level query
-          {nested-inner :query, :as nested-outer} (resolve-tables (assoc expanded-query-dict :query source-query))
-          nested-source-table-id                  (:source-table nested-inner)
-          ;; Build a list of join tables found from the newly resolved nested query
-          nested-joined-tables                    (fk-field-ids->joined-tables nested-source-table-id
-                                                                               (:fk-field-ids nested-outer))
-          ;; Create the map of fk to table info from the resolved nested query
-          fk-id+table-id->table                   (create-fk-id+table-id->table (some-> nested-source-table-id qp.store/table)
-                                                                                nested-joined-tables)
-          ;; Resolve the top level (original) breakout fields with the join information from the resolved nested query
-          resolved-breakout                       (for [breakout (get-in expanded-query-dict [:query :breakout])]
-                                                    (resolve-table breakout fk-id+table-id->table))]
-      (assoc-in expanded-query-dict [:query :source-query]
-                (if (and (contains? nested-inner :fields)
-                         (seq resolved-breakout))
-                  (update nested-inner :fields append-new-fields resolved-breakout)
-                  nested-inner)))))
-
-(defn- resolve-tables
-  "Resolve the `Tables` in an EXPANDED-QUERY-DICT."
-  [{:keys [fk-field-ids], {source-table-id :source-table} :query, :as expanded-query-dict}]
-  (if-not source-table-id
-    ;; if we have a `source-query`, recurse and resolve tables in that
-    (resolve-tables-in-nested-query expanded-query-dict)
-    ;; otherwise we can resolve tables in the (current) top-level
-    (let [joined-tables         (fk-field-ids->joined-tables source-table-id fk-field-ids)
-          fk-id+table-id->table (create-fk-id+table-id->table (qp.store/table source-table-id) joined-tables)]
-      (as-> expanded-query-dict <>
-        (assoc-in <> [:query :join-tables]  joined-tables)
-        (walk/postwalk #(resolve-table % fk-id+table-id->table) <>)))))
-
-(defn- resolve-field-literals
-  "When resolving a field, we connect a `field-id` with a `Field` in our metadata tables. This is a similar process
-  for `FieldLiteral`s, except we are attempting to connect a `FieldLiteral` with an associated entry in the
-  `source-metadata` attached to the query (typically from the `Card` of a nested query)."
-  [{:keys [source-metadata] :as expanded-query-dict}]
-  (let [name->fingerprint (zipmap (map :name source-metadata)
-                                  (map :fingerprint source-metadata))]
-    (qputil/postwalk-pred #(instance? FieldLiteral %)
-                          (fn [{:keys [binning-strategy binning-param binning-opts] :as node}]
-                            (let [fingerprint     (get name->fingerprint (:field-name node))
-                                  node-with-print (assoc node :fingerprint fingerprint)]
-                              (cond
-                                ;; We can't bin without min/max values found from a fingerprint
-                                (and binning-strategy (not fingerprint))
-                                (throw (Exception. "Binning not supported on a field literal with no fingerprint"))
-
-                                (and fingerprint binning-strategy)
-                                (resolve-binned-field binning-strategy binning-param binning-opts node-with-print)
-
-                                :else
-                                node-with-print)))
-                          expanded-query-dict)))
-
-
-;;; ------------------------------------------------ PUBLIC INTERFACE ------------------------------------------------
-
-(defn ^:deprecated resolve-fields-if-needed
-  "Resolves any unresolved fields found in `fields`. Will just return resolved fields with no changes."
-  [fields]
-  (let [fields-to-resolve (map unresolved-field-id fields)]
-    (if-let [field-id->field (and (seq fields-to-resolve)
-                                  (u/key-by :field-id (fetch-fields fields-to-resolve)))]
-      (map #(resolve-field % field-id->field) fields)
-      fields)))
-
-(s/defn ^:deprecated resolve :- su/Map
-  "Resolve placeholders by fetching `Fields`, `Databases`, and `Tables` that are referred to in EXPANDED-QUERY-DICT."
-  [expanded-query-dict :- su/Map]
-  (some-> expanded-query-dict
-          record-fk-field-ids
-          resolve-fields
-          resolve-field-literals
-          resolve-tables))
-
-(defn ^:deprecated resolve-middleware
-  "Wraps the `resolve` function in a query-processor middleware"
-  [qp]
-  (fn [{database-id :database, query-type :type, :as query}]
-    (let [resolved-db (db/select-one [Database :name :id :engine :details :timezone], :id database-id)
-          query       (if (= query-type :query)
-                        (resolve query)
-                        query)]
-      (qp (assoc query :database resolved-db)))))
diff --git a/src/metabase/query_processor/middleware/resolve_database.clj b/src/metabase/query_processor/middleware/resolve_database.clj
new file mode 100644
index 0000000000000000000000000000000000000000..ab439c1711db457bf4347c11638f7dd6de6e7f03
--- /dev/null
+++ b/src/metabase/query_processor/middleware/resolve_database.clj
@@ -0,0 +1,19 @@
+(ns metabase.query-processor.middleware.resolve-database
+  (:require [metabase.models.database :as database :refer [Database]]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util :as u]
+            [metabase.util.i18n :refer [tru]]
+            [toucan.db :as db]))
+
+(defn- resolve-database* [{database-id :database, :as query}]
+  (u/prog1 query
+    (when-not (= database-id database/virtual-id)
+      (qp.store/store-database! (or (db/select-one (apply vector Database qp.store/database-columns-to-fetch)
+                                      :id (u/get-id database-id))
+                                    (throw (Exception. (str (tru "Database {0} does not exist." database-id)))))))))
+
+(defn resolve-database
+  "Middleware that resolves the Database referenced by the query under that `:database` key and stores it in the QP
+  Store."
+  [qp]
+  (comp qp resolve-database*))
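+
+;; A minimal usage sketch (hypothetical database ID). `resolve-database*` runs *before* the wrapped `qp`,
+;; so downstream middleware can rely on the Database already being in the QP Store:
+(comment
+  (qp.store/do-with-new-store
+   (fn []
+     ((resolve-database identity) {:database 1, :type :query, :query {:source-table 2}}))))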
diff --git a/src/metabase/query_processor/middleware/resolve_fields.clj b/src/metabase/query_processor/middleware/resolve_fields.clj
index 129c39e4a5b030db5fc78050f8b9d1415ef84642..c9208a9d44825107bfb11c75130ad3fb9e8eb06c 100644
--- a/src/metabase/query_processor/middleware/resolve_fields.clj
+++ b/src/metabase/query_processor/middleware/resolve_fields.clj
@@ -6,13 +6,20 @@
             [metabase.util :as u]
             [toucan.db :as db]))
 
-(defn- resolve-fields* [{mbql-inner-query :query, :as query}]
+(defn- resolve-fields-with-ids! [field-ids]
+  (let [fetched-fields (db/select (apply vector Field qp.store/field-columns-to-fetch) :id [:in (set field-ids)])]
+    ;; store the new fields
+    (doseq [field fetched-fields]
+      (qp.store/store-field! field))
+    ;; now recursively fetch parents if needed
+    (when-let [parent-ids (seq (filter some? (map :parent_id fetched-fields)))]
+      (recur parent-ids))))
+
+(defn- resolve-fields* [query]
   (u/prog1 query
-    (when-let [field-ids (seq (map second (mbql.u/clause-instances :field-id mbql-inner-query)))]
-      ;; Just fetch the entire object for right now. We can pare this down at a later date
-      ;; TODO - figure out which Fields we need and only fetch those
-      (doseq [field (db/select Field :id [:in (set field-ids)])]
-        (qp.store/store-field! field)))))
+    (when-let [field-ids (seq (remove (qp.store/already-fetched-field-ids)
+                                      (mbql.u/match (:query query) [:field-id id] id)))]
+      (resolve-fields-with-ids! field-ids))))
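+
+;; Sketch of the recursion above (hypothetical IDs): if Field 10 has `:parent_id` 5, resolving it fetches
+;; the parent too:
+(comment
+  (resolve-fields-with-ids! [10])
+  (qp.store/already-fetched-field-ids)) ; -> #{5 10}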
 
 (defn resolve-fields
   "Fetch the Fields referenced by `:field-id` clauses in a query and store them in the Query Processor Store for the
diff --git a/src/metabase/query_processor/middleware/resolve_joined_tables.clj b/src/metabase/query_processor/middleware/resolve_joined_tables.clj
new file mode 100644
index 0000000000000000000000000000000000000000..00e2b4bf86d6702f7c583c31126e66cfc9407af4
--- /dev/null
+++ b/src/metabase/query_processor/middleware/resolve_joined_tables.clj
@@ -0,0 +1,126 @@
+(ns metabase.query-processor.middleware.resolve-joined-tables
+  "Middleware that fetches tables that will need to be joined, referred to by `fk->` clauses, and adds information to
+  the query about what joins should be done and how they should be performed."
+  (:require [metabase.db :as mdb]
+            [metabase.driver :as driver]
+            [metabase.mbql
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.query-processor.interface :as qp.i]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util.schema :as su]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(defn- both-args-are-field-id-clauses? [[_ x y]]
+  (and
+   (mbql.u/is-clause? :field-id x)
+   (mbql.u/is-clause? :field-id y)))
+
+(def ^:private FKClauseWithFieldIDArgs
+  (s/constrained mbql.s/fk-> both-args-are-field-id-clauses? "fk-> clause where both args are field-id clauses"))
+
+
+;;; ------------------------------------------------ Fetching PK Info ------------------------------------------------
+
+(def ^:private PKInfo
+  [{:fk-id    su/IntGreaterThanZero
+    :pk-id    su/IntGreaterThanZero
+    :table-id su/IntGreaterThanZero}])
+
+(s/defn ^:private fk-clauses->pk-info :- PKInfo
+  "Given a `source-table-id` and collection of `fk-field-ids`, return a sequence of maps containing IDs and identifiers
+  for those FK fields and their target tables and fields. `fk-field-ids` are IDs of fields that belong to the source
+  table. For example, `source-table-id` might be 'checkins' and `fk-field-ids` might have the IDs for 'checkins.user_id'
+  and the like."
+  [source-table-id :- su/IntGreaterThanZero, fk-clauses :- [FKClauseWithFieldIDArgs]]
+  (let [fk-field-ids (set (for [[_ [_ source-id]] fk-clauses]
+                            source-id))]
+    (when (seq fk-field-ids)
+      (db/query {:select    [[:source-fk.id    :fk-id]
+                             [:target-pk.id    :pk-id]
+                             [:target-table.id :table-id]]
+                 :from      [[Field :source-fk]]
+                 :left-join [[Field :target-pk]    [:= :source-fk.fk_target_field_id :target-pk.id]
+                             [Table :target-table] [:= :target-pk.table_id :target-table.id]]
+                 :where     [:and
+                             [:in :source-fk.id (set fk-field-ids)]
+                             [:=  :source-fk.table_id source-table-id]
+                             (mdb/isa :source-fk.special_type :type/FK)]}))))
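+
+;; For example (hypothetical IDs): an `fk->` clause from 'checkins.user_id' (Field 12) to 'users.id'
+;; (Field 34, on Table 5) yields PK info shaped like:
+;;
+;;   [{:fk-id 12, :pk-id 34, :table-id 5}]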
+
+
+;;; -------------------------------- Fetching join Tables & adding them to the Store  --------------------------------
+
+(s/defn ^:private fks->dest-table-ids :- #{su/IntGreaterThanZero}
+  [fk-clauses :- [FKClauseWithFieldIDArgs]]
+  (set (for [[_ _ [_ dest-id]] fk-clauses]
+         (:table_id (qp.store/field dest-id)))))
+
+(s/defn ^:private store-join-tables! [fk-clauses :- [FKClauseWithFieldIDArgs]]
+  (let [table-ids-to-fetch (fks->dest-table-ids fk-clauses)]
+    (when (seq table-ids-to-fetch)
+      (doseq [table (db/select (vec (cons Table qp.store/table-columns-to-fetch)), :id [:in table-ids-to-fetch])]
+        (qp.store/store-table! table)))))
+
+
+;;; ------------------------------------ Adding join Table PK fields to the Store ------------------------------------
+
+(s/defn ^:private store-join-table-pk-fields!
+  [pk-info :- PKInfo]
+  (let [pk-field-ids (set (map :pk-id pk-info))
+        pk-fields    (when (seq pk-field-ids)
+                       (db/select (vec (cons Field qp.store/field-columns-to-fetch)) :id [:in pk-field-ids]))]
+    (doseq [field pk-fields]
+      (qp.store/store-field! field))))
+
+
+;;; -------------------------------------- Adding :join-tables key to the query --------------------------------------
+
+(s/defn fks->join-information :- [mbql.s/JoinTableInfo]
+  [fk-clauses :- [FKClauseWithFieldIDArgs], pk-info :- PKInfo]
+  (distinct
+   (for [[_ [_ source-id] [_ dest-id]] fk-clauses
+         :let [source-field (qp.store/field source-id)
+               dest-field   (qp.store/field dest-id)
+               table-id     (:table_id dest-field)
+               table        (qp.store/table table-id)
+               pk-id        (some (fn [info]
+                                    (when (and (= (:table-id info) table-id)
+                                               (= (:fk-id info) source-id))
+                                      (:pk-id info)))
+                                  pk-info)]]
+     ;; some DBs like Oracle limit the length of identifiers to 30 characters so only take
+     ;; the first 30 here
+     {:join-alias  (apply str (take 30 (str (:name table) "__via__" (:name source-field))))
+      :table-id    table-id
+      :fk-field-id source-id
+      :pk-field-id pk-id})))
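+
+;; Continuing the example above (hypothetical IDs/names), the resulting join info would look like:
+;;
+;;   [{:join-alias "users__via__user_id", :table-id 5, :fk-field-id 12, :pk-field-id 34}]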
+
+(s/defn ^:private add-join-info-to-query :- mbql.s/Query
+  [query fk-clauses pk-info]
+  (assoc-in query [:query :join-tables] (fks->join-information fk-clauses pk-info)))
+
+
+;;; -------------------------------------------- PUTTING it all together ---------------------------------------------
+
+(defn- resolve-joined-tables* [query]
+  (if (and qp.i/*driver* (not (driver/driver-supports? qp.i/*driver* :foreign-keys)))
+    query
+    (let [source-table-id (mbql.u/query->source-table-id query)
+          fk-clauses      (mbql.u/match (:query query) [:fk-> [:field-id _] [:field-id _]])]
+      (if-not (and (seq fk-clauses) source-table-id)
+        query
+        (let [pk-info (fk-clauses->pk-info source-table-id fk-clauses)]
+          (store-join-tables! fk-clauses)
+          (store-join-table-pk-fields! pk-info)
+          (add-join-info-to-query query fk-clauses pk-info))))))
+
+(defn resolve-joined-tables
+  "Fetch and store any Tables other than the source Table referred to by `fk->` clauses in an MBQL query, and add a
+  `:join-tables` key inside the MBQL inner query dictionary containing information about the `JOIN`s (or equivalent)
+  that need to be performed for these tables."
+  [qp]
+  (comp qp resolve-joined-tables*))
diff --git a/src/metabase/query_processor/middleware/source_table.clj b/src/metabase/query_processor/middleware/resolve_source_table.clj
similarity index 66%
rename from src/metabase/query_processor/middleware/source_table.clj
rename to src/metabase/query_processor/middleware/resolve_source_table.clj
index 76908209eaa62c76fb75a6b0433ea8649fa53aca..05a95efc58f47a836951d730c8ab97a0eafc369a 100644
--- a/src/metabase/query_processor/middleware/source_table.clj
+++ b/src/metabase/query_processor/middleware/resolve_source_table.clj
@@ -1,20 +1,21 @@
-(ns metabase.query-processor.middleware.source-table
+(ns metabase.query-processor.middleware.resolve-source-table
+  "TODO - maybe rename this `resolve-source-table` so it matches the other ones"
   (:require [metabase.mbql.util :as mbql.u]
             [metabase.models.table :refer [Table]]
             [metabase.query-processor.store :as qp.store]
             [metabase.util.i18n :refer [trs]]
             [toucan.db :as db]))
 
-(defn- resolve-source-table [query]
+(defn- resolve-source-table* [query]
   (when-let [source-table-id (mbql.u/query->source-table-id query)]
-    (let [source-table (or (db/select-one [Table :schema :name :id], :id source-table-id)
+    (let [source-table (or (db/select-one (vec (cons Table qp.store/table-columns-to-fetch)), :id source-table-id)
                            (throw (Exception. (str (trs "Cannot run query: could not find source table {0}."
                                                         source-table-id)))))]
       (qp.store/store-table! source-table)))
   query)
 
-(defn resolve-source-table-middleware
+(defn resolve-source-table
   "Middleware that will take the source-table (an integer) and hydrate that source table from the the database and
   attach it as `:source-table`"
   [qp]
-  (comp qp resolve-source-table))
+  (comp qp resolve-source-table*))
diff --git a/src/metabase/query_processor/middleware/results_metadata.clj b/src/metabase/query_processor/middleware/results_metadata.clj
index eab4f8a48889f04e2d7beacccb8c62be32c2ecdc..8e76f3e80f4cd60d45a78fff02ea7f8ff1fddcd2 100644
--- a/src/metabase/query_processor/middleware/results_metadata.clj
+++ b/src/metabase/query_processor/middleware/results_metadata.clj
@@ -5,14 +5,15 @@
   (:require [buddy.core.hash :as hash]
             [cheshire.core :as json]
             [clojure.tools.logging :as log]
-            [metabase.query-processor.interface :as i]
+            [metabase.driver :as driver]
+            [metabase.query-processor.interface :as qp.i]
             [metabase.sync.analyze.query-results :as qr]
-            [metabase.util :as u]
             [metabase.util
              [encryption :as encryption]
              [i18n :refer [tru]]]
             [ring.util.codec :as codec]
-            [toucan.db :as db]))
+            [toucan.db :as db]
+            [metabase.util :as u]))
 
 ;; TODO - is there some way we could avoid doing this every single time a Card is ran? Perhaps by passing the current
 ;; Card metadata as part of the query context so we can compare for changes
@@ -57,7 +58,8 @@
           (let [{:keys [metadata insights]} (qr/results->column-metadata results)]
             ;; At the very least we can skip the Extra DB call to update this Card's metadata results
             ;; if its DB doesn't support nested queries in the first place
-            (when (and (i/driver-supports? :nested-queries)
+            (when (and qp.i/*driver*
+                       (driver/driver-supports? qp.i/*driver* :nested-queries)
                        card-id
                        (not nested?))
               (record-metadata! card-id metadata))
@@ -70,6 +72,8 @@
           ;; rather than failing the entire query
           (catch Throwable e
             (log/error (tru "Error recording results metadata for query:")
-                       (.getMessage e) "\n"
+                       "\n"
+                       (class e) (.getMessage e)
+                       "\n"
                        (u/pprint-to-str (u/filtered-stacktrace e)))
             results))))))
diff --git a/src/metabase/query_processor/middleware/wrap_value_literals.clj b/src/metabase/query_processor/middleware/wrap_value_literals.clj
new file mode 100644
index 0000000000000000000000000000000000000000..602d1856dae98eb24cfa33a5df5f979e570a8c08
--- /dev/null
+++ b/src/metabase/query_processor/middleware/wrap_value_literals.clj
@@ -0,0 +1,113 @@
+(ns metabase.query-processor.middleware.wrap-value-literals
+  (:require [metabase.driver :as driver]
+            [metabase.mbql
+             [predicates :as mbql.preds]
+             [schema :as mbql.s]
+             [util :as mbql.u]]
+            [metabase.models.field :refer [Field]]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util.date :as du])
+  (:import java.util.TimeZone))
+
+;;; --------------------------------------------------- Type Info ----------------------------------------------------
+
+(defmulti ^:private ^{:doc (str "Get information about database, base, and special types for an object. This is passed "
+                                "along to various `->honeysql` method implementations so drivers have the "
+                                "information they need to handle raw values like Strings, which may need to be parsed "
+                                "as a certain type.")}
+  type-info
+  mbql.u/dispatch-by-clause-name-or-class)
+
+(defmethod type-info :default [_] nil)
+
+(defmethod type-info (class Field) [this]
+  (let [field-info (select-keys this [:base_type :special_type :database_type])]
+    (merge
+     field-info
+     ;; add in a default unit for this Field so we know to wrap datetime strings in `absolute-datetime` below based on
+     ;; its presence. It will get replaced by `:datetime-field` unit if we're wrapped by one
+     (when (mbql.u/datetime-field? field-info)
+       {:unit :default}))))
+
+(defmethod type-info :field-id [[_ field-id]]
+  (type-info (qp.store/field field-id)))
+
+(defmethod type-info :fk-> [[_ _ dest-field]]
+  (type-info dest-field))
+
+(defmethod type-info :datetime-field [[_ field unit]]
+  (assoc (type-info field) :unit unit))
+
+
+;;; ------------------------------------------------- add-type-info --------------------------------------------------
+
+(defmulti ^:private add-type-info (fn [x info & [{:keys [parse-datetime-strings?]}]]
+                                    (class x)))
+
+(defmethod add-type-info nil [_ info & _]
+  [:value nil info])
+
+(defmethod add-type-info Object [this info & _]
+  [:value this info])
+
+(defmethod add-type-info java.util.Date [this info & _]
+  [:absolute-datetime (du/->Timestamp this) (get info :unit :default)])
+
+(defmethod add-type-info java.sql.Time [this info & _]
+  [:time this (get info :unit :default)])
+
+(defmethod add-type-info java.sql.Timestamp [this info & _]
+  [:absolute-datetime this (get info :unit :default)])
+
+(defn- maybe-parse-as-time [datetime-str unit]
+  (when (mbql.preds/TimeUnit? unit)
+    (du/str->time datetime-str (when-let [report-timezone (driver/report-timezone)]
+                                 (TimeZone/getTimeZone ^String report-timezone)))))
+
+(defmethod add-type-info String [this info & [{:keys [parse-datetime-strings?]
+                                               :or   {parse-datetime-strings? true}}]]
+  (if-let [unit (when (and (du/date-string? this)
+                           parse-datetime-strings?)
+                  (:unit info))]
+    ;; should use report timezone by default
+    (if-let [time (maybe-parse-as-time this unit)]
+      [:time time unit]
+      [:absolute-datetime (du/->Timestamp this) unit])
+    [:value this info]))
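+
+;; Sketch of the String behavior above (hypothetical type info): a parseable datetime string compared to a
+;; datetime Field is promoted to an `:absolute-datetime` clause; everything else stays a plain `:value`:
+;;
+;;   (add-type-info "2018-10-01" {:base_type :type/DateTime, :unit :day})
+;;   ;; -> [:absolute-datetime #inst "2018-10-01..." :day]
+;;
+;;   (add-type-info "wow" {:base_type :type/Text})
+;;   ;; -> [:value "wow" {:base_type :type/Text}]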
+
+
+;;; -------------------------------------------- wrap-literals-in-clause ---------------------------------------------
+
+(def ^:private raw-value? (complement mbql.u/mbql-clause?))
+
+(defn ^:private wrap-value-literals-in-mbql-query
+  [{:keys [source-query], :as inner-query}]
+  (let [inner-query (cond-> inner-query
+                      source-query (update :source-query wrap-value-literals-in-mbql-query))]
+    (mbql.u/replace-in inner-query [:filter]
+      [(clause :guard #{:= :!= :< :> :<= :>=}) field (x :guard raw-value?)]
+      [clause field (add-type-info x (type-info field))]
+
+      [:between field (min-val :guard raw-value?) (max-val :guard raw-value?)]
+      [:between
+       field
+       (add-type-info min-val (type-info field))
+       (add-type-info max-val (type-info field))]
+
+      [(clause :guard #{:starts-with :ends-with :contains}) field (s :guard string?) & more]
+      (apply vector clause field (add-type-info s (type-info field) {:parse-datetime-strings? false}) more))))
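+
+;; Before/after sketch (hypothetical Field 1, an integer column):
+;;
+;;   {:filter [:= [:field-id 1] 2]}
+;;   ;; becomes
+;;   {:filter [:= [:field-id 1] [:value 2 {:base_type     :type/Integer
+;;                                         :special_type  nil
+;;                                         :database_type "INTEGER"}]]}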
+
+(defn- wrap-value-literals*
+  [{query-type :type, :as query}]
+  (if-not (= query-type :query)
+    query
+    (mbql.s/validate-query
+     (update query :query wrap-value-literals-in-mbql-query))))
+
+(defn wrap-value-literals
+  "Middleware that wraps ran value literals in `:value` (for integers, strings, etc.) or `:absolute-datetime` (for
+  datetime strings, etc.) clauses which include info about the Field they are being compared to. This is done mostly
+  to make it easier for drivers to write implementations that rely on multimethod dispatch (by clause name) -- they
+  can dispatch directly off of these clauses."
+  [qp]
+  (comp qp wrap-value-literals*))
diff --git a/src/metabase/query_processor/sort.clj b/src/metabase/query_processor/sort.clj
deleted file mode 100644
index 7c8b6c2cef46ae6ccf0b383e271fd2a92ce48bdd..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/sort.clj
+++ /dev/null
@@ -1,93 +0,0 @@
-(ns metabase.query-processor.sort
-  "Code for determining the order columns should be returned in from query results."
-  (:require [clojure.tools.logging :as log]
-            [metabase.query-processor.interface :as i]
-            [metabase.util :as u]))
-
-;; TODO - shouldn't this be `metabase.query-processor.middleware.sort` ??
-
-;; Fields should be returned in the following order:
-;; 1.  Breakout Fields
-;;
-;; 2.  Aggregation Fields (e.g. sum, count)
-;;
-;; 3.  Fields clause Fields, if they were added explicitly
-;;
-;; 4.  All other Fields, sorted by:
-;;     A.  :position (ascending)
-;;         Users can manually specify default Field ordering for a Table in the Metadata admin. In that case, return
-;;         Fields in the specified order; most of the time they'll have the default value of 0, in which case we'll
-;;         compare...
-;;
-;;     B.  :special_type "group" -- :id Fields, then :name Fields, then everyting else
-;;         Attempt to put the most relevant Fields first. Order the Fields as follows:
-;;         1.  :id Fields
-;;         2.  :name Fields
-;;         3.  all other Fields
-;;
-;;     C.  Field Name
-;;         When two Fields have the same :position and :special_type "group", fall back to sorting Fields
-;;         alphabetically by name. This is arbitrary, but it makes the QP deterministic by keeping the results in a
-;;         consistent order, which makes it testable.
-
-;;; ## Field Sorting
-
-;; We sort Fields with a "importance" vector like [source-importance position special-type-importance name]
-
-;; THE FOLLOWING FUNCTIONS ARE DEPRECATED: THEY WILL BE REMOVED IN A FUTURE RELEASE.
-;;
-;; We plan to move towards a pattern of figuring out sort order *before* queries are ran, rather than after.
-
-(defn- ^:deprecated source-importance-fn
-  "Create a function to return a importance for FIELD based on its source clause in the query.
-   e.g. if a Field comes from a `:breakout` clause, we should return that column first in the results."
-  [{:keys [fields-is-implicit]}]
-  (fn [{:keys [source]}]
-    (cond
-      (= source :breakout)          :0-breakout
-      (= source :aggregation)       :1-aggregation
-      (and (not fields-is-implicit)
-           (= source :fields))      :2-fields
-      :else                         :3-other)))
-
-(defn- ^:deprecated special-type-importance
-  "Return a importance for FIELD based on the relative importance of its `:special-type`.
-   i.e. a Field with special type `:id` should be sorted ahead of all other Fields in the results."
-  [{:keys [special-type]}]
-  (cond
-    (isa? special-type :type/PK)   :0-id
-    (isa? special-type :type/Name) :1-name
-    :else                          :2-other))
-
-(defn- ^:deprecated field-importance-fn
-  "Create a function to return an \"importance\" vector for use in sorting FIELD."
-  [query]
-  (let [source-importance (source-importance-fn query)]
-    (fn [{:keys [position clause-position field-name source], :as field}]
-      [(source-importance field)
-       (or position
-           (when (contains? #{:fields :breakout} source)
-             clause-position)
-           Integer/MAX_VALUE)
-       (special-type-importance field)
-       field-name])))
-
-(defn- ^:deprecated should-sort? [inner-query]
-  (or
-   ;; if there's no source query then always sort
-   (not (:source-query inner-query))
-   ;; if the source query is MBQL then sort
-   (not (get-in inner-query [:source-query :native]))
-   ;; otherwise only sort queries with *NATIVE* source queries if the query has an aggregation and/or breakout
-   (:aggregation inner-query)
-   (:breakout inner-query)))
-
-(defn ^:deprecated sort-fields
-  "Sort FIELDS by their \"importance\" vectors."
-  [inner-query fields]
-  (if-not (should-sort? inner-query)
-    fields
-    (let [field-importance (field-importance-fn inner-query)]
-      (when-not i/*disable-qp-logging*
-        (log/debug (u/format-color 'yellow "Sorted fields:\n%s" (u/pprint-to-str (sort (map field-importance fields))))))
-      (sort-by field-importance fields))))
diff --git a/src/metabase/query_processor/store.clj b/src/metabase/query_processor/store.clj
index 758db871040b44110d5d18b8012dc6c070fbb715..853014a6ef3647132691d4eb17759167951db867 100644
--- a/src/metabase/query_processor/store.clj
+++ b/src/metabase/query_processor/store.clj
@@ -13,8 +13,10 @@
   but fetching all Fields in a single pass and storing them for reuse is dramatically more efficient than fetching
   those Fields potentially dozens of times in a single query execution."
   (:require [metabase.models
+             [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
+            [metabase.util.i18n :refer [tru]]
             [metabase.util :as u]
             [metabase.util.schema :as su]
             [schema.core :as s]))
@@ -23,9 +25,9 @@
 
 (def ^:private ^:dynamic *store*
   "Dynamic var used as the QP store for a given query execution."
-  (atom nil))
+  (delay (throw (Exception. (str (tru "Error: Query Processor Store is not initialized.")))))
 
-(defn do-with-store
+(defn do-with-new-store
   "Execute `f` with a freshly-bound `*store*`."
   [f]
   (binding [*store* (atom {})]
@@ -36,54 +38,142 @@
   each query execution; you should have no need to use this macro yourself outside of that namespace."
   {:style/indent 0}
   [& body]
-  `(do-with-store (fn [] ~@body)))
+  `(do-with-new-store (fn [] ~@body)))
 
-;; TODO - DATABASE ??
+(defn do-with-pushed-store
+  "Execute bind a *copy* of the current store and execute `f`."
+  [f]
+  (binding [*store* (atom @*store*)]
+    (f)))
+
+(defmacro with-pushed-store
+  "Bind a temporary copy of the current store (presumably so you can make temporary changes) for the duration of `body`.
+  All changes to this 'pushed' copy are discarded once `body` completes.
+
+  This is used to make it easier to write downstream clause-handling functions in driver QP implementations without
+  needing to code them in a way where they are explicitly aware of the context in which they are called. For example,
+  we use this to temporarily give Tables a different `:name` in the SQL QP when we need to use an alias for them in
+  `fk->` forms.
+
+  Pushing stores is cumulative: nesting a `with-pushed-store` form inside another will make a copy of the copy.
+
+    (with-pushed-store
+      ;; store is now a temporary copy of original
+      (store-table! (assoc (table table-id) :name \"Temporary New Name\"))
+      (with-pushed-store
+        ;; store is now a temporary copy of the copy
+        (:name (table table-id)) ; -> \"Temporary New Name\"
+        ...)
+      ...)
+    (:name (table table-id)) ; -> \"Original Name\""
+  {:style/indent 0}
+  [& body]
+  `(do-with-pushed-store (fn [] ~@body)))
+
+(def database-columns-to-fetch
+  "Columns you should fetch for the Database referenced by the query before stashing in the store."
+  [:id
+   :engine
+   :name
+   :details])
+
+(def ^:private DatabaseInstanceWithRequiredStoreKeys
+  (s/both
+   (class Database)
+   {:id      su/IntGreaterThanZero
+    :engine  s/Keyword
+    :name    su/NonBlankString
+    :details su/Map
+    s/Any    s/Any}))
+
+(def table-columns-to-fetch
+  "Columns you should fetch for any Table you want to stash in the Store."
+  [:id
+   :name
+   :schema])
 
 (def ^:private TableInstanceWithRequiredStoreKeys
   (s/both
    (class Table)
-   {:id     su/IntGreaterThanZero ; TODO - what's the point of storing ID if it's already the key?
-    :schema (s/maybe s/Str)
+   {:schema (s/maybe s/Str)
     :name   su/NonBlankString
-    s/Any s/Any}))
+    s/Any   s/Any}))
+
+
+(def field-columns-to-fetch
+  "Columns to fetch for and Field you want to stash in the Store. These get returned as part of the `:cols` metadata in
+  query results. Try to keep this set pared down to just what's needed by the QP and frontend, since it has to be done
+  for every MBQL query."
+  [:base_type
+   :database_type
+   :description
+   :display_name
+   :fingerprint
+   :id
+   :name
+   :parent_id
+   :settings
+   :special_type
+   :table_id
+   :visibility_type])
 
 (def ^:private FieldInstanceWithRequiredStorekeys
   (s/both
    (class Field)
-   {:id           su/IntGreaterThanZero
-    :name         su/NonBlankString
-    :display_name su/NonBlankString
-    :description  (s/maybe s/Str)
-    :base_type    su/FieldType
-    :special_type (s/maybe su/FieldType)
-    :fingerprint  (s/maybe su/Map)
-    s/Any         s/Any}))
+   {:name          su/NonBlankString
+    :display_name  su/NonBlankString
+    :description   (s/maybe s/Str)
+    :database_type su/NonBlankString
+    :base_type     su/FieldType
+    :special_type  (s/maybe su/FieldType)
+    :fingerprint   (s/maybe su/Map)
+    :parent_id     (s/maybe su/IntGreaterThanZero)
+    s/Any          s/Any}))
 
 
 ;;; ------------------------------------------ Saving objects in the Store -------------------------------------------
 
+(s/defn store-database!
+  "Store the Database referenced by this query for the duration of the current query execution. Throws an Exception if
+  database is invalid or doesn't have all the required keys."
+  [database :- DatabaseInstanceWithRequiredStoreKeys]
+  (swap! *store* assoc :database database))
+
 (s/defn store-table!
-  "Store a `table` in the QP Store for the duration of the current query execution. Throws an Exception if Table is
+  "Store a `table` in the QP Store for the duration of the current query execution. Throws an Exception if table is
   invalid or doesn't have all required keys."
   [table :- TableInstanceWithRequiredStoreKeys]
   (swap! *store* assoc-in [:tables (u/get-id table)] table))
 
 (s/defn store-field!
-  "Store a `field` in the QP Store for the duration of the current query execution. Throws an Exception if Field is
+  "Store a `field` in the QP Store for the duration of the current query execution. Throws an Exception if field is
   invalid or doesn't have all required keys."
   [field :- FieldInstanceWithRequiredStorekeys]
   (swap! *store* assoc-in [:fields (u/get-id field)] field))
 
+(s/defn already-fetched-field-ids :- #{su/IntGreaterThanZero}
+  "Get a set of all the IDs of Fields that have already been fetched -- which means you don't have to do it again."
+  []
+  (set (keys (:fields @*store*))))
+
 
 ;;; ---------------------------------------- Fetching objects from the Store -----------------------------------------
 
+(s/defn database :- DatabaseInstanceWithRequiredStoreKeys
+  "Fetch the Database referenced by the current query from the QP Store. Throws an Exception if valid item is not
+  returned."
+  []
+  (or (:database @*store*)
+      (throw (Exception. (str (tru "Error: Database is not present in the Query Processor Store."))))))
+
 (s/defn table :- TableInstanceWithRequiredStoreKeys
   "Fetch Table with `table-id` from the QP Store. Throws an Exception if valid item is not returned."
   [table-id :- su/IntGreaterThanZero]
-  (get-in @*store* [:tables table-id]))
+  (or (get-in @*store* [:tables table-id])
+      (throw (Exception. (str (tru "Error: Table {0} is not present in the Query Processor Store." table-id))))))
 
 (s/defn field :- FieldInstanceWithRequiredStorekeys
   "Fetch Field with `field-id` from the QP Store. Throws an Exception if valid item is not returned."
   [field-id :- su/IntGreaterThanZero]
-  (get-in @*store* [:fields field-id]))
+  (or (get-in @*store* [:fields field-id])
+      (throw (Exception. (str (tru "Error: Field {0} is not present in the Query Processor Store." field-id))))))
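+
+;; Usage sketch (hypothetical ID): the fetch helpers now fail fast instead of returning nil:
+(comment
+  (field 99)) ; -> throws "Error: Field 99 is not present in the Query Processor Store."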
diff --git a/src/metabase/query_processor/util.clj b/src/metabase/query_processor/util.clj
index 3a9983ea04467e680faedc9c3a8ea617e79f1bb6..283b0f3a08a0642d99e2b9f1db38271ac22610d5 100644
--- a/src/metabase/query_processor/util.clj
+++ b/src/metabase/query_processor/util.clj
@@ -4,26 +4,18 @@
              [codecs :as codecs]
              [hash :as hash]]
             [cheshire.core :as json]
-            [clojure
-             [string :as str]
-             [walk :as walk]]
+            [clojure.string :as str]
             [metabase.util.schema :as su]
             [schema.core :as s]))
 
 ;; TODO - I think most of the functions in this namespace that we don't remove could be moved to `metabase.mbql.util`
 
 (defn ^:deprecated mbql-query? ;; not really needed anymore since we don't need to normalize tokens
-  "Is the given query an MBQL query?"
+  "Is the given query an MBQL query?
+   DEPRECATED: just look at `:type` directly, since it is guaranteed to be normalized."
   [query]
   (= :query (keyword (:type query))))
 
-(defn ^:deprecated datetime-field?
-  "Is FIELD a `DateTime` field?
-   (DEPRECATED because this only works for expanded queries.)"
-  [{:keys [base-type special-type]}]
-  (or (isa? base-type :type/DateTime)
-      (isa? special-type :type/DateTime)))
-
 (defn query-without-aggregations-or-limits?
   "Is the given query an MBQL query without a `:limit`, `:aggregation`, or `:page` clause?"
   [{{aggregations :aggregation, :keys [limit page]} :query}]
@@ -107,33 +99,3 @@
     (when (string? source-table)
       (when-let [[_ card-id-str] (re-matches #"^card__(\d+$)" source-table)]
         (Integer/parseInt card-id-str)))))
-
-
-;;; ---------------------------------------- General Tree Manipulation Helpers ---------------------------------------
-
-(defn ^:deprecated postwalk-pred
-  "Transform `form` by applying `f` to each node where `pred` returns true
-
-  DEPRECATED: use `mbql.u/replace-clauses` instead, or if that's not sophisticated enough, use a `clojure.walk` fn
-  directly."
-  [pred f form]
-  (walk/postwalk (fn [node]
-                   (if (pred node)
-                     (f node)
-                     node))
-                 form))
-
-(defn ^:deprecated postwalk-collect
-  "Invoke `collect-fn` on each node satisfying `pred`. If `collect-fn` returns a value, accumulate that and return the
-  results.
-
-  DEPRECATED: Use `mbql.u/clause-instances` instead to find all instances of a clause."
-  [pred collect-fn form]
-  (let [results (atom [])]
-    (postwalk-pred pred
-                   (fn [node]
-                     (when-let [result (collect-fn node)]
-                       (swap! results conj result))
-                     node)
-                   form)
-    @results))
diff --git a/src/metabase/sync/analyze/fingerprint/fingerprinters.clj b/src/metabase/sync/analyze/fingerprint/fingerprinters.clj
index 0442c123d0821ed5417820e33b34dedc669bbbf3..680957e64119f654e2a1ca2afe10d32181ce12ac 100644
--- a/src/metabase/sync/analyze/fingerprint/fingerprinters.clj
+++ b/src/metabase/sync/analyze/fingerprint/fingerprinters.clj
@@ -1,8 +1,11 @@
 (ns metabase.sync.analyze.fingerprint.fingerprinters
   "Non-identifying fingerprinters for various field types."
-  (:require [cheshire.core :as json]
+  (:require [bigml.histogram.core :as hist]
+            [cheshire.core :as json]
             [clj-time.coerce :as t.coerce]
-            [kixi.stats.core :as stats]
+            [kixi.stats
+             [core :as stats]
+             [math :as math]]
             [metabase.models.field :as field]
             [metabase.sync.analyze.classifiers.name :as classify.name]
             [metabase.sync.util :as sync-util]
@@ -11,7 +14,8 @@
              [date :as du]
              [i18n :refer [trs]]]
             [redux.core :as redux])
-  (:import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus
+  (:import com.bigml.histogram.Histogram
+           com.clearspring.analytics.stream.cardinality.HyperLogLogPlus
            org.joda.time.DateTime))
 
 (defn col-wise
@@ -61,7 +65,8 @@
 
 (def ^:private global-fingerprinter
   (redux/post-complete
-   (redux/fuse {:distinct-count cardinality})
+   (redux/fuse {:distinct-count cardinality
+                :nil%           (stats/share nil?)})
    (partial hash-map :global)))
 
 (defmethod fingerprinter :default
@@ -167,10 +172,23 @@
    (redux/fuse {:earliest earliest
                 :latest   latest})))
 
+(defn- histogram
+  "Transducer that summarizes numerical data with a histogram."
+  ([] (hist/create))
+  ([^Histogram histogram] histogram)
+  ([^Histogram histogram x] (hist/insert-simple! histogram x)))
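+
+;; `histogram` follows the standard reducing-function contract (init/step/completion arities), so it can
+;; be fed directly to `transduce`. A sketch with made-up data:
+(comment
+  (let [h (transduce identity histogram [1.0 2.0 2.0 3.0])]
+    [(hist/minimum h) (hist/mean h) (hist/maximum h)])) ; -> [1.0 2.0 3.0]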
+
 (deffingerprinter :type/Number
-  (redux/fuse {:min stats/min
-               :max stats/max
-               :avg stats/mean}))
+  (redux/post-complete
+   histogram
+   (fn [h]
+     (let [{q1 0.25 q3 0.75} (hist/percentiles h 0.25 0.75)]
+       {:min (hist/minimum h)
+        :max (hist/maximum h)
+        :avg (hist/mean h)
+        :sd  (some-> h hist/variance math/sqrt)
+        :q1  q1
+        :q3  q3}))))
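+
+;; A `:type/Number` fingerprint now looks like (cf. the updated expectations in card_test below):
+;;
+;;   {:min 100.0, :max 100.0, :avg 100.0, :sd nil, :q1 100.0, :q3 100.0}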
 
 (defn- valid-serialized-json?
   "Is x a serialized JSON dictionary or array."
diff --git a/src/metabase/sync/analyze/fingerprint/insights.clj b/src/metabase/sync/analyze/fingerprint/insights.clj
index 6032b5102e5e952388f4b1e5ad4cb50f7133f683..eae4216b873b8d44f9f1b1fdb4c7a12ff3d2f0f4 100644
--- a/src/metabase/sync/analyze/fingerprint/insights.clj
+++ b/src/metabase/sync/analyze/fingerprint/insights.clj
@@ -11,7 +11,8 @@
   [n]
   (fn
     ([] [])
-    ([acc] acc)
+    ([acc]
+     (concat (repeat (- n (count acc)) nil) acc))
     ([acc x]
      (if (< (count acc) n)
        (conj acc x)
@@ -44,24 +45,15 @@
          :else     [reservoir c])))
     ([[reservoir _]] (persistent! reservoir))))
 
-(defn rmse
+(defn mae
   "Given two functions: (fÅ· input) and (fy input), returning the predicted and actual values of y
-   respectively, calculates the root mean squared error of the estimate.
-   https://en.wikipedia.org/wiki/Root-mean-square_deviation"
+   respectively, calculates the mean absolute error of the estimate.
+   https://en.wikipedia.org/wiki/Mean_absolute_error"
   [fy-hat fy]
-  (fn
-    ([] [0.0 0.0])
-    ([[^double c ^double mse :as acc] e]
-     (let [y-hat (fy-hat e)
-           y (fy e)]
-       (if (or (nil? y-hat) (nil? y))
-         acc
-         (let [se (math/sq (- y y-hat))
-               c' (inc c)]
-           [c' (+ mse (/ (- se mse) c'))]))))
-    ([[c mse]]
-     (when (pos? c)
-       (math/sqrt mse)))))
+  ((map (fn [x]
+          (when x
+            (math/abs (- (fy x) (fy-hat x))))))
+   stats/mean))
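+
+;; `mae` is consumed the same way `rmse` was, via `transduce` (see the fit selection below). A sketch with
+;; a made-up validation set of [x y] pairs and the model y-hat = x:
+(comment
+  (transduce identity (mae first second) [[1 1] [2 3] [3 3]])) ; -> 1/3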
 
 (def ^:private trendline-function-families
   ;; http://mathworld.wolfram.com/LeastSquaresFitting.html
@@ -108,21 +100,30 @@
    (redux/fuse
     {:fits (->> (for [{:keys [x-link-fn y-link-fn formula model]} trendline-function-families]
                   (redux/post-complete
-                   (stats/simple-linear-regression (comp x-link-fn fx) (comp y-link-fn fy))
+                   (stats/simple-linear-regression (comp (stats/somef x-link-fn) fx)
+                                                   (comp (stats/somef y-link-fn) fy))
                    (fn [[offset slope]]
-                     (when-not (or (Double/isNaN offset)
+                     (when-not (or (nil? offset)
+                                   (nil? slope)
+                                   (Double/isNaN offset)
                                    (Double/isNaN slope))
                        {:model   (model offset slope)
                         :formula (formula offset slope)}))))
                 (apply redux/juxt))
-     :validation-set ((map (juxt fx fy)) (reservoir-sample validation-set-size))})
+     :validation-set ((keep (fn [row]
+                              (let [x (fx row)
+                                    y (fy row)]
+                                (when (and x y)
+                                  [x y]))))
+                      (reservoir-sample validation-set-size))})
    (fn [{:keys [validation-set fits]}]
-     (->> fits
-          (remove nil?)
-          (apply min-key #(transduce identity
-                                     (rmse (comp (:model %) first) second)
-                                     validation-set))
-          :formula))))
+     (some->> fits
+              (remove nil?)
+              not-empty
+              (apply min-key #(transduce identity
+                                         (mae (comp (:model %) first) second)
+                                         validation-set))
+              :formula))))
 
 (defn- timeseries?
   [{:keys [numbers datetimes others]}]
@@ -149,7 +150,7 @@
                      ;; unit=year workaround. While the field is in this case marked as :type/Text,
                      ;; at this stage in the pipeline the value is still an int, so we can use it
                      ;; directly.
-                     (comp ms->day #(nth % x-position)))]
+                     (comp (stats/somef ms->day) #(nth % x-position)))]
     (apply redux/juxt (for [number-col numbers]
                         (redux/post-complete
                          (let [y-position (:position number-col)
diff --git a/src/metabase/sync/analyze/query_results.clj b/src/metabase/sync/analyze/query_results.clj
index 2f44bd334b782ab635fcca1d499acd812b136f76..568113975740f44be2e78efef217e23f10eb0e12 100644
--- a/src/metabase/sync/analyze/query_results.clj
+++ b/src/metabase/sync/analyze/query_results.clj
@@ -3,10 +3,11 @@
   results. The current focus of this namespace is around column metadata from the results of a query. Going forward
   this is likely to extend beyond just metadata about columns but also about the query results as a whole and over
   time."
-  (:require [metabase.query-processor.interface :as qp.i]
+  (:require [metabase.mbql.predicates :as mbql.preds]
             [metabase.sync.analyze.classifiers.name :as classify-name]
-            [metabase.sync.analyze.fingerprint.fingerprinters :as f]
-            [metabase.sync.analyze.fingerprint.insights :as insights]
+            [metabase.sync.analyze.fingerprint
+             [fingerprinters :as f]
+             [insights :as insights]]
             [metabase.sync.interface :as i]
             [metabase.util :as u]
             [metabase.util.schema :as su]
@@ -16,7 +17,7 @@
 (def ^:private DateTimeUnitKeywordOrString
   "Schema for a valid datetime unit string like \"default\" or \"minute-of-hour\"."
   (s/constrained su/KeywordOrString
-                 qp.i/datetime-field-unit?
+                 #(mbql.preds/DatetimeFieldUnit? (keyword %))
                  "Valid field datetime unit keyword or string"))
 
 (def ^:private ResultColumnMetadata
@@ -70,24 +71,16 @@
                (redux/post-complete
                 (redux/juxt
                  (apply f/col-wise (for [metadata result-metadata]
-                                     (if (and (seq (:name metadata))
-                                              (nil? (:fingerprint metadata)))
+                                     (if-not (:fingerprint metadata)
                                        (f/fingerprinter metadata)
                                        (f/constant-fingerprinter (:fingerprint metadata)))))
                  (insights/insights result-metadata))
                 (fn [[fingerprints insights]]
-                  ;; Rarely certain queries will return columns with no names. For example
-                  ;; `SELECT COUNT(*)` in SQL Server seems to come back with no name. Since we
-                  ;; can't use those as field literals in subsequent queries just filter them out
-                  {:metadata (->> (map (fn [fingerprint metadata]
-                                         (cond
-                                           (instance? Throwable fingerprint)
-                                           metadata
-
-                                           (not-empty (:name metadata))
-                                           (assoc metadata :fingerprint fingerprint)))
-                                       fingerprints
-                                       result-metadata)
-                                  (remove nil?))
+                  {:metadata (map (fn [fingerprint metadata]
+                                    (if (instance? Throwable fingerprint)
+                                      metadata
+                                      (assoc metadata :fingerprint fingerprint)))
+                                  fingerprints
+                                  result-metadata)
                    :insights insights}))
                (:rows results))))
diff --git a/src/metabase/sync/interface.clj b/src/metabase/sync/interface.clj
index 2b9cfcaa995f4ea92e1164c648071502b94f9c15..c0f87e55e2db604bb6f46fd6199a609fc4bf67f5 100644
--- a/src/metabase/sync/interface.clj
+++ b/src/metabase/sync/interface.clj
@@ -89,19 +89,23 @@
   [[s/Any]])
 
 
-(def GlobalFingerprint
-  "Fingerprint values that Fields of all types should have."
-  {(s/optional-key :distinct-count) s/Int})
-
 (def Percent
   "Schema for something represting a percentage. A floating-point value between (inclusive) 0 and 1."
   (s/constrained s/Num #(<= 0 % 1) "Valid percentage between (inclusive) 0 and 1."))
 
+(def GlobalFingerprint
+  "Fingerprint values that Fields of all types should have."
+  {(s/optional-key :distinct-count) s/Int
+   (s/optional-key :nil%)           (s/maybe Percent)})
+
 (def NumberFingerprint
   "Schema for fingerprint information for Fields deriving from `:type/Number`."
   {(s/optional-key :min) (s/maybe s/Num)
    (s/optional-key :max) (s/maybe s/Num)
-   (s/optional-key :avg) (s/maybe s/Num)})
+   (s/optional-key :avg) (s/maybe s/Num)
+   (s/optional-key :q1)  (s/maybe s/Num)
+   (s/optional-key :q3)  (s/maybe s/Num)
+   (s/optional-key :sd)  (s/maybe s/Num)})
 
 (def TextFingerprint
   "Schema for fingerprint information for Fields deriving from `:type/Text`."
@@ -160,6 +164,7 @@
   "Map of fingerprint version to the set of Field base types that need to be upgraded to this version the next
    time we do analysis. The highest-numbered entry is considered the latest version of fingerprints."
   {1 #{:type/*}
+   2 #{:type/Number}
    3 #{:type/DateTime}})
 
 (def latest-fingerprint-version
diff --git a/src/metabase/task/sync_databases.clj b/src/metabase/task/sync_databases.clj
index 234ec36cd489c423f2e382259a54b4d54b37df8a..5a8f9bb4fc941e69a3cf049c6b1e5a2e364bb8e1 100644
--- a/src/metabase/task/sync_databases.clj
+++ b/src/metabase/task/sync_databases.clj
@@ -64,7 +64,7 @@
    :job-class          UpdateFieldValues})
 
 
-;; These getter functions are not strictly neccesary but are provided primarily so we can get some extra validation by
+;; These getter functions are not strictly necessary but are provided primarily so we can get some extra validation by
 ;; using them
 
 (s/defn ^:private job-key :- JobKey
diff --git a/src/metabase/util.clj b/src/metabase/util.clj
index 6d610201598f4a85f140dc5742eba4e3e1a6d6b0..ddf34cce0296c52b293f471658cd2c06cbce2b0e 100644
--- a/src/metabase/util.clj
+++ b/src/metabase/util.clj
@@ -3,12 +3,14 @@
   (:require [clojure
              [data :as data]
              [pprint :refer [pprint]]
-             [string :as s]]
+             [string :as s]
+             [walk :as walk]]
             [clojure.java.classpath :as classpath]
             [clojure.math.numeric-tower :as math]
             [clojure.tools.logging :as log]
             [clojure.tools.namespace.find :as ns-find]
             [colorize.core :as colorize]
+            [medley.core :as m]
             [metabase.config :as config]
             [metabase.util.i18n :refer [trs]]
             [ring.util.codec :as codec])
@@ -256,7 +258,8 @@
 
 (defprotocol ^:private IFilteredStacktrace
   (filtered-stacktrace [this]
-    "Get the stack trace associated with E and return it as a vector with non-metabase frames filtered out."))
+    "Get the stack trace associated with E and return it as a vector with non-metabase frames after the last Metabase
+    frame filtered out."))
 
 ;; These next two functions are a workaround for this bug https://dev.clojure.org/jira/browse/CLJ-1790
 ;; When Throwable/Thread are type-hinted, they return an array of type StackTraceElement, this causes
@@ -279,13 +282,25 @@
   IFilteredStacktrace {:filtered-stacktrace (fn [this]
                                               (filtered-stacktrace (thread-get-stack-trace this)))})
 
+(defn- metabase-frame? [frame]
+  (re-find #"metabase" (str frame)))
+
 ;; StackTraceElement[] is what the `.getStackTrace` method for Thread and Throwable returns
 (extend (Class/forName "[Ljava.lang.StackTraceElement;")
-  IFilteredStacktrace {:filtered-stacktrace (fn [this]
-                                              (vec (for [frame this
-                                                         :let  [s (str frame)]
-                                                         :when (re-find #"metabase" s)]
-                                                     (s/replace s #"^metabase\." ""))))})
+  IFilteredStacktrace
+  {:filtered-stacktrace
+   (fn [this]
+     ;; keep all the frames before the last Metabase frame, but then filter out any other non-Metabase frames after
+     ;; that
+     (let [[frames-after-last-mb other-frames]     (split-with (complement metabase-frame?)
+                                                               (map str (seq this)))
+           [last-mb-frame & frames-before-last-mb] (map #(s/replace % #"^metabase\." "")
+                                                        (filter metabase-frame? other-frames))]
+       (concat
+        frames-after-last-mb
+        ;; add a little arrow to the frame so it stands out more
+        (cons (str "--> " last-mb-frame)
+              frames-before-last-mb))))})
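+
+;; Sketch with hypothetical frames: non-Metabase frames above the most recent Metabase frame are kept,
+;; everything below is filtered to Metabase frames only, and the most recent Metabase frame gets the arrow:
+;;
+;;   ["ring.adapter$jetty" "metabase.api$handler" "clojure.core$apply" "metabase.qp$process"]
+;;   ;; -> ("ring.adapter$jetty" "--> api$handler" "qp$process")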
 
 (defn deref-with-timeout
   "Call `deref` on a FUTURE and throw an exception if it takes more than TIMEOUT-MS."
@@ -554,3 +569,25 @@
   ([java-version-str]
    (when-let [[_ java-major-version-str] (re-matches #"^(?:1\.)?(\d+).*$" java-version-str)]
      (>= (Integer/parseInt java-major-version-str) 9))))
+
+
+(defn snake-key
+  "Convert a keyword or string `k` from `lisp-case` to `snake-case`."
+  [k]
+  (if (keyword? k)
+    (keyword (snake-key (name k)))
+    (s/replace k #"-" "_")))
+
+(defn recursive-map-keys
+  "Recursively replace the keys in a map with the value of `(f key)`."
+  [f m]
+  (walk/postwalk
+   #(if (map? %)
+      (m/map-keys f %)
+      %)
+   m))
+
+(defn snake-keys
+  "Convert the keys in a map from `lisp-case` to `snake-case`."
+  [m]
+  (recursive-map-keys snake-key m))
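+
+;; A quick sketch:
+(comment
+  (snake-keys {:field-id 1, :nested {:table-id 2}})) ; -> {:field_id 1, :nested {:table_id 2}}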
diff --git a/src/metabase/util/date.clj b/src/metabase/util/date.clj
index 4c8cda71f8b3346c8a70d39114fd368b58c952b6..b4503600bd2e163daea3ce153b5fec0d0986aa6c 100644
--- a/src/metabase/util/date.clj
+++ b/src/metabase/util/date.clj
@@ -61,13 +61,16 @@
         (when (and (not report-timezone)
                    jvm-data-tz-conflict?)
           (log/warn (str (trs "Possible timezone conflict found on database {0}." (:name db))
+                         " "
                          (trs "JVM timezone is {0} and detected database timezone is {1}."
                               (.getID jvm-timezone) (.getID data-timezone))
+                         " "
                          (trs "Configure a report timezone to ensure proper date and time conversions."))))
         ;; This database doesn't support a report timezone, check the JVM and data timezones, if they don't match,
         ;; warn the user
         (when jvm-data-tz-conflict?
           (log/warn (str (trs "Possible timezone conflict found on database {0}." (:name db))
+                         " "
                          (trs "JVM timezone is {0} and detected database timezone is {1}."
                               (.getID jvm-timezone) (.getID data-timezone)))))))))
 
diff --git a/src/metabase/util/i18n.clj b/src/metabase/util/i18n.clj
index 17c32e25e248009653e1eb14b89b16efc068dc32..755e9d0fd575dfb21c7a9bb1109d57a50c51bcfa 100644
--- a/src/metabase/util/i18n.clj
+++ b/src/metabase/util/i18n.clj
@@ -67,7 +67,7 @@
   localized string needs to be 'late bound' and only occur when the user's locale is in scope. Calling `str` on the
   results of this invocation will lookup the translated version of the string."
   [msg & args]
-  `(UserLocalizedString. (namespace-munge *ns*) ~msg ~(vec args)))
+  `(UserLocalizedString. ~(namespace-munge *ns*) ~msg ~(vec args)))
 
 (defmacro trs
   "Similar to `puppetlabs.i18n.core/trs` but creates a `SystemLocalizedString` instance so that conversion to the
@@ -75,7 +75,7 @@
   overridden/changed by a setting. Calling `str` on the results of this invocation will lookup the translated version
   of the string."
   [msg & args]
-  `(SystemLocalizedString. (namespace-munge *ns*) ~msg ~(vec args)))
+  `(SystemLocalizedString. ~(namespace-munge *ns*) ~msg ~(vec args)))
 
 (def ^:private localized-string-checker
   "Compiled checker for `LocalizedString`s which is more efficient when used repeatedly like in `localized-string?`
diff --git a/test/metabase/api/automagic_dashboards_test.clj b/test/metabase/api/automagic_dashboards_test.clj
index 7af0566573630e5447278bd5a3d41980c2acadab..929aea8f9a439d8514604df86a4765d13dd5a749 100644
--- a/test/metabase/api/automagic_dashboards_test.clj
+++ b/test/metabase/api/automagic_dashboards_test.clj
@@ -130,21 +130,23 @@
 
 ;;; ------------------- Comparisons -------------------
 
-(def ^:private segment {:table_id (data/id :venues)
-                        :definition {:filter [:> [:field-id (data/id :venues :price)] 10]}})
+(def ^:private segment
+  (delay
+   {:table_id   (data/id :venues)
+    :definition {:filter [:> [:field-id (data/id :venues :price)] 10]}}))
 
 (expect
-  (tt/with-temp* [Segment [{segment-id :id} segment]]
+  (tt/with-temp* [Segment [{segment-id :id} @segment]]
     (api-call "table/%s/compare/segment/%s"
               [(data/id :venues) segment-id])))
 
 (expect
-  (tt/with-temp* [Segment [{segment-id :id} segment]]
+  (tt/with-temp* [Segment [{segment-id :id} @segment]]
     (api-call "table/%s/rule/example/indepth/compare/segment/%s"
               [(data/id :venues) segment-id])))
 
 (expect
-  (tt/with-temp* [Segment [{segment-id :id} segment]]
+  (tt/with-temp* [Segment [{segment-id :id} @segment]]
     (api-call "adhoc/%s/cell/%s/compare/segment/%s"
               [(->> {:query {:filter [:> [:field-id (data/id :venues :price)] 10]
                              :source-table (data/id :venues)}
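The `delay` wrapper (and the matching `@segment` derefs) is the point of this change: `data/id` queries the application database, so evaluating it in a bare `def` at namespace load time can fail before the test fixtures exist. `delay` defers evaluation to the first deref, at test runtime. The general pattern, with a hypothetical fixture function:

    (defn- compute-fixture []      ; hypothetical; stands in for the (data/id ...) calls
      (println "runs once, on first deref")
      {:table-id 42})

    (def ^:private fixture
      (delay (compute-fixture)))   ; nothing executes at namespace load time

    ;; @fixture forces the body on first use and caches the result:
    ;; @fixture  ;; => {:table-id 42}
    ;; @fixture  ;; => {:table-id 42}  (cached; compute-fixture is not re-run)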
diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj
index 408f811150e0f245757be98ee87d695435328feb..f62b5d2bb55006049b3a22b87096cc9885183f94 100644
--- a/test/metabase/api/card_test.clj
+++ b/test/metabase/api/card_test.clj
@@ -274,6 +274,7 @@
             :can_write              true
             :dashboard_count        0
             :read_permissions       nil
+            :result_metadata        true
             :creator                (match-$ (fetch-user :rasta)
                                       {:common_name  "Rasta Toucan"
                                        :is_superuser false
@@ -285,20 +286,19 @@
                                        :email        "rasta@metabase.com"
                                        :id           $})})
     (tu/with-non-admin-groups-no-root-collection-perms
-      (tt/with-temp* [Database   [db]
-                      Table      [table {:db_id (u/get-id db)}]
-                      Collection [collection]]
+      (tt/with-temp* [Collection [collection]]
         (tu/with-model-cleanup [Card]
           (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection)
           (-> ((user->client :rasta) :post 200 "card"
-               (assoc (card-with-name-and-query card-name (mbql-count-query (u/get-id db) (u/get-id table)))
+               (assoc (card-with-name-and-query card-name (mbql-count-query (data/id) (data/id :venues)))
                  :collection_id (u/get-id collection)))
               (dissoc :created_at :updated_at :id)
               (update :table_id integer?)
               (update :database_id integer?)
               (update :collection_id integer?)
               (update :dataset_query map?)
-              (update :collection map?)))))))
+              (update :collection map?)
+              (update :result_metadata (partial every? map?))))))))
 
 ;; Make sure when saving a Card the query metadata is saved (if correct)
 (expect
@@ -330,8 +330,9 @@
     :display_name "count"
     :name         "count"
     :special_type "type/Quantity"
-    :fingerprint  {:global {:distinct-count 1},
-                   :type   {:type/Number {:min 100.0, :max 100.0, :avg 100.0}}}}]
+    :fingerprint  {:global {:distinct-count 1
+                            :nil%           0.0},
+                   :type   {:type/Number {:min 100.0, :max 100.0, :avg 100.0, :q1 100.0, :q3 100.0, :sd nil}}}}]
   (tu/with-non-admin-groups-no-root-collection-perms
     (let [metadata  [{:base_type    :type/Integer
                       :display_name "Count Chocula"
@@ -381,11 +382,13 @@
 ;;; +----------------------------------------------------------------------------------------------------------------+
 
 ;; Test that we can fetch a card
-(tt/expect-with-temp [Database   [db]
-                      Table      [table {:db_id (u/get-id db)}]
+(tt/expect-with-temp [Database   [db          (select-keys (data/db) [:engine :details])]
+                      Table      [table       (-> (Table (data/id :venues))
+                                                  (dissoc :id)
+                                                  (assoc :db_id (u/get-id db)))]
                       Collection [collection]
-                      Card       [card  {:collection_id (u/get-id collection)
-                                         :dataset_query (mbql-count-query (u/get-id db) (u/get-id table))}]]
+                      Card       [card        {:collection_id (u/get-id collection)
+                                               :dataset_query (mbql-count-query (u/get-id db) (u/get-id table))}]]
   (merge card-defaults
          (match-$ card
            {:dashboard_count        0
@@ -538,8 +541,9 @@
     :display_name "count"
     :name         "count"
     :special_type "type/Quantity"
-    :fingerprint  {:global {:distinct-count 1},
-                   :type   {:type/Number {:min 100.0, :max 100.0, :avg 100.0}}}}]
+    :fingerprint  {:global {:distinct-count 1
+                            :nil%           0.0},
+                   :type   {:type/Number {:min 100.0, :max 100.0, :avg 100.0, :q1 100.0, :q3 100.0, :sd nil}}}}]
   (let [metadata [{:base_type    :type/Integer
                    :display_name "Count Chocula"
                    :name         "count_chocula"
diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj
index 6624910d24a6740797921f826e38624f975f087a..56212e79584f7465f54415f4b2511caa7e433401 100644
--- a/test/metabase/api/database_test.clj
+++ b/test/metabase/api/database_test.clj
@@ -24,9 +24,8 @@
             [metabase.test.data
              [datasets :as datasets]
              [users :refer :all]]
-            [toucan
-             [db :as db]
-             [hydrate :as hydrate]]
+            [metabase.test.util.log :as tu.log]
+            [toucan.db :as db]
             [toucan.util.test :as tt]))
 
 ;; HELPER FNS
@@ -50,12 +49,12 @@
       (finally
         (db/delete! Database :id (:id db))))))
 
-(defmacro ^:private expect-with-temp-db-created-via-api {:style/indent 1} [[binding & [options]] expected actual]
+(defmacro ^:private expect-with-temp-db-created-via-api {:style/indent 1} [[db-binding & [options]] expected actual]
   ;; use `gensym` instead of auto-gensym here so we can be sure it's a unique symbol every time. Otherwise, since
   ;; expectations hashes its body to generate function names, it will treat every usage of this as the same test and
   ;; only a single one will end up being run
   (let [result (gensym "result-")]
-    `(let [~result (delay (do-with-temp-db-created-via-api ~options (fn [~binding]
+    `(let [~result (delay (do-with-temp-db-created-via-api ~options (fn [~db-binding]
                                                                       [~expected
                                                                        ~actual])))]
        (expect
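To illustrate the distinction the comment draws: auto-gensym names are fixed when the syntax-quoted form is read, so every expansion of a macro reuses the same symbol, whereas an explicit `(gensym ...)` call mints a fresh symbol per expansion (the numbers below are illustrative):

    (defmacro auto [] `(quote result#))
    ;; (auto) ;; => result__2077__auto__
    ;; (auto) ;; => result__2077__auto__  (same symbol on every expansion)

    (gensym "result-")  ;; => result-2081
    (gensym "result-")  ;; => result-2083  (fresh symbol per call)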
@@ -181,62 +180,40 @@
                 :fields_hash     $}))
       (update :entity_type (comp (partial str "entity/") name))))
 
-
-;; TODO - this is a test code smell, each test should clean up after itself and this step shouldn't be neccessary. One
-;; day we should be able to remove this! If you're writing a NEW test that needs this, fix your brain and your test!
-;; To reïterate, this is BAD BAD BAD BAD BAD BAD! It will break tests if you use it! Don't use it!
-(defn- ^:deprecated delete-randomly-created-databases!
-  "Delete all the randomly created Databases we've made so far. Optionally specify one or more IDs to SKIP."
-  [& {:keys [skip]}]
-  (let [ids-to-skip (into (set skip)
-                          (for [engine datasets/all-valid-engines
-                                :let   [id (datasets/when-testing-engine engine
-                                             (:id (data/get-or-create-test-data-db! (driver/engine->driver engine))))]
-                                :when  id]
-                            id))]
-    (when-let [dbs (seq (db/select [Database :name :engine :id] :id [:not-in ids-to-skip]))]
-      (println (u/format-color 'red (str "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
-                                         "WARNING: deleting randomly created databases:\n%s"
-                                         "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n")
-                 (u/pprint-to-str (for [db dbs]
-                                    (dissoc db :features))))))
-    (db/delete! Database :id [:not-in ids-to-skip])))
-
-
 ;; ## GET /api/database
 ;; Test that we can get all the DBs (ordered by name)
 ;; Database details *should not* come back for Rasta since she's not a superuser
-(expect-with-temp-db-created-via-api [{db-id :id}]
-  (set (filter identity (conj (for [engine datasets/all-valid-engines]
-                                (datasets/when-testing-engine engine
-                                  (merge default-db-details
-                                         (match-$ (data/get-or-create-test-data-db! (driver/engine->driver engine))
-                                           {:created_at         $
-                                            :engine             (name $engine)
-                                            :id                 $
-                                            :updated_at         $
-                                            :timezone           $
-                                            :name               "test-data"
-                                            :native_permissions "write"
-                                            :features           (map name (driver/features (driver/engine->driver engine)))}))))
-                              (merge default-db-details
-                                     (match-$ (Database db-id)
-                                       {:created_at         $
-                                        :engine             "postgres"
-                                        :id                 $
-                                        :updated_at         $
-                                        :name               $
-                                        :timezone           $
-                                        :native_permissions "write"
-                                        :features           (map name (driver/features (driver/engine->driver :postgres)))})))))
-  (do
-    (delete-randomly-created-databases! :skip [db-id])
-    (set ((user->client :rasta) :get 200 "database"))))
+(expect-with-temp-db-created-via-api [{db-id :id, db-name :name}]
+  (set (filter some? (conj (for [engine datasets/all-valid-engines]
+                             (datasets/when-testing-engine engine
+                               (merge default-db-details
+                                      (match-$ (data/get-or-create-test-data-db! (driver/engine->driver engine))
+                                        {:created_at         $
+                                         :engine             (name $engine)
+                                         :id                 $
+                                         :updated_at         $
+                                         :timezone           $
+                                         :name               "test-data"
+                                         :native_permissions "write"
+                                         :features           (map name (driver/features (driver/engine->driver engine)))}))))
+                           (merge default-db-details
+                                  (match-$ (Database db-id)
+                                    {:created_at         $
+                                     :engine             "postgres"
+                                     :id                 $
+                                     :updated_at         $
+                                     :name               $
+                                     :timezone           $
+                                     :native_permissions "write"
+                                     :features           (map name (driver/features (driver/engine->driver :postgres)))})))))
+  (->> ((user->client :rasta) :get 200 "database")
+       (filter #(#{"test-data" db-name} (:name %)))
+       set))
 
 
 
 ;; GET /api/databases (include tables)
-(expect-with-temp-db-created-via-api [{db-id :id}]
+(expect-with-temp-db-created-via-api [{db-id :id, db-name :name}]
   (set (cons (merge default-db-details
                     (match-$ (Database db-id)
                       {:created_at         $
@@ -263,9 +240,9 @@
                                               :tables             (sort-by :name (for [table (db/select Table, :db_id (:id database))]
                                                                                    (table-details table)))
                                               :features           (map name (driver/features (driver/engine->driver engine)))}))))))))
-  (do
-    (delete-randomly-created-databases! :skip [db-id])
-    (set ((user->client :rasta) :get 200 "database" :include_tables true))))
+  (->> ((user->client :rasta) :get 200 "database" :include_tables true)
+       (filter #(#{"test-data" db-name} (:name %)))
+       set))
 
 (def ^:private default-field-details
   {:description        nil
@@ -646,13 +623,15 @@
 (expect
   {:valid false, :message "Error!"}
   (with-redefs [database-api/test-database-connection test-database-connection]
-    (#'database-api/test-connection-details "h2" {:db "ABC"})))
+    (tu.log/suppress-output
+      (#'database-api/test-connection-details "h2" {:db "ABC"}))))
 
 (expect
   {:valid false}
   (with-redefs [database-api/test-database-connection test-database-connection]
-    ((user->client :crowberto) :post 200 "database/validate"
-     {:details {:engine :h2, :details {:db "ABC"}}})))
+    (tu.log/suppress-output
+      ((user->client :crowberto) :post 200 "database/validate"
+       {:details {:engine :h2, :details {:db "ABC"}}}))))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
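`tu.log/suppress-output` appears throughout these test changes wherever a test intentionally provokes a connection or query error, keeping the expected noise out of CI logs. As a rough sketch of the idea only (the real `metabase.test.util.log` implementation may differ, e.g. by also muffling log4j appenders):

    (defmacro suppress-output
      "Execute body with stdout and stderr redirected to throwaway writers."
      [& body]
      `(binding [*out* (java.io.StringWriter.)
                 *err* (java.io.StringWriter.)]
         ~@body))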
diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj
index 24869072a7edbfbe5faa696816896088d36cb0e2..6d693e86c59ca2d19c4bb530bc9a0be9475ae6c3 100644
--- a/test/metabase/api/dataset_test.clj
+++ b/test/metabase/api/dataset_test.clj
@@ -21,6 +21,7 @@
              [dataset-definitions :as defs]
              [datasets :refer [expect-with-engine]]
              [users :refer :all]]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db]
             [toucan.util.test :as tt]))
 
@@ -52,11 +53,13 @@
 ;; Just a basic sanity check to make sure Query Processor endpoint is still working correctly.
 (expect
   [ ;; API call response
-   {:data                   {:rows    [[1000]]
-                             :columns ["count"]
-                             :cols    [{:base_type "type/Integer", :special_type "type/Number", :name "count",
-                                        :display_name "count", :id nil, :table_id nil, :description nil, :target nil,
-                                        :extra_info {}, :source "aggregation", :settings nil}]
+   {:data                   {:rows        [[1000]]
+                             :columns     ["count"]
+                             :cols        [{:base_type    "type/Integer"
+                                            :special_type "type/Number"
+                                            :name         "count"
+                                            :display_name "count"
+                                            :source       "aggregation"}]
                              :native_form true}
     :row_count              1
     :status                 "completed"
@@ -124,10 +127,11 @@
   (let [check-error-message (fn [output]
                               (update output :error (fn [error-message]
                                                       (boolean (re-find #"Syntax error in SQL statement" error-message)))))
-        result              ((user->client :rasta) :post 200 "dataset" {:database (id)
-                                                                        :type     "native"
-                                                                        :native   {:query "foobar"}})]
-    [(check-error-message (format-response result))
+        result              (tu.log/suppress-output
+                              ((user->client :rasta) :post 200 "dataset" {:database (id)
+                                                                          :type     "native"
+                                                                          :native   {:query "foobar"}}))]
+    [(check-error-message (dissoc (format-response result) :stacktrace))
      (check-error-message (format-response (most-recent-query-execution)))]))
 
 
diff --git a/test/metabase/api/embed_test.clj b/test/metabase/api/embed_test.clj
index 8e84a9910a69dcc40bebe91d6eed7d781a13e7c5..ec46b0f61e9568593856b1f056110161414a40fd 100644
--- a/test/metabase/api/embed_test.clj
+++ b/test/metabase/api/embed_test.clj
@@ -22,6 +22,7 @@
             [metabase.test
              [data :as data]
              [util :as tu]]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db]
             [toucan.util.test :as tt])
   (:import java.io.ByteArrayInputStream))
@@ -67,9 +68,11 @@
 (defn successful-query-results
   ([]
    {:data       {:columns ["count"]
-                 :cols    [{:description nil, :table_id nil, :special_type "type/Number", :name "count",
-                            :source "aggregation", :extra_info {}, :id nil, :target nil, :display_name "count",
-                            :base_type "type/Integer", :settings nil}]
+                 :cols    [{:base_type    "type/Integer"
+                            :special_type "type/Number"
+                            :name         "count"
+                            :display_name "count"
+                            :source       "aggregation"}]
                  :rows    [[100]]}
     :json_query {:parameters nil}
     :status     "completed"})
@@ -194,11 +197,12 @@
 ;; query info)
 (expect-for-response-formats [response-format]
   "An error occurred while running the query."
-  (with-embedding-enabled-and-new-secret-key
-    (with-temp-card [card {:enable_embedding true, :dataset_query {:database (data/id)
-                                                                   :type     :native
-                                                                   :native   {:query "SELECT * FROM XYZ"}}}]
-      (http/client :get 400 (card-query-url card response-format)))))
+  (tu.log/suppress-output
+    (with-embedding-enabled-and-new-secret-key
+      (with-temp-card [card {:enable_embedding true, :dataset_query {:database (data/id)
+                                                                     :type     :native
+                                                                     :native   {:query "SELECT * FROM XYZ"}}}]
+        (http/client :get 400 (card-query-url card response-format))))))
 
 ;; check that the endpoint doesn't work if embedding isn't enabled
 (expect-for-response-formats [response-format]
@@ -393,12 +397,13 @@
 ;; query info)
 (expect
   "An error occurred while running the query."
-  (with-embedding-enabled-and-new-secret-key
-    (with-temp-dashcard [dashcard {:dash {:enable_embedding true}
-                                   :card {:dataset_query {:database (data/id)
-                                                          :type     :native,
-                                                          :native   {:query "SELECT * FROM XYZ"}}}}]
-      (http/client :get 400 (dashcard-url dashcard)))))
+  (tu.log/suppress-output
+    (with-embedding-enabled-and-new-secret-key
+      (with-temp-dashcard [dashcard {:dash {:enable_embedding true}
+                                     :card {:dataset_query {:database (data/id)
+                                                            :type     :native,
+                                                            :native   {:query "SELECT * FROM XYZ"}}}}]
+        (http/client :get 400 (dashcard-url dashcard))))))
 
 ;; check that the endpoint doesn't work if embedding isn't enabled
 (expect
diff --git a/test/metabase/api/field_test.clj b/test/metabase/api/field_test.clj
index 293c2c11e9a5bdd9e5049458324e25dda47cb6b7..1dc71b8d106f571f03a14a51509e79d77ea907d3 100644
--- a/test/metabase/api/field_test.clj
+++ b/test/metabase/api/field_test.clj
@@ -19,7 +19,8 @@
              [db :as db]
              [hydrate :refer [hydrate]]]
             [toucan.util.test :as tt]
-            [metabase.util :as u]))
+            [metabase.util :as u]
+            [metabase.test.util.log :as tu.log]))
 
 ;; Helper Fns
 
@@ -227,12 +228,17 @@
 (expect
   [{:values [], :field_id true}
    {:status "success"}
+   {:values [1 2 3 4], :human_readable_values ["$" "$$" "$$$" "$$$$"]}
    {:values [[1 "$"] [2 "$$"] [3 "$$$"] [4 "$$$$"]], :field_id true}]
   (tt/with-temp* [Field [{field-id :id} list-field]]
     (mapv tu/boolean-ids-and-timestamps
-          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+          [ ;; this will print an error message because it will try to fetch the FieldValues, but the Field doesn't
+           ;; exist; we can ignore that
+           (tu.log/suppress-output
+             ((user->client :crowberto) :get 200 (format "field/%d/values" field-id)))
            ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
             {:values [[1 "$"] [2 "$$"] [3 "$$$"] [4 "$$$$"]]})
+           (db/select-one [FieldValues :values :human_readable_values] :field_id field-id)
            ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
 
 ;; Can unset values
diff --git a/test/metabase/api/pulse_test.clj b/test/metabase/api/pulse_test.clj
index bbc76a33db91eb26d021163a61f05693887dd755..296807543d40b5d83aeeacbd2e75aa0afbeedf16 100644
--- a/test/metabase/api/pulse_test.clj
+++ b/test/metabase/api/pulse_test.clj
@@ -23,9 +23,7 @@
             [metabase.test
              [data :as data]
              [util :as tu]]
-            [metabase.test.data
-             [dataset-definitions :as defs]
-             [users :refer :all]]
+            [metabase.test.data.users :refer :all]
             [metabase.test.mock.util :refer [pulse-channel-defaults]]
             [toucan.db :as db]
             [toucan.util.test :as tt]))
@@ -754,8 +752,10 @@
 ;; Check that a rando (e.g. someone without collection write access) isn't allowed to delete a pulse
 (expect
   "You don't have permissions to do that."
-  (tt/with-temp* [Database  [db]
-                  Table     [table {:db_id (u/get-id db)}]
+  (tt/with-temp* [Database  [db    (select-keys (data/db) [:engine :details])]
+                  Table     [table (-> (Table (data/id :venues))
+                                       (dissoc :id)
+                                       (assoc :db_id (u/get-id db)))]
                   Card      [card  {:dataset_query {:database (u/get-id db)
                                                     :type     "query"
                                                     :query    {:source-table (u/get-id table)
@@ -779,7 +779,7 @@
   [(assoc (pulse-details pulse-1) :can_write false, :collection_id true)
    (assoc (pulse-details pulse-2) :can_write false, :collection_id true)]
   (with-pulses-in-readable-collection [pulse-1 pulse-2]
-    ;; delete anything else in DB just to be sure; this step may not be neccesary any more
+    ;; delete anything else in DB just to be sure; this step may not be necessary any more
     (db/delete! Pulse :id [:not-in #{(u/get-id pulse-1)
                                      (u/get-id pulse-2)}])
     (for [pulse ((user->client :rasta) :get 200 "pulse")]
@@ -791,7 +791,7 @@
   [(assoc (pulse-details pulse-1) :can_write true)
    (assoc (pulse-details pulse-2) :can_write true)]
   (do
-    ;; delete anything else in DB just to be sure; this step may not be neccesary any more
+    ;; delete anything else in DB just to be sure; this step may not be necessary any more
     (db/delete! Pulse :id [:not-in #{(u/get-id pulse-1)
                                      (u/get-id pulse-2)}])
     ((user->client :crowberto) :get 200 "pulse")))
@@ -855,13 +855,11 @@
   (tu/with-non-admin-groups-no-root-collection-perms
     (tu/with-model-cleanup [Pulse]
       (et/with-fake-inbox
-        (data/with-db (data/get-or-create-database! defs/sad-toucan-incidents)
+        (data/dataset sad-toucan-incidents
           (tt/with-temp* [Collection [collection]
-                          Database   [db]
-                          Table      [table {:db_id (u/get-id db)}]
-                          Card       [card  {:dataset_query {:database (u/get-id db)
+                          Card       [card  {:dataset_query {:database (data/id)
                                                              :type     "query"
-                                                             :query    {:source-table (u/get-id table),
+                                                             :query    {:source-table (data/id :incidents)
                                                                         :aggregation  [[:count]]}}}]]
             (perms/grant-collection-readwrite-permissions! (perms-group/all-users) collection)
             (card-api-test/with-cards-in-readable-collection [card]
@@ -886,8 +884,10 @@
 ;; This test follows a flow that the user/UI would follow by first creating a pulse, then making a small change to
 ;; that pulse and testing it. The primary purpose of this test is to ensure that the pulse/test endpoint accepts data
 ;; of the same format that the pulse GET returns
-(tt/expect-with-temp [Card [card-1]
-                      Card [card-2]]
+(tt/expect-with-temp [Card [card-1 {:dataset_query
+                                    {:database (data/id), :type :query, :query {:source-table (data/id :venues)}}}]
+                      Card [card-2 {:dataset_query
+                                    {:database (data/id), :type :query, :query {:source-table (data/id :venues)}}}]]
   {:response {:ok true}
    :emails   (et/email-to :rasta {:subject "Pulse: A Pulse"
                                   :body    {"A Pulse" true}})}
@@ -918,6 +918,7 @@
             {:response ((user->client :rasta) :post 200 "pulse/test" (assoc result :channels [email-channel]))
              :emails   (et/regex-email-bodies #"A Pulse")}))))))
 
+
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                         GET /api/pulse/form_input                                              |
 ;;; +----------------------------------------------------------------------------------------------------------------+
diff --git a/test/metabase/api/session_test.clj b/test/metabase/api/session_test.clj
index 43fe66d506bd7d8e552b28768e1f146d05dad59d..e46dbce332db07230d68b2032d6cf932cf7d4260 100644
--- a/test/metabase/api/session_test.clj
+++ b/test/metabase/api/session_test.clj
@@ -15,6 +15,7 @@
              [util :as tu]]
             [metabase.test.data.users :refer :all]
             [metabase.test.integrations.ldap :refer [expect-with-ldap-server]]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db]
             [toucan.util.test :as tt]))
 
@@ -286,13 +287,13 @@
   (client :post 400 "session" (user->credentials :lucky))) ; NOTE: there's a different password in LDAP for Lucky
 
 ;; Test that login will fallback to local for broken LDAP settings
-;; NOTE: This will ERROR out in the logs, it's normal
 (expect-with-ldap-server
   true
   (tu/with-temporary-setting-values [ldap-user-base "cn=wrong,cn=com"]
     ;; delete all other sessions for the bird first, otherwise test doesn't seem to work (TODO - why?)
     (do (db/simple-delete! Session, :user_id (user->id :rasta))
-        (tu/is-uuid-string? (:id (client :post 200 "session" (user->credentials :rasta)))))))
+        (tu/is-uuid-string? (:id (tu.log/suppress-output
+                                   (client :post 200 "session" (user->credentials :rasta))))))))
 
 ;; Test that we can login with LDAP with new user
 (expect-with-ldap-server
diff --git a/test/metabase/api/table_test.clj b/test/metabase/api/table_test.clj
index 46c6333bf1c0c315d63fd1b2b8f12ede3c048d09..fce422c37eddb291f8fc6b6d1a8be2f8eca19398 100644
--- a/test/metabase/api/table_test.clj
+++ b/test/metabase/api/table_test.clj
@@ -18,7 +18,6 @@
              [permissions :as perms]
              [permissions-group :as perms-group]
              [table :as table :refer [Table]]]
-            [metabase.query-processor.util :as qputil]
             [metabase.test
              [data :as data]
              [util :as tu :refer [match-$]]]
@@ -493,7 +492,12 @@
     ;; run the Card which will populate its result_metadata column
     ((user->client :crowberto) :post 200 (format "card/%d/query" (u/get-id card)))
     ;; Now fetch the metadata for this "table"
-    (tu/round-all-decimals 2 ((user->client :crowberto) :get 200 (format "table/card__%d/query_metadata" (u/get-id card))))))
+    (->> card
+         u/get-id
+         (format "table/card__%d/query_metadata")
+         ((user->client :crowberto) :get 200)
+         (tu/round-fingerprint-cols [:fields])
+         (tu/round-all-decimals 2))))
 
 ;; Test date dimensions being included with a nested query
 (tt/expect-with-temp [Card [card {:name          "Users"
@@ -516,7 +520,8 @@
                           :special_type             "type/Name"
                           :default_dimension_option nil
                           :dimension_options        []
-                          :fingerprint              {:global {:distinct-count 15},
+                          :fingerprint              {:global {:distinct-count 15
+                                                              :nil%           0.0},
                                                      :type   {:type/Text {:percent-json  0.0, :percent-url    0.0,
                                                                           :percent-email 0.0, :average-length 13.27}}}}
                          {:name                     "LAST_LOGIN"
@@ -527,14 +532,20 @@
                           :special_type             nil
                           :default_dimension_option (var-get #'table-api/date-default-index)
                           :dimension_options        (var-get #'table-api/datetime-dimension-indexes)
-                          :fingerprint              {:global {:distinct-count 15},
+                          :fingerprint              {:global {:distinct-count 15
+                                                              :nil%           0.0},
                                                      :type   {:type/DateTime {:earliest "2014-01-01T08:30:00.000Z",
                                                                               :latest   "2014-12-05T15:15:00.000Z"}}}}]})
   (do
     ;; run the Card which will populate its result_metadata column
     ((user->client :crowberto) :post 200 (format "card/%d/query" (u/get-id card)))
     ;; Now fetch the metadata for this "table"
-    (tu/round-all-decimals 2 ((user->client :crowberto) :get 200 (format "table/card__%d/query_metadata" (u/get-id card))))))
+    (->> card
+         u/get-id
+         (format "table/card__%d/query_metadata")
+         ((user->client :crowberto) :get 200)
+         (tu/round-fingerprint-cols [:fields])
+         (tu/round-all-decimals 2))))
 
 
 ;; make sure GET /api/table/:id/fks just returns nothing for 'virtual' tables
@@ -691,7 +702,7 @@
 
 (qpt/expect-with-non-timeseries-dbs-except #{:oracle :mongo :redshift :sparksql}
   []
-  (data/with-db (data/get-or-create-database! defs/test-data-with-time)
+  (data/dataset test-data-with-time
     (let [response ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :users)))]
       (dimension-options-for-field response "last_login_time"))))
 
diff --git a/test/metabase/automagic_dashboards/comparison_test.clj b/test/metabase/automagic_dashboards/comparison_test.clj
index 5d374c6e19d12df5bf4448cff5851963f9a18f8d..52cdbe40514bb55c4e1ceaf632fb4f502c796492 100644
--- a/test/metabase/automagic_dashboards/comparison_test.clj
+++ b/test/metabase/automagic_dashboards/comparison_test.clj
@@ -12,8 +12,10 @@
             [metabase.test.automagic-dashboards :refer :all]
             [toucan.util.test :as tt]))
 
-(def ^:private segment {:table_id (data/id :venues)
-                        :definition {:filter [:> [:field-id (data/id :venues :price)] 10]}})
+(def ^:private segment
+  (delay
+   {:table_id   (data/id :venues)
+    :definition {:filter [:> [:field-id (data/id :venues :price)] 10]}}))
 
 (defn- test-comparison
   [left right]
@@ -25,14 +27,14 @@
       pos?))
 
 (expect
-  (tt/with-temp* [Segment [{segment-id :id} segment]]
+  (tt/with-temp* [Segment [{segment-id :id} @segment]]
     (with-rasta
       (with-dashboard-cleanup
         (and (test-comparison (Table (data/id :venues)) (Segment segment-id))
              (test-comparison (Segment segment-id) (Table (data/id :venues))))))))
 
 (expect
-  (tt/with-temp* [Segment [{segment1-id :id} segment]
+  (tt/with-temp* [Segment [{segment1-id :id} @segment]
                   Segment [{segment2-id :id} {:table_id (data/id :venues)
                                               :definition {:filter [:< [:field-id (data/id :venues :price)] 4]}}]]
     (with-rasta
@@ -42,7 +44,7 @@
 (expect
   (with-rasta
     (with-dashboard-cleanup
-      (let [q (query/adhoc-query {:query {:filter (-> segment :definition :filter)
+      (let [q (query/adhoc-query {:query {:filter (-> @segment :definition :filter)
                                           :source-table (data/id :venues)}
                                   :type :query
                                   :database (data/id)})]
@@ -50,7 +52,7 @@
 
 (expect
   (tt/with-temp* [Card [{card-id :id} {:table_id      (data/id :venues)
-                                       :dataset_query {:query {:filter (-> segment :definition :filter)
+                                       :dataset_query {:query {:filter (-> @segment :definition :filter)
                                                                :source-table (data/id :venues)}
                                                        :type :query
                                                        :database (data/id)}}]]
diff --git a/test/metabase/automagic_dashboards/core_test.clj b/test/metabase/automagic_dashboards/core_test.clj
index 7aef64c5e674654e688ebe7b89d8c5884aedc9e7..6f414fbc0b7f5be099512287c5add13f920d77ba 100644
--- a/test/metabase/automagic_dashboards/core_test.clj
+++ b/test/metabase/automagic_dashboards/core_test.clj
@@ -476,47 +476,47 @@
                                (t.format/unparse
                                 (t.format/formatter formatter (t/time-zone-for-id tz)) dt))]
   (expect
-    [(tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt))
-     (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt))
-     (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt))
-     (tru "in {0} week - {1}"
-          (#'magic/pluralize (date/date-extract :week-of-year dt tz))
-          (str (date/date-extract :year dt tz)))
-     (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt))
-     (tru "in Q{0} - {1}"
-          (date/date-extract :quarter-of-year dt tz)
-          (str (date/date-extract :year dt tz)))
-     (unparse-with-formatter "YYYY" dt)
-     (unparse-with-formatter "EEEE" dt)
-     (tru "at {0}" (unparse-with-formatter "h a" dt))
-     (unparse-with-formatter "MMMM" dt)
-     (tru "Q{0}" (date/date-extract :quarter-of-year dt tz))
-     (date/date-extract :minute-of-hour dt tz)
-     (date/date-extract :day-of-month dt tz)
-     (date/date-extract :week-of-year dt tz)]
+    (map str [(tru "at {0}" (unparse-with-formatter "h:mm a, MMMM d, YYYY" dt))
+              (tru "at {0}" (unparse-with-formatter "h a, MMMM d, YYYY" dt))
+              (tru "on {0}" (unparse-with-formatter "MMMM d, YYYY" dt))
+              (tru "in {0} week - {1}"
+                   (#'magic/pluralize (date/date-extract :week-of-year dt tz))
+                   (str (date/date-extract :year dt tz)))
+              (tru "in {0}" (unparse-with-formatter "MMMM YYYY" dt))
+              (tru "in Q{0} - {1}"
+                   (date/date-extract :quarter-of-year dt tz)
+                   (str (date/date-extract :year dt tz)))
+              (unparse-with-formatter "YYYY" dt)
+              (unparse-with-formatter "EEEE" dt)
+              (tru "at {0}" (unparse-with-formatter "h a" dt))
+              (unparse-with-formatter "MMMM" dt)
+              (tru "Q{0}" (date/date-extract :quarter-of-year dt tz))
+              (date/date-extract :minute-of-hour dt tz)
+              (date/date-extract :day-of-month dt tz)
+              (date/date-extract :week-of-year dt tz)])
     (let [dt (t.format/unparse (t.format/formatters :date-hour-minute-second) dt)]
-      [(#'magic/humanize-datetime dt :minute)
-       (#'magic/humanize-datetime dt :hour)
-       (#'magic/humanize-datetime dt :day)
-       (#'magic/humanize-datetime dt :week)
-       (#'magic/humanize-datetime dt :month)
-       (#'magic/humanize-datetime dt :quarter)
-       (#'magic/humanize-datetime dt :year)
-       (#'magic/humanize-datetime dt :day-of-week)
-       (#'magic/humanize-datetime dt :hour-of-day)
-       (#'magic/humanize-datetime dt :month-of-year)
-       (#'magic/humanize-datetime dt :quarter-of-year)
-       (#'magic/humanize-datetime dt :minute-of-hour)
-       (#'magic/humanize-datetime dt :day-of-month)
-       (#'magic/humanize-datetime dt :week-of-year)])))
-
-(expect
-  [(tru "{0}st" 1)
-   (tru "{0}nd" 22)
-   (tru "{0}rd" 303)
-   (tru "{0}th" 0)
-   (tru "{0}th" 8)]
-  (map #'magic/pluralize [1 22 303 0 8]))
+      (map (comp str (partial #'magic/humanize-datetime dt)) [:minute
+                                                              :hour
+                                                              :day
+                                                              :week
+                                                              :month
+                                                              :quarter
+                                                              :year
+                                                              :day-of-week
+                                                              :hour-of-day
+                                                              :month-of-year
+                                                              :quarter-of-year
+                                                              :minute-of-hour
+                                                              :day-of-month
+                                                              :week-of-year]))))
+
+(expect
+  (map str [(tru "{0}st" 1)
+            (tru "{0}nd" 22)
+            (tru "{0}rd" 303)
+            (tru "{0}th" 0)
+            (tru "{0}th" 8)])
+  (map (comp str #'magic/pluralize) [1 22 303 0 8]))
 
 ;; Make sure we have handlers for all the units available
 (expect
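The `(map str ...)` wrappers added to these expectations exist because `tru` now returns a `UserLocalizedString` record rather than a plain string (see the `metabase.util.i18n` change above), so equality against strings only holds after both sides are forced through `str`:

    ;; (= (tru "{0}st" 1) "1st")        ;; => false (LocalizedString vs. String)
    ;; (= (str (tru "{0}st" 1)) "1st")  ;; => true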
diff --git a/test/metabase/driver/bigquery_test.clj b/test/metabase/driver/bigquery_test.clj
index 76e1810d5815288ae15be4e144e117b9b011de9c..1b711da80f5c73a7bed974cebe41821c34790491 100644
--- a/test/metabase/driver/bigquery_test.clj
+++ b/test/metabase/driver/bigquery_test.clj
@@ -8,11 +8,13 @@
              [query-processor-test :as qptest]
              [util :as u]]
             [metabase.driver.bigquery :as bigquery]
+            [metabase.mbql.util :as mbql.u]
             [metabase.models
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
             [metabase.query-processor.interface :as qpi]
+            [metabase.query-processor.middleware.check-features :as check-features]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -49,9 +51,9 @@
 ;; ordering shouldn't apply (Issue #2821)
 (expect-with-engine :bigquery
   {:columns ["venue_id" "user_id" "checkins_id"],
-   :cols    [{:name "venue_id",    :display_name "Venue ID",    :base_type :type/Integer}
-             {:name "user_id",     :display_name  "User ID",    :base_type :type/Integer}
-             {:name "checkins_id", :display_name "Checkins ID", :base_type :type/Integer}]}
+   :cols    [{:name "venue_id",    :display_name "Venue ID",    :source :native, :base_type :type/Integer}
+             {:name "user_id",     :display_name  "User ID",    :source :native, :base_type :type/Integer}
+             {:name "checkins_id", :display_name "Checkins ID", :source :native, :base_type :type/Integer}]}
 
   (select-keys (:data (qp/process-query
                         {:native   {:query (str "SELECT `test_data.checkins`.`venue_id` AS `venue_id`, "
@@ -75,27 +77,27 @@
                                                                       ["field-id" (data/id :checkins :venue_id)]]]
                                                   "User ID Plus Venue ID"]]}})))
 
-(defn- aggregation-names [query-map]
-  (->> query-map
-       :aggregation
-       (map :custom-name)))
+;; make sure we actually wrap all of our aggregation clauses in `:named` clauses with unique names
+(defn- aggregation-names [query]
+  (mbql.u/match (-> query :query :aggregation)
+    [:named _ ag-name] ag-name))
 
-(defn- pre-alias-aggregations' [query-map]
+(defn- pre-alias-aggregations [outer-query]
   (binding [qpi/*driver* (driver/engine->driver :bigquery)]
-    (aggregation-names (#'bigquery/pre-alias-aggregations query-map))))
+    (aggregation-names (#'bigquery/pre-alias-aggregations outer-query))))
 
-(defn- expanded-query-with-aggregations [aggregations]
-  (-> (qp/expand {:database (data/id)
-                  :type     :query
-                  :query    {:source-table (data/id :venues)
-                             :aggregation  aggregations}})
-      :query))
+(defn- query-with-aggregations
+  [aggregations]
+  {:database (data/id)
+   :type     :query
+   :query    {:source-table (data/id :venues)
+              :aggregation  aggregations}})
 
 ;; make sure BigQuery can handle two aggregations with the same name (#4089)
 (expect
   ["sum" "count" "sum_2" "avg" "sum_3" "min"]
-  (pre-alias-aggregations'
-   (expanded-query-with-aggregations
+  (pre-alias-aggregations
+   (query-with-aggregations
     [[:sum [:field-id (data/id :venues :id)]]
      [:count [:field-id (data/id :venues :id)]]
      [:sum [:field-id (data/id :venues :id)]]
@@ -105,13 +107,21 @@
 
 (expect
   ["sum" "count" "sum_2" "avg" "sum_2_2" "min"]
-  (pre-alias-aggregations'
-   (expanded-query-with-aggregations [[:sum [:field-id (data/id :venues :id)]]
-                                      [:count [:field-id (data/id :venues :id)]]
-                                      [:sum [:field-id (data/id :venues :id)]]
-                                      [:avg [:field-id (data/id :venues :id)]]
-                                      [:named [:sum [:field-id (data/id :venues :id)]] "sum_2"]
-                                      [:min [:field-id (data/id :venues :id)]]])))
+  (pre-alias-aggregations
+   (query-with-aggregations
+    [[:sum [:field-id (data/id :venues :id)]]
+     [:count [:field-id (data/id :venues :id)]]
+     [:sum [:field-id (data/id :venues :id)]]
+     [:avg [:field-id (data/id :venues :id)]]
+     [:named [:sum [:field-id (data/id :venues :id)]] "sum_2"]
+     [:min [:field-id (data/id :venues :id)]]])))
+
+;; if query has no aggregations then pre-alias-aggregations should do nothing
+(expect
+  {}
+  (binding [qpi/*driver* (driver/engine->driver :bigquery)]
+    (#'bigquery/pre-alias-aggregations {})))
+
 
 (expect-with-engine :bigquery
   {:rows [[7929 7929]], :columns ["sum" "sum_2"]}
@@ -141,16 +151,17 @@
 ;; alias, e.g. something like `categories__via__category_id`, which is considerably different from what other SQL
 ;; databases do. (#4218)
 (expect-with-engine :bigquery
-  (str "SELECT count(*) AS `count`,"
-       " `test_data.categories__via__category_id`.`name` AS `categories__via__category_id___name` "
+  (str "SELECT `test_data.categories__via__category_id`.`name` AS `categories___name`,"
+       " count(*) AS `count` "
        "FROM `test_data.venues` "
        "LEFT JOIN `test_data.categories` `test_data.categories__via__category_id`"
        " ON `test_data.venues`.`category_id` = `test_data.categories__via__category_id`.`id` "
-       "GROUP BY `categories__via__category_id___name` "
-       "ORDER BY `categories__via__category_id___name` ASC")
+       "GROUP BY `categories___name` "
+       "ORDER BY `categories___name` ASC")
   ;; for test purposes BigQuery normally doesn't support foreign keys, so override the function that checks for FK
   ;; support and make it return `true` so this test proceeds as expected
-  (with-redefs [qpi/driver-supports? (constantly true)]
+  (with-redefs [driver/driver-supports?         (constantly true)
+                check-features/driver-supports? (constantly true)]
     (tu/with-temp-vals-in-db 'Field (data/id :venues :category_id) {:fk_target_field_id (data/id :categories :id)
                                                                     :special_type       "type/FK"}
       (let [results (qp/process-query
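The expected alias lists above (`["sum" "count" "sum_2" ...]` and `[... "sum_2_2" ...]`) pin down the de-duplication scheme: the first occurrence of a name is kept as-is, and later collisions get a numeric suffix, including collisions with user-supplied `:named` aliases. A hedged sketch of that scheme, not the driver's actual code:

    (defn uniquify-names
      "Append _2, _3, ... to names that collide with an earlier name."
      [names]
      (:acc (reduce (fn [{:keys [seen acc]} n]
                      (let [n' (if-not (seen n)
                                 n
                                 (loop [i 2]
                                   (let [candidate (str n "_" i)]
                                     (if (seen candidate) (recur (inc i)) candidate))))]
                        {:seen (conj seen n'), :acc (conj acc n')}))
                    {:seen #{}, :acc []}
                    names)))

    ;; (uniquify-names ["sum" "count" "sum" "avg" "sum" "min"])
    ;; => ["sum" "count" "sum_2" "avg" "sum_3" "min"]
    ;; (uniquify-names ["sum" "count" "sum" "avg" "sum_2" "min"])
    ;; => ["sum" "count" "sum_2" "avg" "sum_2_2" "min"]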
diff --git a/test/metabase/driver/druid_test.clj b/test/metabase/driver/druid_test.clj
index b7f7b32237fa6ddb2f8ce1f64b9f2c61b6fb531a..e3caff49df87a10e4362efb0b8dedabf4c90c395 100644
--- a/test/metabase/driver/druid_test.clj
+++ b/test/metabase/driver/druid_test.clj
@@ -17,6 +17,7 @@
              [data :as data]
              [util :as tu]]
             [metabase.test.data.datasets :as datasets :refer [expect-with-engine]]
+            [metabase.test.util.log :as tu.log]
             [metabase.timeseries-query-processor-test.util :as tqpt]
             [toucan.util.test :as tt]))
 
@@ -30,7 +31,8 @@
    ["101"  "Golden Road Brewing"         #inst "2015-09-04T07:00:00.000Z"]]
   (->> (driver/table-rows-sample (Table (data/id :checkins))
                                  [(Field (data/id :checkins :id))
-                                  (Field (data/id :checkins :venue_name))])
+                                  (Field (data/id :checkins :venue_name))
+                                  (Field (data/id :checkins :timestamp))])
        (sort-by first)
        (take 5)))
 
@@ -44,7 +46,8 @@
   (tu/with-temporary-setting-values [report-timezone "America/Los_Angeles"]
     (->> (driver/table-rows-sample (Table (data/id :checkins))
                                    [(Field (data/id :checkins :id))
-                                    (Field (data/id :checkins :venue_name))])
+                                    (Field (data/id :checkins :venue_name))
+                                    (Field (data/id :checkins :timestamp))])
          (sort-by first)
          (take 5))))
 
@@ -58,7 +61,8 @@
   (tu/with-jvm-tz (time/time-zone-for-id "America/Chicago")
     (->> (driver/table-rows-sample (Table (data/id :checkins))
                                    [(Field (data/id :checkins :id))
-                                    (Field (data/id :checkins :venue_name))])
+                                    (Field (data/id :checkins :venue_name))
+                                    (Field (data/id :checkins :timestamp))])
          (sort-by first)
          (take 5))))
 
@@ -94,12 +98,12 @@
                :rows        [["2013-01-03T08:00:00.000Z" "931" "Simcha Yan" "1" "Kinaree Thai Bistro"       1]
                              ["2013-01-10T08:00:00.000Z" "285" "Kfir Caj"   "2" "Ruen Pair Thai Restaurant" 1]]
                :cols        (mapv #(merge col-defaults %)
-                                  [{:name "timestamp",   :display_name "Timestamp"}
-                                   {:name "id",          :display_name "ID"}
-                                   {:name "user_name",   :display_name "User Name"}
-                                   {:name "venue_price", :display_name "Venue Price"}
-                                   {:name "venue_name",  :display_name "Venue Name"}
-                                   {:name "count",       :display_name "Count", :base_type :type/Integer}])
+                                  [{:name "timestamp",   :source :native, :display_name "Timestamp"}
+                                   {:name "id",          :source :native, :display_name "ID"}
+                                   {:name "user_name",   :source :native, :display_name "User Name"}
+                                   {:name "venue_price", :source :native, :display_name "Venue Price"}
+                                   {:name "venue_name",  :source :native, :display_name "Venue Name"}
+                                   {:name "count",       :source :native, :display_name "Count", :base_type :type/Integer}])
                :native_form {:query native-query-1}}}
   (-> (process-native-query native-query-1)
       (m/dissoc-in [:data :insights])))
@@ -139,7 +143,7 @@
 ;; use Monday. All of the below events should happen in one week. Using Druid's default grouping, 3 of the events would
 ;; have counted for the previous week
 (expect-with-engine :druid
-  [["2015-10-04T00:00:00.000Z" 9]]
+  [["2015-10-04" 9]]
   (druid-query-returning-rows
     {:filter      [:between [:datetime-field $timestamp :day] "2015-10-04" "2015-10-10"]
      :aggregation [[:count $id]]
@@ -326,7 +330,8 @@
                       :tunnel-enabled true
                       :tunnel-port    22
                       :tunnel-user    "bogus"}]
-      (driver/can-connect-with-details? engine details :rethrow-exceptions))
+      (tu.log/suppress-output
+        (driver/can-connect-with-details? engine details :rethrow-exceptions)))
        (catch Exception e
          (.getMessage e))))
 
diff --git a/test/metabase/driver/generic_sql/connection_test.clj b/test/metabase/driver/generic_sql/connection_test.clj
index c75d0f4908526d73aee41df83600bd422be298fc..56d1e00ab8318c5e0d17848c8acb4cb9b7b0b712 100644
--- a/test/metabase/driver/generic_sql/connection_test.clj
+++ b/test/metabase/driver/generic_sql/connection_test.clj
@@ -1,7 +1,9 @@
 (ns metabase.driver.generic-sql.connection-test
   (:require [expectations :refer :all]
             [metabase.driver :as driver]
-            [metabase.test.data :refer :all]))
+            [metabase.test.data :refer :all]
+            [metabase.test.util.log :as tu.log]
+            [metabase.util :as u]))
 
 ;; ## TESTS FOR CAN-CONNECT?
 
@@ -13,14 +15,17 @@
 ;; Lie and say Test DB is Postgres. CAN-CONNECT? should fail
 (expect
   false
-  (driver/can-connect-with-details? :postgres (:details (db))))
+  (tu.log/suppress-output
+    (driver/can-connect-with-details? :postgres (:details (db)))))
 
 ;; Random made-up DBs should fail
 (expect
   false
-  (driver/can-connect-with-details? :postgres {:host "localhost", :port 5432, :dbname "ABCDEFGHIJKLMNOP", :user "rasta"}))
+  (tu.log/suppress-output
+    (driver/can-connect-with-details? :postgres {:host "localhost", :port 5432, :dbname "ABCDEFGHIJKLMNOP", :user "rasta"})))
 
 ;; Things that you can connect to, but are not DBs, should fail
 (expect
   false
-  (driver/can-connect-with-details? :postgres {:host "google.com", :port 80}))
+  (tu.log/suppress-output
+    (driver/can-connect-with-details? :postgres {:host "google.com", :port 80})))
diff --git a/test/metabase/driver/generic_sql/native_test.clj b/test/metabase/driver/generic_sql/native_test.clj
index 0ecb550f3291a41b6338c2ede6961d9281ecb8ad..eaca29833c69eb973d7f3fa25296f5d42872cf0a 100644
--- a/test/metabase/driver/generic_sql/native_test.clj
+++ b/test/metabase/driver/generic_sql/native_test.clj
@@ -3,7 +3,8 @@
   (:require [expectations :refer :all]
             [medley.core :as m]
             [metabase.query-processor :as qp]
-            [metabase.test.data :as data]))
+            [metabase.test.data :as data]
+            [metabase.test.util.log :as tu.log]))
 
 ;; Just check that a basic query works
 (expect
@@ -12,7 +13,7 @@
    :data      {:rows        [[100]
                              [99]]
                :columns     ["ID"]
-               :cols        [{:name "ID", :display_name "ID", :base_type :type/Integer}]
+               :cols        [{:name "ID", :display_name "ID", :base_type :type/Integer, :source :native}]
                :native_form {:query "SELECT ID FROM VENUES ORDER BY ID DESC LIMIT 2", :params []}}}
   (-> (qp/process-query {:native   {:query "SELECT ID FROM VENUES ORDER BY ID DESC LIMIT 2"}
                          :type     :native
@@ -27,9 +28,9 @@
    :data      {:rows        [[100 "Mohawk Bend" 46]
                              [99 "Golden Road Brewing" 10]]
                :columns     ["ID" "NAME" "CATEGORY_ID"]
-               :cols        [{:name "ID",          :display_name "ID",          :base_type :type/Integer}
-                             {:name "NAME",        :display_name "Name",        :base_type :type/Text}
-                             {:name "CATEGORY_ID", :display_name "Category ID", :base_type :type/Integer}]
+               :cols        [{:name "ID",          :display_name "ID",          :source :native, :base_type :type/Integer}
+                             {:name "NAME",        :display_name "Name",        :source :native, :base_type :type/Text}
+                             {:name "CATEGORY_ID", :display_name "Category ID", :source :native, :base_type :type/Integer}]
                :native_form {:query "SELECT ID, NAME, CATEGORY_ID FROM VENUES ORDER BY ID DESC LIMIT 2", :params []}}}
   (-> (qp/process-query {:native   {:query "SELECT ID, NAME, CATEGORY_ID FROM VENUES ORDER BY ID DESC LIMIT 2"}
                          :type     :native
@@ -38,12 +39,13 @@
       (m/dissoc-in [:data :insights])))
 
 ;; Check that we get proper error responses for malformed SQL
-(expect {:status :failed
-         :class  java.lang.Exception
-         :error  "Column \"ZID\" not found"}
-  (dissoc (qp/process-query {:native   {:query "SELECT ZID FROM CHECKINS LIMIT 2"} ; make sure people know it's to be expected
-                             :type     :native
-                             :database (data/id)})
+(expect
+  {:status :failed
+   :class  java.lang.Exception
+   :error  "Column \"ZID\" not found"}
+  (dissoc (tu.log/suppress-output
+            (qp/process-query {:native   {:query "SELECT ZID FROM CHECKINS LIMIT 2"}
+                               :type     :native
+                               :database (data/id)}))
           :stacktrace
-          :query
-          :expanded-query))
+          :query))
diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj
index 000b827f61df350cf33848d000c4d5be0d56b724..2845071b9924f883d9275f81252e7e74d9de4962 100644
--- a/test/metabase/driver/generic_sql_test.clj
+++ b/test/metabase/driver/generic_sql_test.clj
@@ -5,10 +5,10 @@
             [metabase.models
              [field :refer [Field]]
              [table :as table :refer [Table]]]
-            [metabase.test.data :refer :all]
+            [metabase.test.data :as data :refer :all]
             [metabase.test.data.datasets :as datasets]
-            [toucan.db :as db]
-            [metabase.test.data :as data])
+            [metabase.test.util.log :as tu.log]
+            [toucan.db :as db])
   (:import metabase.driver.h2.H2Driver))
 
 (def ^:private users-table      (delay (Table :name "USERS")))
@@ -84,9 +84,9 @@
    {:name "The Apple Pan",                :price 2, :category_id 11, :id 3}
    {:name "Wurstküche",                   :price 2, :category_id 29, :id 4}
    {:name "Brite Spot Family Restaurant", :price 2, :category_id 20, :id 5}]
-  (for [row (take 5 (sort-by :id (#'sql/table-rows-seq datasets/*driver*
-                                   (db/select-one 'Database :id (id))
-                                   (db/select-one 'Table :id (id :venues)))))]
+  (for [row (take 5 (sort-by :id (driver/table-rows-seq datasets/*driver*
+                                                        (db/select-one 'Database :id (id))
+                                                        (db/select-one 'Table :id (id :venues)))))]
     ;; different DBs use different precisions for these
     (-> (dissoc row :latitude :longitude)
         (update :price int)
@@ -97,19 +97,20 @@
 ;;; Make sure invalid ssh credentials are detected if a direct connection is possible
 (expect
   #"com.jcraft.jsch.JSchException:"
-  (try (let [engine :postgres
-             details {:ssl false,
-                      :password "changeme",
-                      :tunnel-host "localhost", ;; this test works if sshd is running or not
-                      :tunnel-pass "BOGUS-BOGUS-BOGUS",
-                      :port 5432,
-                      :dbname "test",
-                      :host "localhost",
-                      :tunnel-enabled true,
-                      :tunnel-port 22,
-                      :engine :postgres,
-                      :user "postgres",
-                      :tunnel-user "example"}]
-         (driver/can-connect-with-details? engine details :rethrow-exceptions))
+  (try (let [engine  :postgres
+             details {:ssl            false
+                      :password       "changeme"
+                      :tunnel-host    "localhost" ; this test works whether or not sshd is running
+                      :tunnel-pass    "BOGUS-BOGUS-BOGUS"
+                      :port           5432
+                      :dbname         "test"
+                      :host           "localhost"
+                      :tunnel-enabled true
+                      :tunnel-port    22
+                      :engine         :postgres
+                      :user           "postgres"
+                      :tunnel-user    "example"}]
+         (tu.log/suppress-output
+           (driver/can-connect-with-details? engine details :rethrow-exceptions)))
        (catch Exception e
          (.getMessage e))))
diff --git a/test/metabase/driver/googleanalytics/query_processor_test.clj b/test/metabase/driver/googleanalytics/query_processor_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..2206cec07794a0e28460b1d42c2d4cd9cb7f6aa2
--- /dev/null
+++ b/test/metabase/driver/googleanalytics/query_processor_test.clj
@@ -0,0 +1,22 @@
+(ns metabase.driver.googleanalytics.query-processor-test
+  (:require [expectations :refer [expect]]
+            [metabase.driver.googleanalytics.query-processor :as ga.qp]))
+
+(expect
+  "ga::WOW"
+  (#'ga.qp/built-in-segment {:filter [:segment "ga::WOW"]}))
+
+;; should work recursively
+(expect
+  "gaid::A"
+  (#'ga.qp/built-in-segment {:filter [:and [:= [:field-id 1] 2] [:segment "gaid::A"]]}))
+
+;; should throw Exception if more than one segment is matched
+(expect
+  Exception
+  (#'ga.qp/built-in-segment {:filter [:and [:segment "gaid::A"] [:segment "ga::B"]]}))
+
+;; should ignore Metabase segments
+(expect
+  "ga::B"
+  (#'ga.qp/built-in-segment {:filter [:and [:segment 100] [:segment "ga::B"]]}))
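+
+;; illustrative extra check (an assumption, not part of the original change): with no built-in
+;; (string-named) segment in the filter at all, `built-in-segment` should match nothing and return nil
+(expect
+  nil
+  (#'ga.qp/built-in-segment {:filter [:and [:segment 100] [:= [:field-id 1] 2]]}))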
diff --git a/test/metabase/driver/googleanalytics_test.clj b/test/metabase/driver/googleanalytics_test.clj
index dae92cc8a1f7daf222738f0946d72f5c3872c879..c89cc2bc754ad8093c814f34ff656ab5b461ad8b 100644
--- a/test/metabase/driver/googleanalytics_test.clj
+++ b/test/metabase/driver/googleanalytics_test.clj
@@ -1,56 +1,25 @@
 (ns metabase.driver.googleanalytics-test
   "Tests for the Google Analytics driver and query processor."
-  (:require [expectations :refer :all]
-            [metabase.driver.googleanalytics.query-processor :as qp]
+  (:require [expectations :refer [expect]]
+            [medley.core :as m]
+            [metabase
+             [query-processor :as qp]
+             [util :as u]]
+            [metabase.driver.googleanalytics.query-processor :as ga.qp]
             [metabase.models
              [card :refer [Card]]
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
-            [metabase.query-processor
-             [interface :as qpi]
-             [store :as qp.store]]
+            [metabase.query-processor.store :as qp.store]
             [metabase.test.data.users :as users]
-            [metabase.util :as u]
+            [metabase.test.util :as tu]
             [metabase.util.date :as du]
             [toucan.db :as db]
             [toucan.util.test :as tt]))
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                             QUERY "TRANSFORMATION"                                             |
-;;; +----------------------------------------------------------------------------------------------------------------+
-
-;; check that a built-in Metric gets removed from the query and put in `:ga`
-(expect
-  {:ga {:segment nil, :metrics "ga:users"}}
-  (qp/transform-query {:query {:aggregation [[:metric "ga:users"]]}}))
-
-
-;; check that a built-in segment gets removed from the query and put in `:ga`
-(expect
-  {:ga {:segment "gaid::-4", :metrics nil}}
-  (qp/transform-query {:query {:filter [:segment "gaid::-4"]}}))
-
-;; check that other things stay in the order-by clause
-(expect
-  {:query {:filter [:< 100 200]}
-   :ga    {:segment nil, :metrics nil}}
-  (qp/transform-query {:query {:filter [:< 100 200]}}))
-
-(expect
-  {:query {:filter [:and [:< 100 200]]}
-   :ga    {:segment nil, :metrics nil}}
-  (qp/transform-query {:query {:filter [:and [:< 100 200]]}}))
-
-(expect
-  {:query {:filter [:and [:< 100 200]]}
-   :ga    {:segment "gaid::-4", :metrics nil}}
-  (qp/transform-query {:query {:filter [:and [:segment "gaid::-4"]
-                                             [:< 100 200]]}}))
-
-
-;;; +----------------------------------------------------------------------------------------------------------------+
-;;; |                                   MBQL->NATIVE (EXPANDED QUERY -> GA QUERY)                                    |
+;;; |                                        MBQL->NATIVE (QUERY -> GA QUERY)                                        |
 ;;; +----------------------------------------------------------------------------------------------------------------+
 
 (defn- ga-query [inner-query]
@@ -67,7 +36,7 @@
   (binding [qp.store/*store* (atom {:tables {1 #metabase.models.table.TableInstance{:name   "0123456"
                                                                                     :schema nil
                                                                                     :id     1}}})]
-    (qp/mbql->native (update query :query (partial merge {:source-table 1})))))
+    (ga.qp/mbql->native (update query :query (partial merge {:source-table 1})))))
 
 ;; just check that a basic almost-empty MBQL query can be compiled
 (expect
@@ -78,101 +47,74 @@
 ;; try a basic query with a metric (aggregation)
 (expect
   (ga-query {:metrics "ga:users"})
-  (mbql->native {:ga {:metrics "ga:users"}}))
+  (mbql->native {:query {:aggregation [[:metric "ga:users"]]}}))
 
 
 ;; query with metric (aggregation) + breakout
 (expect
   (ga-query {:metrics    "ga:users"
              :dimensions "ga:browser"})
-  (mbql->native {:query {:breakout [(qpi/map->Field {:field-name "ga:browser"})]}
-                 :ga    {:metrics "ga:users"}}))
+  (mbql->native {:query {:aggregation [[:metric "ga:users"]]
+                         :breakout    [[:field-literal "ga:browser"]]}}))
 
 
 ;; query w/ segment (filter)
 (expect
   (ga-query {:segment "gaid::-4"})
-  (mbql->native {:ga {:segment "gaid::-4"}}))
+  (mbql->native {:query {:filter [:segment "gaid::-4"]}}))
 
 
 ;; query w/ non-segment filter
 (expect
   (ga-query {:filters "ga:continent==North America"})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (qpi/map->Field {:field-name "ga:continent"})
-                                  :value       (qpi/map->Value {:value "North America"})}}}))
+  (mbql->native {:query {:filter [:= [:field-literal "ga:continent"] [:value "North America"]]}}))
 
 ;; query w/ segment & non-segment filter
 (expect
   (ga-query {:filters "ga:continent==North America"
              :segment "gaid::-4"})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (qpi/map->Field {:field-name "ga:continent"})
-                                  :value       (qpi/map->Value {:value "North America"})}}
-                 :ga    {:segment "gaid::-4"}}))
+  (mbql->native {:query {:filter [:and
+                                  [:segment "gaid::-4"]
+                                  [:= [:field-literal "ga:continent"] [:value "North America"]]]}}))
 
 ;; query w/ date filter
 (defn- ga-date-field [unit]
-  (qpi/map->DateTimeField {:field (qpi/map->Field {:field-name "ga:date"})
-                           :unit unit}))
+  [:datetime-field [:field-literal "ga:date"] unit])
 
 ;; absolute date
 (expect
   (ga-query {:start-date "2016-11-08", :end-date "2016-11-08"})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :day)
-                                  :value       (qpi/map->DateTimeValue {:value #inst "2016-11-08"
-                                                                        :field (ga-date-field :day)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :day) [:absolute-datetime #inst "2016-11-08" :day]]}}))
 
 ;; relative date -- last month
 (expect
   (ga-query {:start-date (du/format-date "yyyy-MM-01" (du/relative-date :month -1))
              :end-date   (du/format-date "yyyy-MM-01")})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :month)
-                                  :value       (qpi/map->RelativeDateTimeValue {:amount -1
-                                                                                :unit   :month
-                                                                                :field  (ga-date-field :month)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :month) [:relative-datetime -1 :month]]}}))
 
 ;; relative date -- this month
 (expect
   (ga-query {:start-date (du/format-date "yyyy-MM-01")
              :end-date   (du/format-date "yyyy-MM-01" (du/relative-date :month 1))})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :month)
-                                  :value       (qpi/map->RelativeDateTimeValue {:amount 0
-                                                                                :unit   :month
-                                                                                :field  (ga-date-field :month)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :month) [:relative-datetime 0 :month]]}}))
 
 ;; relative date -- next month
 (expect
   (ga-query {:start-date (du/format-date "yyyy-MM-01" (du/relative-date :month 1))
              :end-date   (du/format-date "yyyy-MM-01" (du/relative-date :month 2))})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :month)
-                                  :value       (qpi/map->RelativeDateTimeValue {:amount 1
-                                                                                :unit   :month
-                                                                                :field  (ga-date-field :month)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :month) [:relative-datetime 1 :month]]}}))
 
 ;; relative date -- 2 months from now
 (expect
   (ga-query {:start-date (du/format-date "yyyy-MM-01" (du/relative-date :month 2))
              :end-date   (du/format-date "yyyy-MM-01" (du/relative-date :month 3))})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :month)
-                                  :value       (qpi/map->RelativeDateTimeValue {:amount 2
-                                                                                :unit   :month
-                                                                                :field  (ga-date-field :month)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :month) [:relative-datetime 2 :month]]}}))
 
 ;; relative date -- last year
 (expect
   (ga-query {:start-date (du/format-date "yyyy-01-01" (du/relative-date :year -1))
              :end-date   (du/format-date "yyyy-01-01")})
-  (mbql->native {:query {:filter {:filter-type :=
-                                  :field       (ga-date-field :year)
-                                  :value       (qpi/map->RelativeDateTimeValue {:amount -1
-                                                                                :unit   :year
-                                                                                :field  (ga-date-field :year)})}}}))
+  (mbql->native {:query {:filter [:= (ga-date-field :year) [:relative-datetime -1 :year]]}}))
 
 ;; limit
 (expect
@@ -180,6 +122,137 @@
   (mbql->native {:query {:limit 25}}))
 
 
+;;; ----------------------------------------------- (Almost) E2E tests -----------------------------------------------
+
+(defn- do-with-some-fields [f]
+  (tt/with-temp* [Database [db                 {:engine :googleanalytics}]
+                  Table    [table              {:name "98765432"}]
+                  Field    [event-action-field {:name "ga:eventAction", :base_type "type/Text"}]
+                  Field    [event-label-field  {:name "ga:eventLabel", :base_type "type/Text"}]
+                  Field    [date-field         {:name "ga:date", :base_type "type/Date"}]]
+    (f {:db                 db
+        :table              table
+        :event-action-field event-action-field
+        :event-label-field  event-label-field
+        :date-field         date-field})))
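+
+;; usage sketch (illustrative only): `do-with-some-fields` hands the temp objects to `f` as a map, e.g.
+;;
+;;   (do-with-some-fields
+;;    (fn [{:keys [db table]}]
+;;      [(u/get-id db) (u/get-id table)]))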
+
+;; let's try a real-life GA query and see how it looks when it's all put together. This one has already been
+;; preprocessed, so we're just checking it gets converted to the correct native query
+(def ^:private expected-ga-query
+  {:query {:ids                "ga:98765432"
+           :dimensions         "ga:eventLabel"
+           :metrics            "ga:totalEvents"
+           :segment            "gaid::-4"
+           :start-date         "30daysAgo"
+           :end-date           "yesterday"
+           :filters            "ga:eventAction==Run Query;ga:eventLabel!=(not set);ga:eventLabel!=url"
+           :sort               "ga:eventLabel"
+           :max-results        10000
+           :include-empty-rows false}
+   :mbql? true})
+
+(defn- preprocessed-query-with-some-fields [{:keys [db table event-action-field event-label-field date-field]}]
+  {:database (u/get-id db)
+   :type     :query
+   :query    {:source-table
+              (u/get-id table)
+
+              :aggregation
+              [[:metric "ga:totalEvents"]]
+
+              :breakout
+              [[:field-id (u/get-id event-label-field)]]
+
+              :filter
+              [:and
+               [:segment "gaid::-4"]
+               [:=
+                [:field-id (u/get-id event-action-field)]
+                [:value "Run Query" {:base_type :type/Text, :special_type nil, :database_type "VARCHAR"}]]
+               [:between
+                [:datetime-field [:field-id (u/get-id date-field)] :day]
+                [:relative-datetime -30 :day]
+                [:relative-datetime -1 :day]]
+               [:!=
+                [:field-id (u/get-id event-label-field)]
+                [:value "(not set)" {:base_type :type/Text, :special_type nil, :database_type "VARCHAR"}]]
+               [:!=
+                [:field-id (u/get-id event-label-field)]
+                [:value "url" {:base_type :type/Text, :special_type nil, :database_type "VARCHAR"}]]]
+
+              :order-by
+              [[:asc [:field-id (u/get-id event-label-field)]]]}})
+
+(expect
+  expected-ga-query
+  (do-with-some-fields
+   (fn [{:keys [table event-action-field event-label-field date-field], :as objects}]
+     (qp.store/with-store
+       (qp.store/store-table! table)
+       (doseq [field [event-action-field event-label-field date-field]]
+         (qp.store/store-field! field))
+       (ga.qp/mbql->native (preprocessed-query-with-some-fields objects))))))
+
+;; this was the above query before it was preprocessed. Make sure the entire preprocessing pipeline
+;; handles everything correctly end-to-end
+(defn- query-with-some-fields [{:keys [db table event-action-field event-label-field date-field]}]
+  {:database (u/get-id db)
+   :type     :query
+   :query    {:source-table (u/get-id table)
+              :aggregation  [[:metric "ga:totalEvents"]]
+              :filter       [:and
+                             [:segment "gaid::-4"]
+                             [:= [:field-id (u/get-id event-action-field)] "Run Query"]
+                             [:time-interval [:field-id (u/get-id date-field)] -30 :day]
+                             [:!= [:field-id (u/get-id event-label-field)] "(not set)" "url"]]
+              :breakout     [[:field-id (u/get-id event-label-field)]]}})
+
+(expect
+  expected-ga-query
+  (do-with-some-fields
+   (comp qp/query->native query-with-some-fields)))
+
+;; ok, now do the same query again, but run the entire QP pipeline, swapping out a few things so nothing is actually
+;; run externally.
+(expect
+  {:row_count 1
+   :status    :completed
+   :data      {:columns     [:ga:eventLabel :ga:totalEvents]
+               :rows        [["Toucan Sighting" 1000]]
+               :native_form expected-ga-query
+               :cols        [{:description     "This is ga:eventLabel"
+                              :special_type    nil
+                              :name            "ga:eventLabel"
+                              :settings        nil
+                              :source          :breakout
+                              :parent_id       nil
+                              :visibility_type :normal
+                              :display_name    "ga:eventLabel"
+                              :fingerprint     nil
+                              :base_type       :type/Text}
+                             {:name         "metric"
+                              :display_name "metric"
+                              :source       :aggregation
+                              :description  "This is metric"
+                              :base_type    :type/Text}]}}
+  (with-redefs [metabase.driver.googleanalytics/memoized-column-metadata (fn [_ column-name]
+                                                                           {:display_name column-name
+                                                                            :description  (str "This is " column-name)
+                                                                            :base_type    :type/Text})]
+    (do-with-some-fields
+     (fn [objects]
+       (let [results {:columns [:ga:eventLabel :ga:totalEvents]
+                      :cols    [{}, {:base_type :type/Text}]
+                      :rows    [["Toucan Sighting" 1000]]}
+             qp      (#'metabase.query-processor/qp-pipeline (constantly results))
+             query   (query-with-some-fields objects)]
+         (-> (tu/doall-recursive (qp query))
+             (update-in [:data :cols] #(for [col %]
+                                         (dissoc col :table_id :id)))
+             (m/dissoc-in [:data :results_metadata])
+             (m/dissoc-in [:data :insights])))))))
+
+
 ;;; ------------------------------------------------ Saving GA Cards -------------------------------------------------
 
 ;; Can we *save* a GA query that has two aggregations?
diff --git a/test/metabase/driver/mongo/util_test.clj b/test/metabase/driver/mongo/util_test.clj
index a1e44af28dd5466dafd471e5bd5d389882b98a16..ec46dc602368e6e681c5ce4ea785d284b3182857 100644
--- a/test/metabase/driver/mongo/util_test.clj
+++ b/test/metabase/driver/mongo/util_test.clj
@@ -1,7 +1,8 @@
 (ns metabase.driver.mongo.util-test
   (:require [expectations :refer [expect]]
             [metabase.driver :as driver]
-            [metabase.driver.mongo.util :as mongo-util])
+            [metabase.driver.mongo.util :as mongo-util]
+            [metabase.test.util.log :as tu.log])
   (:import com.mongodb.ReadPreference))
 
 ;; test that people can specify additional connection options like `?readPreference=nearest`
@@ -41,6 +42,7 @@
                    :tunnel-enabled true
                    :tunnel-port    22
                    :tunnel-user    "bogus"}]
-      (driver/can-connect-with-details? engine details :rethrow-exceptions))
+      (tu.log/suppress-output
+        (driver/can-connect-with-details? engine details :rethrow-exceptions)))
        (catch Exception e
          (.getMessage e))))
diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj
index 6847f916a6b992d72a6092579be2cd2c81ba5674..d508242aca625b0e2e69dda39e6905d6e01e2e7e 100644
--- a/test/metabase/driver/mongo_test.clj
+++ b/test/metabase/driver/mongo_test.clj
@@ -77,7 +77,7 @@
    :row_count 1
    :data      {:rows        [[1]]
                :columns     ["count"]
-               :cols        [{:name "count", :display_name "Count", :base_type :type/Integer}]
+               :cols        [{:name "count", :display_name "Count", :base_type :type/Integer, :source :native}]
                :native_form {:collection "venues"
                              :query      native-query}}}
   (-> (qp/process-query {:native   {:query      native-query
diff --git a/test/metabase/driver/oracle_test.clj b/test/metabase/driver/oracle_test.clj
index bc080b75a482b3cca9cd32da2d11432cbd9311c6..d4cae60677dcc3f13d26d73f00f79aa14d288b66 100644
--- a/test/metabase/driver/oracle_test.clj
+++ b/test/metabase/driver/oracle_test.clj
@@ -5,14 +5,9 @@
             [metabase.driver
              [generic-sql :as sql]
              [oracle :as oracle]]
-            [metabase.models
-             [database :refer [Database]]
-             [setting :as setting]]
-            [metabase.test.data :as data]
-            [metabase.test.data
-             [dataset-definitions :as defs]
-             [datasets :refer [expect-with-engine]]]
-            [metabase.test.util :as tu])
+            [metabase.test.data.datasets :refer [expect-with-engine]]
+            [metabase.test.util :as tu]
+            [metabase.test.util.log :as tu.log])
   (:import metabase.driver.oracle.OracleDriver))
 
 ;; make sure we can connect with an SID
@@ -56,20 +51,21 @@
 
 (expect
   com.jcraft.jsch.JSchException
-  (let [engine :oracle
-        details {:ssl false,
-                 :password "changeme",
-                 :tunnel-host "localhost",
-                 :tunnel-pass "BOGUS-BOGUS-BOGUS",
-                 :port 12345,
-                 :service-name "test",
-                 :sid "asdf",
-                 :host "localhost",
-                 :tunnel-enabled true,
-                 :tunnel-port 22,
-                 :user "postgres",
-                 :tunnel-user "example"}]
-    (#'oracle/can-connect? details)))
+  (let [engine  :oracle
+        details {:ssl            false
+                 :password       "changeme"
+                 :tunnel-host    "localhost"
+                 :tunnel-pass    "BOGUS-BOGUS-BOGUS"
+                 :port           12345
+                 :service-name   "test"
+                 :sid            "asdf"
+                 :host           "localhost"
+                 :tunnel-enabled true
+                 :tunnel-port    22
+                 :user           "postgres"
+                 :tunnel-user    "example"}]
+    (tu.log/suppress-output
+      (#'oracle/can-connect? details))))
 
 (expect-with-engine :oracle
   "UTC"
diff --git a/test/metabase/driver/postgres_test.clj b/test/metabase/driver/postgres_test.clj
index 865bad8de67cd55fb60e7062b6598a3f2dc6c24b..504eacfc94b20ad3dc279f300fbda2d23b956d2b 100644
--- a/test/metabase/driver/postgres_test.clj
+++ b/test/metabase/driver/postgres_test.clj
@@ -6,7 +6,7 @@
             [metabase
              [driver :as driver]
              [query-processor :as qp]
-             [query-processor-test :refer [rows]]
+             [query-processor-test :refer [rows rows+column-names]]
              [sync :as sync]
              [util :as u]]
             [metabase.driver
@@ -17,7 +17,6 @@
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
-            [metabase.query-processor.interface :as qpi]
             [metabase.sync.sync-metadata :as sync-metadata]
             [metabase.test
              [data :as data]
@@ -138,7 +137,7 @@
              [3 "ouija_board"]]}
   (-> (data/dataset metabase.driver.postgres-test/dots-in-names
         (data/run-mbql-query objects.stuff))
-      :data (dissoc :cols :native_form :results_metadata :insights)))
+      rows+column-names))
 
 
 ;; Make sure that duplicate column names (e.g. caused by using a FK) still return both columns
@@ -158,15 +157,15 @@
   (-> (data/dataset metabase.driver.postgres-test/duplicate-names
         (data/run-mbql-query people
           {:fields [$name $bird_id->birds.name]}))
-      :data (dissoc :cols :native_form :results_metadata :insights)))
+      rows+column-names))
 
 
 ;;; Check support for `inet` columns
 (i/def-database-definition ^:private ip-addresses
   [["addresses"
-     [{:field-name "ip", :base-type {:native "inet"}}]
-     [[(hsql/raw "'192.168.1.1'::inet")]
-      [(hsql/raw "'10.4.4.15'::inet")]]]])
+    [{:field-name "ip", :base-type {:native "inet"}}]
+    [[(hsql/raw "'192.168.1.1'::inet")]
+     [(hsql/raw "'10.4.4.15'::inet")]]]])
 
 ;; Filtering by inet columns should add the appropriate SQL cast, e.g. `cast('192.168.1.1' AS inet)` (otherwise this
 ;; wouldn't work)
@@ -299,11 +298,14 @@
 
 ;; Make sure we're able to fingerprint TIME fields (#5911)
 (expect-with-engine :postgres
-                    #{#metabase.models.field.FieldInstance{:name "start_time", :fingerprint {:global {:distinct-count 1}
+                    #{#metabase.models.field.FieldInstance{:name "start_time", :fingerprint {:global {:distinct-count 1
+                                                                                                      :nil% 0.0}
                                                                                              :type {:type/DateTime {:earliest "1970-01-01T22:00:00.000Z", :latest "1970-01-01T22:00:00.000Z"}}}}
-                      #metabase.models.field.FieldInstance{:name "end_time",   :fingerprint {:global {:distinct-count 1}
+                      #metabase.models.field.FieldInstance{:name "end_time",   :fingerprint {:global {:distinct-count 1
+                                                                                                      :nil% 0.0}
                                                                                              :type {:type/DateTime {:earliest "1970-01-01T09:00:00.000Z", :latest "1970-01-01T09:00:00.000Z"}}}}
-    #metabase.models.field.FieldInstance{:name "reason",     :fingerprint {:global {:distinct-count 1}
+    #metabase.models.field.FieldInstance{:name "reason",     :fingerprint {:global {:distinct-count 1
+                                                                                     :nil% 0.0}
                                                                            :type   {:type/Text {:percent-json    0.0
                                                                                                 :percent-url     0.0
                                                                                                 :percent-email   0.0
@@ -395,8 +397,7 @@
 ;; check that values for enum types get wrapped in appropriate CAST() fn calls in `->honeysql`
 (expect-with-engine :postgres
   {:name :cast, :args ["toucan" (keyword "bird type")]}
-  (sqlqp/->honeysql pg-driver (qpi/map->Value {:field {:database-type "bird type", :base-type :type/PostgresEnum}
-                                               :value "toucan"})))
+  (sqlqp/->honeysql pg-driver [:value "toucan" {:database_type "bird type", :base_type :type/PostgresEnum}]))
 
 ;; End-to-end check: make sure everything works as expected when we run an actual query
 (expect-with-engine :postgres
diff --git a/test/metabase/driver/presto_test.clj b/test/metabase/driver/presto_test.clj
index 91747d66706275f0f2398177370e0f6716292362..0d8ad69132faa575675527efcce7249219d58b8d 100644
--- a/test/metabase/driver/presto_test.clj
+++ b/test/metabase/driver/presto_test.clj
@@ -10,6 +10,7 @@
              [data :as data]
              [util :as tu]]
             [metabase.test.data.datasets :as datasets]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db])
   (:import metabase.driver.presto.PrestoDriver))
 
@@ -70,10 +71,6 @@
   "\"weird . \"\"schema\".\"weird.table\"\" name\""
   (#'presto/quote+combine-names "weird . \"schema" "weird.table\" name"))
 
-(expect
-  ["name" "count" "count_2" "sum", "sum_2", "sum_3"]
-  (#'presto/rename-duplicates ["name" "count" "count" "sum" "sum" "sum"]))
-
 ;; DESCRIBE-DATABASE
 (datasets/expect-with-engine :presto
   {:tables #{{:name "categories" :schema "default"}
@@ -137,16 +134,17 @@
   #"com.jcraft.jsch.JSchException:"
   (try
     (let [engine  :presto
-          details {:ssl            false,
-                   :password       "changeme",
-                   :tunnel-host    "localhost",
-                   :tunnel-pass    "BOGUS-BOGUS",
+          details {:ssl            false
+                   :password       "changeme"
+                   :tunnel-host    "localhost"
+                   :tunnel-pass    "BOGUS-BOGUS"
                    :catalog        "BOGUS"
-                   :host           "localhost",
-                   :tunnel-enabled true,
-                   :tunnel-port    22,
+                   :host           "localhost"
+                   :tunnel-enabled true
+                   :tunnel-port    22
                    :tunnel-user    "bogus"}]
-      (driver/can-connect-with-details? engine details :rethrow-exceptions))
+      (tu.log/suppress-output
+        (driver/can-connect-with-details? engine details :rethrow-exceptions)))
     (catch Exception e
       (.getMessage e))))
 
diff --git a/test/metabase/driver/snowflake_test.clj b/test/metabase/driver/snowflake_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5178499f46921fef1fd4a485021dca7e4a8d5aac
--- /dev/null
+++ b/test/metabase/driver/snowflake_test.clj
@@ -0,0 +1,7 @@
+(ns metabase.driver.snowflake-test
+  (:require [metabase.test.data.datasets :refer [expect-with-engine]]
+            [metabase.test.util :as tu]))
+
+(expect-with-engine :snowflake
+  "UTC"
+  (tu/db-timezone-id))
diff --git a/test/metabase/integrations/slack_test.clj b/test/metabase/integrations/slack_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..21f80a3265fdab441b43aba18c6bb396703928f5
--- /dev/null
+++ b/test/metabase/integrations/slack_test.clj
@@ -0,0 +1,122 @@
+(ns metabase.integrations.slack-test
+  (:require [cheshire.core :as json]
+            [clj-http.fake :as http-fake]
+            [clojure.java.io :as io]
+            [expectations :refer :all]
+            [metabase.integrations.slack :as slack-integ :refer :all]
+            [metabase.test.util :as tu]))
+
+(def ^:private default-channels-response
+  (delay (slurp (io/resource "slack_channels_response.json"))))
+
+(def ^:private default-channels
+  (delay (:channels (json/parse-string @default-channels-response keyword))))
+
+(def ^:private channels-request
+  {:address      "https://slack.com/api/channels.list"
+   :query-params {:token            "test-token"
+                  :exclude_archived "true"
+                  :exclude_members  "true"}})
+
+(defn- expected-200-response [body]
+  (fn [_]
+    {:status 200
+     :body   (if (string? body)
+                 body
+                 (json/generate-string body))}))
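+
+;; e.g. (illustrative) `((expected-200-response {:ok true}) nil)` -> {:status 200, :body "{\"ok\":true}"};
+;; the ignored argument is the incoming fake-route request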
+
+(def ^:private invalid-token-response
+  (expected-200-response
+   {:ok    false
+    :error "invalid_auth"}))
+
+(defn- exception-if-called [_]
+  (throw (Exception. "Failure, route should not have been invoked")))
+
+;; Channels should return nil if no Slack token has been configured
+(expect
+  nil
+  (http-fake/with-fake-routes {channels-request exception-if-called}
+    (tu/with-temporary-setting-values [slack-token nil]
+      (channels-list))))
+
+;; Test the channels call and expected response
+(expect
+  @default-channels
+  (http-fake/with-fake-routes {channels-request (expected-200-response @default-channels-response)}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (channels-list))))
+
+;; Test the invalid token auth flow
+(expect
+  {:ex-class clojure.lang.ExceptionInfo
+   :msg      nil
+   :data     {:errors {:slack-token "Invalid token"}}}
+  (http-fake/with-fake-routes {channels-request invalid-token-response}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (tu/exception-and-message
+       (channels-list)))))
+
+(def ^:private default-users-response
+  (delay (slurp (io/resource "slack_users_response.json"))))
+
+(def ^:private default-users
+  (delay (:members (json/parse-string @default-users-response keyword))))
+
+(def ^:private users-request
+  {:address "https://slack.com/api/users.list"
+   :query-params {:token "test-token"}})
+
+;; Users should return nil if no Slack token has been configured
+(expect
+  nil
+  (http-fake/with-fake-routes {users-request exception-if-called}
+    (tu/with-temporary-setting-values [slack-token nil]
+      (users-list))))
+
+;; Test the users call and the expected response
+(expect
+  @default-users
+  (http-fake/with-fake-routes {users-request (expected-200-response @default-users-response)}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (users-list))))
+
+;; Test the invalid token auth flow for users
+(expect
+  {:ex-class clojure.lang.ExceptionInfo
+   :msg      nil
+   :data     {:errors {:slack-token "Invalid token"}}}
+  (http-fake/with-fake-routes {users-request invalid-token-response}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (tu/exception-and-message
+       (users-list)))))
+
+(def ^:private files-request
+  (assoc-in channels-request [:query-params :exclude_archived] "false"))
+
+;; Asking for the files channel when Slack is not configured throws an exception
+(expect
+  {:ex-class clojure.lang.ExceptionInfo
+   :msg      (var-get #'slack-integ/channel-missing-msg)
+   :data     {:status-code 400}}
+  (http-fake/with-fake-routes {files-request exception-if-called}
+    (tu/exception-and-message
+     (files-channel))))
+
+(defn- create-files-channel []
+  (let [channel-name (var-get #'slack-integ/files-channel-name)]
+    (-> @default-channels
+        first
+        (assoc :name            channel-name
+               :name_normalized channel-name
+               :purpose         {:value "Metabase file upload location", :creator "", :last_set 0}))))
+
+;; Testing the call that finds the metabase files channel
+(expect
+  (create-files-channel)
+  (http-fake/with-fake-routes {files-request (-> @default-channels-response
+                                                 json/parse-string
+                                                 (update :channels conj (create-files-channel))
+                                                 expected-200-response)}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (files-channel))))
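+
+;; hypothetical extra check (not in the original change): if a token is configured but the files
+;; channel is absent from the channel list, `files-channel` should raise the same missing-channel error
+(expect
+  {:ex-class clojure.lang.ExceptionInfo
+   :msg      (var-get #'slack-integ/channel-missing-msg)
+   :data     {:status-code 400}}
+  (http-fake/with-fake-routes {files-request (expected-200-response @default-channels-response)}
+    (tu/with-temporary-setting-values [slack-token "test-token"]
+      (tu/exception-and-message
+       (files-channel)))))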
diff --git a/test/metabase/mbql/normalize_test.clj b/test/metabase/mbql/normalize_test.clj
index c53327469a1cf3dc04e5d2a0c4dfeb5d5cfcd7b1..dbcac1e44291ce27942eeed81f26cd231b999c12 100644
--- a/test/metabase/mbql/normalize_test.clj
+++ b/test/metabase/mbql/normalize_test.clj
@@ -173,7 +173,7 @@
   {:query {:filter [:= [:field-id 10] [:relative-datetime :current]]}}
   (#'normalize/normalize-tokens {:query {"FILTER" ["=" [:field-id 10] ["RELATIVE_DATETIME" "CURRENT"]]}}))
 
-;; and in datetime-field clauses (MBQL 98)
+;; and in datetime-field clauses (MBQL 98+)
 (expect
   {:query {:filter [:= [:datetime-field [:field-id 10] :day] "2018-09-05"]}}
   (#'normalize/normalize-tokens {:query {"FILTER" ["=" [:datetime-field ["field_id" 10] "day"] "2018-09-05"]}}))
@@ -609,27 +609,6 @@
   {:query {:filter [:time-interval [:field-id 8] -30 :day]}}
   (#'normalize/canonicalize {:query {:filter [:time-interval [:datetime-field [:field-id 8] :month] -30 :day]}}))
 
-;;; ---------------------------------------------------- order-by ----------------------------------------------------
-
-;; ORDER BY: MBQL 95 [field direction] should get translated to MBQL 98 [direction field]
-(expect
-  {:query {:order-by [[:asc [:field-id 10]]]}}
-  (#'normalize/canonicalize {:query {:order-by [[[:field-id 10] :asc]]}}))
-
-;; MBQL 95 old order-by names should be handled
-(expect
-  {:query {:order-by [[:asc [:field-id 10]]]}}
-  (#'normalize/canonicalize {:query {:order-by [[10 :ascending]]}}))
-
-;; field-id should be added if needed
-(expect
-  {:query {:order-by [[:asc [:field-id 10]]]}}
-  (#'normalize/canonicalize {:query {:order-by [[10 :asc]]}}))
-
-(expect
-  {:query {:order-by [[:asc [:field-id 10]]]}}
-  (#'normalize/canonicalize {:query {:order-by [[:asc 10]]}}))
-
 ;; fk-> clauses should get the field-id treatment
 (expect
   {:query {:filter [:= [:fk-> [:field-id 10] [:field-id 20]] "ABC"]}}
@@ -640,7 +619,7 @@
   {:query {:filter [:= [:datetime-field [:field-id 10] :day] "2018-09-05"]}}
   (#'normalize/canonicalize {:query {:filter [:= [:datetime-field 10 :day] "2018-09-05"]}}))
 
-;; MBQL 95 datetime-field clauses ([:datetime-field <field> :as <unit>]) should get converted to MBQL 98
+;; MBQL 95 datetime-field clauses ([:datetime-field <field> :as <unit>]) should get converted to MBQL 2000
 (expect
   {:query {:filter [:= [:datetime-field [:field-id 10] :day] "2018-09-05"]}}
   (#'normalize/canonicalize {:query {:filter [:= [:datetime-field 10 :as :day] "2018-09-05"]}}))
@@ -661,6 +640,41 @@
   (#'normalize/canonicalize {:query {:filter '(:= 1 10)}}))
 
 
+;;; ---------------------------------------------------- order-by ----------------------------------------------------
+
+;; ORDER BY: MBQL 95 [field direction] should get translated to MBQL 98+ [direction field]
+(expect
+  {:query {:order-by [[:asc [:field-id 10]]]}}
+  (#'normalize/canonicalize {:query {:order-by [[[:field-id 10] :asc]]}}))
+
+;; MBQL 95 old order-by names should be handled
+(expect
+  {:query {:order-by [[:asc [:field-id 10]]]}}
+  (#'normalize/canonicalize {:query {:order-by [[10 :ascending]]}}))
+
+;; field-id should be added if needed
+(expect
+  {:query {:order-by [[:asc [:field-id 10]]]}}
+  (#'normalize/canonicalize {:query {:order-by [[10 :asc]]}}))
+
+(expect
+  {:query {:order-by [[:asc [:field-id 10]]]}}
+  (#'normalize/canonicalize {:query {:order-by [[:asc 10]]}}))
+
+;; we should handle seqs no prob
+(expect
+  {:query {:order-by [[:asc [:field-id 1]]]}}
+  (#'normalize/canonicalize {:query {:order-by '((1 :ascending))}}))
+
+;; duplicate order-by clauses should get removed
+(expect
+  {:query {:order-by [[:asc [:field-id 1]]
+                      [:desc [:field-id 2]]]}}
+  (#'normalize/canonicalize {:query {:order-by [[:asc [:field-id 1]]
+                                                [:desc [:field-id 2]]
+                                                [:asc 1]]}}))
+
+
 ;;; ------------------------------------------------- source queries -------------------------------------------------
 
 ;; Make sure canonicalization works correctly on source queries
@@ -695,6 +709,61 @@
     :query    {:source-query {:source-table 1, :aggregation :rows}}}))
 
 
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                          WHOLE-QUERY TRANSFORMATIONS                                           |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; If you specify a field in a breakout and in the Fields clause, we should go ahead and remove it from the Fields
+;; clause, because it is (obviously) implied that you should get that Field back.
+(expect
+  {:type  :query
+   :query {:breakout [[:field-id 1] [:field-id 2]]
+           :fields   [[:field-id 3]]}}
+  (#'normalize/perform-whole-query-transformations
+   {:type  :query
+    :query {:breakout [[:field-id 1] [:field-id 2]]
+            :fields   [[:field-id 2] [:field-id 3]]}}))
+
+;; should work with FKs
+(expect
+  {:type  :query
+   :query {:breakout [[:field-id 1]
+                      [:fk-> [:field-id 2] [:field-id 4]]]
+           :fields   [[:field-id 3]]}}
+  (#'normalize/perform-whole-query-transformations
+   {:type  :query
+    :query {:breakout [[:field-id 1]
+                       [:fk-> [:field-id 2] [:field-id 4]]]
+            :fields   [[:fk-> [:field-id 2] [:field-id 4]]
+                       [:field-id 3]]}}))
+
+;; should work if the Field is bucketed in the breakout & in fields
+(expect
+  {:type  :query
+   :query {:breakout [[:field-id 1]
+                      [:datetime-field [:fk-> [:field-id 2] [:field-id 4]] :month]]
+           :fields   [[:field-id 3]]}}
+  (#'normalize/perform-whole-query-transformations
+   {:type  :query
+    :query {:breakout [[:field-id 1]
+                       [:datetime-field [:fk-> [:field-id 2] [:field-id 4]] :month]]
+            :fields   [[:datetime-field [:fk-> [:field-id 2] [:field-id 4]] :month]
+                       [:field-id 3]]}}))
+
+;; should work if the Field is bucketed in the breakout but not in fields
+(expect
+  {:type  :query
+   :query {:breakout [[:field-id 1]
+                      [:datetime-field [:fk-> [:field-id 2] [:field-id 4]] :month]]
+           :fields   [[:field-id 3]]}}
+  (#'normalize/perform-whole-query-transformations
+   {:type  :query
+    :query {:breakout [[:field-id 1]
+                       [:datetime-field [:fk-> [:field-id 2] [:field-id 4]] :month]]
+            :fields   [[:fk-> [:field-id 2] [:field-id 4]]
+                       [:field-id 3]]}}))
+
+
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                              REMOVE EMPTY CLAUSES                                              |
 ;;; +----------------------------------------------------------------------------------------------------------------+
diff --git a/test/metabase/mbql/util_test.clj b/test/metabase/mbql/util_test.clj
index 75665fb9d9e9f979fc6549a70b555cd323a829c4..8000e0c6ef6b9df836aaa78803f504e014914afe 100644
--- a/test/metabase/mbql/util_test.clj
+++ b/test/metabase/mbql/util_test.clj
@@ -449,6 +449,33 @@
                                           :order-by     [[:asc [:field-id 10]]]}}
                               [:asc [:datetime-field [:field-id 10] :day]]))
 
+;; Check that `simplify-compound-filter` can apply de Morgan's law on `:not` over `:and`
+(expect
+  [:or
+   [:not [:= [:field-id 1] 2]]
+   [:not [:= [:field-id 2] 3]]]
+  (mbql.u/simplify-compound-filter [:not [:and
+                                          [:= [:field-id 1] 2]
+                                          [:= [:field-id 2] 3]]]))
+
+;; Check that `simplify-compound-filter` can apply de Morgan's law on `:not` over `:or`
+(expect
+  [:and
+   [:not [:= [:field-id 1] 2]]
+   [:not [:= [:field-id 2] 3]]]
+  (mbql.u/simplify-compound-filter [:not [:or
+                                          [:= [:field-id 1] 2]
+                                          [:= [:field-id 2] 3]]]))
+
+;; check that `simplify-compound-filter` doesn't remove `nil` from filters where it's being used as the value
+(expect
+  [:= [:field-id 1] nil]
+  (mbql.u/simplify-compound-filter [:= [:field-id 1] nil]))
+
+(expect
+  [:= [:field-id 1] nil]
+  (mbql.u/simplify-compound-filter [:and nil [:= [:field-id 1] nil]]))
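+
+;; illustrative sketch (assuming existing flattening behavior, not part of the original change):
+;; nested compound filters of the same type should collapse into a single clause
+(expect
+  [:and
+   [:= [:field-id 1] 2]
+   [:= [:field-id 2] 3]
+   [:= [:field-id 3] 4]]
+  (mbql.u/simplify-compound-filter [:and
+                                    [:and
+                                     [:= [:field-id 1] 2]
+                                     [:= [:field-id 2] 3]]
+                                    [:= [:field-id 3] 4]]))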
+
 
 ;;; ---------------------------------------------- aggregation-at-index ----------------------------------------------
 
@@ -480,3 +507,106 @@
 (expect
   [:min [:field-id 1]]
   (mbql.u/aggregation-at-index query-with-some-nesting 1 1))
+
+
+;;; --------------------------------- Unique names & transforming ags to have names ----------------------------------
+
+;; can we generate unique names?
+(expect
+  ["count" "sum" "count_2" "count_3"]
+  (mbql.u/uniquify-names ["count" "sum" "count" "count"]))
+
+(expect
+  [[:named [:count] "count"]
+   [:named [:sum [:field-id 1]] "sum"]
+   [:named [:count] "count_2"]
+   [:named [:count] "count_3"]]
+  (mbql.u/uniquify-named-aggregations [[:named [:count] "count"]
+                                       [:named [:sum [:field-id 1]] "sum"]
+                                       [:named [:count] "count"]
+                                       [:named [:count] "count"]]))
+
+;; what if we try to trick it by using a name it would have generated?
+(expect
+  ["count" "count_2" "count_2_2"]
+  (mbql.u/uniquify-names ["count" "count" "count_2"]))
+
+(expect
+  [[:named [:count] "count"]
+   [:named [:count] "count_2"]
+   [:named [:count] "count_2_2"]]
+  (mbql.u/uniquify-named-aggregations [[:named [:count] "count"]
+                                       [:named [:count] "count"]
+                                       [:named [:count] "count_2"]]))
+
+;; for wacky DBMSes like SQL Server that sometimes return blank column names, make sure we handle
+;; them without exploding
+(expect
+  ["" "_2"]
+  (mbql.u/uniquify-names ["" ""]))
+
+;; can we wrap all of our aggregation clauses in `:named` clauses?
+(defn- simple-ag->name [[ag-name]]
+  (name ag-name))
+
+(expect
+  [[:named [:sum [:field-id 1]] "sum"]
+   [:named [:count [:field-id 1]] "count"]
+   [:named [:sum [:field-id 1]] "sum"]
+   [:named [:avg [:field-id 1]] "avg"]
+   [:named [:sum [:field-id 1]] "sum"]
+   [:named [:min [:field-id 1]] "min"]]
+  (mbql.u/pre-alias-aggregations simple-ag->name
+    [[:sum [:field-id 1]]
+     [:count [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:avg [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:min [:field-id 1]]]))
+
+;; we shouldn't change the name of ones that are already named
+(expect
+  [[:named [:sum [:field-id 1]] "sum"]
+   [:named [:count [:field-id 1]] "count"]
+   [:named [:sum [:field-id 1]] "sum"]
+   [:named [:avg [:field-id 1]] "avg"]
+   [:named [:sum [:field-id 1]] "sum_2"]
+   [:named [:min [:field-id 1]] "min"]]
+  (mbql.u/pre-alias-aggregations simple-ag->name
+    [[:sum [:field-id 1]]
+     [:count [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:avg [:field-id 1]]
+     [:named [:sum [:field-id 1]] "sum_2"]
+     [:min [:field-id 1]]]))
+
+;; ok, can we do the same thing as the tests above but make those names *unique* at the same time?
+(expect
+  [[:named [:sum [:field-id 1]] "sum"]
+   [:named [:count [:field-id 1]] "count"]
+   [:named [:sum [:field-id 1]] "sum_2"]
+   [:named [:avg [:field-id 1]] "avg"]
+   [:named [:sum [:field-id 1]] "sum_3"]
+   [:named [:min [:field-id 1]] "min"]]
+  (mbql.u/pre-alias-and-uniquify-aggregations simple-ag->name
+    [[:sum [:field-id 1]]
+     [:count [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:avg [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:min [:field-id 1]]]))
+
+(expect
+  [[:named [:sum [:field-id 1]] "sum"]
+   [:named [:count [:field-id 1]] "count"]
+   [:named [:sum [:field-id 1]] "sum_2"]
+   [:named [:avg [:field-id 1]] "avg"]
+   [:named [:sum [:field-id 1]] "sum_2_2"]
+   [:named [:min [:field-id 1]] "min"]]
+  (mbql.u/pre-alias-and-uniquify-aggregations simple-ag->name
+    [[:sum [:field-id 1]]
+     [:count [:field-id 1]]
+     [:sum [:field-id 1]]
+     [:avg [:field-id 1]]
+     [:named [:sum [:field-id 1]] "sum_2"]
+     [:min [:field-id 1]]]))
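+
+;; illustrative check (assuming the composition holds): `pre-alias-and-uniquify-aggregations`
+;; should be equivalent to pre-aliasing followed by uniquifying
+(expect
+  (mbql.u/uniquify-named-aggregations
+   (mbql.u/pre-alias-aggregations simple-ag->name [[:count] [:count]]))
+  (mbql.u/pre-alias-and-uniquify-aggregations simple-ag->name [[:count] [:count]]))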
diff --git a/test/metabase/models/dashboard_test.clj b/test/metabase/models/dashboard_test.clj
index 443c5601aeaca9e21fd59c6531e356578eb8ecf3..f23c01044c6666f83d730790588516f4d205bedb 100644
--- a/test/metabase/models/dashboard_test.clj
+++ b/test/metabase/models/dashboard_test.clj
@@ -236,7 +236,7 @@
                                                      users/user->id
                                                      user/permissions-set
                                                      atom)]
-      (let [dashboard (magic/automagic-analysis (Table (id :venues)) {})
+      (let [dashboard                  (magic/automagic-analysis (Table (id :venues)) {})
             rastas-personal-collection (db/select-one-field :id 'Collection
                                          :personal_owner_id api/*current-user-id*)]
         (->> (save-transient-dashboard! dashboard rastas-personal-collection)
diff --git a/test/metabase/models/query/permissions_test.clj b/test/metabase/models/query/permissions_test.clj
index f595eca01e64ba53618a5d51cb887c9257209aef..ad67536744e4eef20ddbef8a9f2a3a8c1237c0c3 100644
--- a/test/metabase/models/query/permissions_test.clj
+++ b/test/metabase/models/query/permissions_test.clj
@@ -14,7 +14,8 @@
             [metabase.test.data :as data]
             [metabase.test.data.users :as users]
+            [metabase.test.util.log :as tu.log]
             [metabase.util :as u]
             [toucan.util.test :as tt]))
 
 ;;; ---------------------------------------------- Permissions Checking ----------------------------------------------
 
@@ -233,5 +234,6 @@
 ;; invalid/legacy queries should return perms for something that doesn't exist so no one gets to see it
 (expect
   #{"/db/0/"}
-  (query-perms/perms-set (data/mbql-query venues
-                           {:filter [:WOW 100 200]})))
+  (tu.log/suppress-output
+    (query-perms/perms-set (data/mbql-query venues
+                             {:filter [:WOW 100 200]}))))
diff --git a/test/metabase/pulse/render_test.clj b/test/metabase/pulse/render_test.clj
index 87bfcf4ec05ac4b069afbdc13ef338d2dd10f6dc..d8259cffa8d6938c2f1dc4543a1f46d0134c6e04 100644
--- a/test/metabase/pulse/render_test.clj
+++ b/test/metabase/pulse/render_test.clj
@@ -5,7 +5,7 @@
             [metabase.pulse
              [color :as color]
              [render :as render :refer :all]]
-            [metabase.query-processor.util :as qputil])
+            [metabase.test.util :as tu])
   (:import java.util.TimeZone))
 
 (def ^:private pacific-tz (TimeZone/getTimeZone "America/Los_Angeles"))
@@ -292,13 +292,26 @@
   (count-displayed-columns
    (concat test-columns [description-col detail-col sensitive-col retired-col])))
 
+(defn- postwalk-collect
+  "Invoke `collect-fn` on each node satisfying `pred`. If `collect-fn` returns a value, accumulate that and return the
+  results."
+  [pred collect-fn form]
+  (let [results (atom [])]
+    (tu/postwalk-pred pred
+                      (fn [node]
+                        (when-let [result (collect-fn node)]
+                          (swap! results conj result))
+                        node)
+                      form)
+    @results))
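+
+;; quick illustrative example (not part of the original change) of how `postwalk-collect`
+;; accumulates results: collect the increment of every integer in a nested form
+(expect
+  [2 3]
+  (postwalk-collect integer? inc [1 [2 "a"]]))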
+
 (defn- find-table-body
   "Given the hiccup data structure, find the table body and return it"
   [results]
-  (qputil/postwalk-collect (every-pred vector? #(= :tbody (first %)))
-                           ;; The Hiccup form is [:tbody (...rows...)], so grab the second item
-                           second
-                           results))
+  (postwalk-collect (every-pred vector? #(= :tbody (first %)))
+                    ;; The Hiccup form is [:tbody (...rows...)], so grab the second item
+                    second
+                    results))
 
 (defn- style-map->background-color
   "Finds the background color in the style string of a Hiccup style map"
@@ -310,10 +323,10 @@
   "Returns a map of cell values to background colors of the pulse table found in the hiccup `results` data
   structure. This only includes the data cell values, not the header values."
   [results]
-  (into {} (qputil/postwalk-collect (every-pred vector? #(= :td (first %)))
-                                    (fn [[_ style-map cell-value]]
-                                      [cell-value (style-map->background-color style-map)])
-                                    results)))
+  (into {} (postwalk-collect (every-pred vector? #(= :td (first %)))
+                             (fn [[_ style-map cell-value]]
+                               [cell-value (style-map->background-color style-map)])
+                             results)))
 
 (defn- query-results->header+rows
   "Makes pulse header and data rows with no bar-width. Including bar-width just adds extra HTML that will be ignored."
diff --git a/test/metabase/query_processor/middleware/add_dimension_projections_test.clj b/test/metabase/query_processor/middleware/add_dimension_projections_test.clj
index 830c7f4758f78f456abf4ad8d8790b8dee9d52d4..84c273be601cbb09a467c1bc39fe7c373bb6af01 100644
--- a/test/metabase/query_processor/middleware/add_dimension_projections_test.clj
+++ b/test/metabase/query_processor/middleware/add_dimension_projections_test.clj
@@ -1,6 +1,7 @@
 (ns metabase.query-processor.middleware.add-dimension-projections-test
-  (:require [expectations :refer :all]
-            [metabase.query-processor.middleware.add-dimension-projections :as add-dim-projections]))
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware.add-dimension-projections :as add-dim-projections]
+            [toucan.hydrate :as hydrate]))
 
 ;;; ----------------------------------------- add-fk-remaps (pre-processing) -----------------------------------------
 
@@ -18,6 +19,15 @@
                  {3 {:name "Product", :field_id 3, :human_readable_field_id 4}})]
     (f)))
 
+;; make sure we create the remap column tuples correctly
+(expect
+  [[[:field-id 3]
+    [:fk-> [:field-id 3] [:field-id 4]]
+    {:name "Product", :field_id 3, :human_readable_field_id 4}]]
+  (do-with-fake-remappings-for-field-3
+   (fn []
+     (#'add-dim-projections/create-remap-col-tuples [[:field-id 1] [:field-id 2] [:field-id 3]]))))
+
 ;; make sure FK remaps add an entry for the FK field to `:fields`, and returns a pair of [dimension-info updated-query]
 (expect
   [[{:name "Product", :field_id 3, :human_readable_field_id 4}]
@@ -44,7 +54,6 @@
 (def ^:private col-defaults
   {:description     nil
    :source          :fields
-   :extra_info      {}
    :fk_field_id     nil
    :visibility_type :normal
    :target          nil
@@ -80,7 +89,6 @@
     :schema_name  "PUBLIC"
     :special_type :type/FK
     :name         "CATEGORY_ID"
-    :extra_info   {:target_table_id 1}
     :id           11
     :display_name "Category ID"
     :base_type    :type/Integer}))
@@ -101,10 +109,7 @@
   {:description     nil
    :table_id        nil
    :name            "Foo"
-   :expression-name "Foo"
-   :source          :fields
    :remapped_from   "CATEGORY_ID"
-   :extra_info      {}
    :remapped_to     nil
    :id              nil
    :target          nil
@@ -123,23 +128,26 @@
                :remapped_to "Foo")
              example-result-cols-price
              example-result-cols-foo]}
-  (#'add-dim-projections/remap-results
-   nil
-   {:rows    [[1 "Red Medicine"                  4 3]
-              [2 "Stout Burgers & Beers"        11 2]
-              [3 "The Apple Pan"                11 2]
-              [4 "Wurstküche"                   29 2]
-              [5 "Brite Spot Family Restaurant" 20 2]]
-    :columns ["ID" "NAME" "CATEGORY_ID" "PRICE"]
-    :cols    [example-result-cols-id
-              example-result-cols-name
-              (assoc example-result-cols-category-id
-                :dimensions {:dimension-id 1, :dimension-type :internal, :dimension-name "Foo", :field-id 10}
-                :values     {:field-value-id        1
-                             :human-readable-values ["Foo" "Bar" "Baz" "Qux"]
-                             :values                [4 11 29 20]
-                             :field-id              33})
-              example-result-cols-price]}))
+  ;; swap out `hydrate` with one that will add some fake dimensions and values for CATEGORY_ID.
+  (with-redefs [hydrate/hydrate (fn [fields & _]
+                                  (for [{field-name :name, :as field} fields]
+                                    (cond-> field
+                                      (= field-name "CATEGORY_ID")
+                                      (assoc :dimensions {:type :internal, :name "Foo", :field_id 10}
+                                             :values     {:human_readable_values ["Foo" "Bar" "Baz" "Qux"]
+                                                          :values                [4 11 29 20]}))))]
+    (#'add-dim-projections/remap-results
+     nil
+     {:rows    [[1 "Red Medicine"                  4 3]
+                [2 "Stout Burgers & Beers"        11 2]
+                [3 "The Apple Pan"                11 2]
+                [4 "Wurstküche"                   29 2]
+                [5 "Brite Spot Family Restaurant" 20 2]]
+      :columns ["ID" "NAME" "CATEGORY_ID" "PRICE"]
+      :cols    [example-result-cols-id
+                example-result-cols-name
+                example-result-cols-category-id
+                example-result-cols-price]})))
 
 ;; test that external remappings get the appropriate `:remapped_from`/`:remapped_to` info
 (def ^:private example-result-cols-category
@@ -150,8 +158,6 @@
     :schema_name     nil
     :special_type    :type/Category
     :name            "CATEGORY"
-    :source          :fields
-    :extra_info      {}
     :fk_field_id     32
     :id              27
     :visibility_type :normal
diff --git a/test/metabase/query_processor/middleware/add_query_throttle_test.clj b/test/metabase/query_processor/middleware/add_query_throttle_test.clj
index 9abe9798be2591bbad8a292c7502ba0d210a4969..9387c696684a55b9da2377a7a79bc485c81c7418 100644
--- a/test/metabase/query_processor/middleware/add_query_throttle_test.clj
+++ b/test/metabase/query_processor/middleware/add_query_throttle_test.clj
@@ -10,14 +10,6 @@
             [metabase.util :as u])
   (:import java.util.concurrent.Semaphore))
 
-(defmacro ^:private exception-and-message [& body]
-  `(try
-     ~@body
-     (catch Exception e#
-       {:ex-class (class e#)
-        :msg      (.getMessage e#)
-        :data     (ex-data e#)})))
-
 (defmacro ^:private with-query-wait-time-in-seconds [time-in-seconds & body]
   `(with-redefs [throttle/max-query-wait-time-in-millis ~(* 1000 time-in-seconds)]
      ~@body))
@@ -30,7 +22,7 @@
    :data     {:status-code 503
               :type        ::throttle/concurrent-query-limit-reached}}
   (with-query-wait-time-in-seconds 1
-    (exception-and-message
+    (tu/exception-and-message
      (let [semaphore (Semaphore. 5)]
        (.acquire semaphore 5)
        ((#'throttle/throttle-queries semaphore (constantly "Should never be returned")) {})))))
@@ -43,7 +35,7 @@
    :data     {:status-code 503
               :type        ::throttle/concurrent-query-limit-reached}}
   (with-query-wait-time-in-seconds 1
-    (exception-and-message
+    (tu/exception-and-message
      (let [semaphore (Semaphore. 5)
            my-qp     (->> identity
                           (#'throttle/throttle-queries semaphore)
diff --git a/test/metabase/query_processor/middleware/add_row_count_and_status_test.clj b/test/metabase/query_processor/middleware/add_row_count_and_status_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..698bbbcf5a47a1eed16de41a33bb5f1988a5a198
--- /dev/null
+++ b/test/metabase/query_processor/middleware/add_row_count_and_status_test.clj
@@ -0,0 +1,23 @@
+(ns metabase.query-processor.middleware.add-row-count-and-status-test
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware.add-row-count-and-status :as add-row-count-and-status]))
+
+(expect
+  {:row_count 5
+   :status    :completed
+   :data      {:rows           [[1] [1] [1] [1] [1]]
+               :rows_truncated 5}}
+  ;; NOTE: the default behavior is to treat the query as having no aggregation and to use :max-results-bare-rows
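+  ;; (here the 5 rows returned hit the bare-rows limit of 5, so the middleware also adds :rows_truncated)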
+  ((add-row-count-and-status/add-row-count-and-status (constantly {:rows [[1] [1] [1] [1] [1]]}))
+    {:constraints {:max-results           10
+                   :max-results-bare-rows 5}}))
+
+(expect
+  {:row_count      5
+   :status         :completed
+   :data           {:rows [[1] [1] [1] [1] [1]]}}
+  ;; when the query has an aggregation we use :max-results for our limit instead
+  ((add-row-count-and-status/add-row-count-and-status (constantly {:rows [[1] [1] [1] [1] [1]]}))
+    {:query       {:aggregation [[:count]]}
+     :constraints {:max-results           10
+                   :max-results-bare-rows 5}}))
diff --git a/test/metabase/query_processor/middleware/add_settings_test.clj b/test/metabase/query_processor/middleware/add_settings_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..03822830bc3a2be970dd19f3fab449686eb2aa7c
--- /dev/null
+++ b/test/metabase/query_processor/middleware/add_settings_test.clj
@@ -0,0 +1,30 @@
+(ns metabase.query-processor.middleware.add-settings-test
+  (:require [expectations :refer [expect]]
+            [metabase.driver :as driver]
+            [metabase.models.setting :as setting]
+            [metabase.query-processor.middleware.add-settings :as add-settings]))
+
+(defrecord ^:private TestDriver []
+  clojure.lang.Named
+  (getName [_] "TestDriver"))
+
+(extend TestDriver
+  driver/IDriver
+  {:features (constantly #{:set-timezone})})
+
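+;; a driver that supports :set-timezone should have a valid report-timezone included in the query's :settings;
+;; unset or empty-string values should be skipped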
+(expect
+  [{:settings {}}
+   {:settings {}}
+   {:settings {:report-timezone "US/Mountain"}}]
+  (let [original-tz (setting/get :report-timezone)
+        response1   ((add-settings/add-settings identity) {:driver (TestDriver.)})]
+    ;; make sure that if the timezone is an empty string we skip it in settings
+    (setting/set! :report-timezone "")
+    (let [response2 ((add-settings/add-settings identity) {:driver (TestDriver.)})]
+      ;; if the timezone is something valid it should show up in the query settings
+      (setting/set! :report-timezone "US/Mountain")
+      (let [response3 ((add-settings/add-settings identity) {:driver (TestDriver.)})]
+        (setting/set! :report-timezone original-tz)
+        [(dissoc response1 :driver)
+         (dissoc response2 :driver)
+         (dissoc response3 :driver)]))))
diff --git a/test/metabase/query_processor/middleware/annotate_and_sort_test.clj b/test/metabase/query_processor/middleware/annotate_and_sort_test.clj
deleted file mode 100644
index 1a33672532080ad1ebef088c8228c4d8790f6917..0000000000000000000000000000000000000000
--- a/test/metabase/query_processor/middleware/annotate_and_sort_test.clj
+++ /dev/null
@@ -1,19 +0,0 @@
-(ns metabase.query-processor.middleware.annotate-and-sort-test
-  (:require [expectations :refer :all]
-            [metabase.query-processor.middleware.annotate-and-sort :as annotate-and-sort]))
-
-;; make sure that `infer-column-types` can still infer types even if the initial value(s) are `nil` (#4256)
-(expect
-  [{:name "a", :display_name "A", :base_type :type/Integer}
-   {:name "b", :display_name "B", :base_type :type/Integer}]
-  (:cols (#'annotate-and-sort/infer-column-types {:columns [:a :b], :rows [[1 nil]
-                                                                           [2 nil]
-                                                                           [3 nil]
-                                                                           [4   5]
-                                                                           [6   7]]})))
-
-;; make sure that `infer-column-types` defaults `base_type` to `type/*` if there are no non-nil
-;; values when we peek.
-(expect
-  [{:name "a", :display_name "A", :base_type :type/*}]
-  (:cols (#'annotate-and-sort/infer-column-types {:columns [:a], :rows [[nil]]})))
diff --git a/test/metabase/query_processor/middleware/annotate_test.clj b/test/metabase/query_processor/middleware/annotate_test.clj
index 8d22d4de845ad3b812bc3a4095ec4119c4986834..c8828a5fe315c19fe6ad6012bb81d51bf1111d9d 100644
--- a/test/metabase/query_processor/middleware/annotate_test.clj
+++ b/test/metabase/query_processor/middleware/annotate_test.clj
@@ -1,105 +1,241 @@
 (ns metabase.query-processor.middleware.annotate-test
   (:require [expectations :refer [expect]]
+            [metabase.models.field :refer [Field]]
             [metabase.query-processor
-             [annotate :as annotate]
-             [interface :as qpi]]
-            [metabase.util :as u]
-            [toucan.util.test :as tt]))
-
-;; make sure when using a source query the right metadata comes back so we are able to do drill-through properly
-(expect
-  [{:field-id   [:field-literal "id" :type/Integer]
-    :field-name "id"
-    :source     :fields}
-   {:field-id   [:field-literal "reciever_id" :type/Integer]
-    :field-name "reciever_id"
-    :source     :fields}
-   {:field-id   [:field-literal "sender_id" :type/Integer]
-    :field-name "sender_id"
-    :source     :fields}
-   {:field-id   [:field-literal "text" :type/Text]
-    :field-name "text"
-    :source     :fields}]
-  (map
-   (u/rpartial select-keys [:field-id :field-name :source])
-   (annotate/collect-fields
-     {:source-query {:source-table       {:schema "public", :name "messages", :id 1}
-                     :fields-is-implicit true
-                     :fields             [(qpi/map->Field
-                                           {:field-id           1
-                                            :field-name         "id"
-                                            :field-display-name "ID"
-                                            :base-type          :type/Integer
-                                            :special-type       :type/PK
-                                            :table-id           1})
-                                          (qpi/map->Field
-                                           {:field-id           2
-                                            :field-name         "reciever_id"
-                                            :field-display-name "Rec I Ever ID"
-                                            :base-type          :type/Integer
-                                            :special-type       :type/FK
-                                            :table-id           1})
-                                          (qpi/map->Field
-                                           {:field-id           3
-                                            :field-name         "sender_id"
-                                            :field-display-name "Sender ID"
-                                            :base-type          :type/Integer
-                                            :special-type       :type/FK
-                                            :table-id           1})
-                                          (qpi/map->Field
-                                           {:field-id           3
-                                            :field-name         "text"
-                                            :field-display-name "Text"
-                                            :base-type          :type/Text
-                                            :special-type       :type/Category
-                                            :table-id           1})]}})))
-
-;; make sure when doing a breakout of a nested query the right metadata comes back (fields are "collected" properly) so things like bar charts work as expected
-(expect
-  [{:field-id [:field-literal "text"        :type/Text],    :field-name "text",        :source :breakout}
-   {:field-id [:field-literal "id"          :type/Integer], :field-name "id",          :source :fields}
-   {:field-id [:field-literal "reciever_id" :type/Integer], :field-name "reciever_id", :source :fields}
-   {:field-id [:field-literal "sender_id"   :type/Integer], :field-name "sender_id",   :source :fields}
-   {:field-id [:field-literal "text"        :type/Text],    :field-name "text",        :source :fields}
-   {:field-id [:field-literal "text"        :type/Text],    :field-name "text",        :source :order-by}
-   {:field-id [:field-literal "text"        :type/Text],    :field-name "text",        :source :order-by}]
-  (map
-   (u/rpartial select-keys [:field-id :field-name :source])
-   (annotate/collect-fields
-     {:aggregation  [[:count]]
-      :breakout     [(qpi/map->FieldLiteral {:field-name "text", :base-type :type/Text, :datetime-unit nil})]
-      :source-query {:source-table       {:schema "public", :name "messages", :id 1}
-                     :fields-is-implicit true
-                     :fields             [(qpi/map->Field
-                                           {:field-id     1
-                                            :field-name   "id"
-                                            :base-type    :type/Integer
-                                            :special-type :type/PK
-                                            :table-id     1})
-                                          (qpi/map->Field
-                                           {:field-id     2
-                                            :field-name   "reciever_id"
-                                            :base-type    :type/Integer
-                                            :special-type :type/FK
-                                            :table-id     1})
-                                          (qpi/map->Field
-                                           {:field-id     3
-                                            :field-name   "sender_id"
-                                            :base-type    :type/Integer
-                                            :special-type :type/FK
-                                            :table-id     1})
-                                          (qpi/map->Field
-                                           {:field-id     4
-                                            :field-name   "text"
-                                            :base-type    :type/Text
-                                            :special-type :type/Category
-                                            :table-id     1})]
-                     :order-by           [{:field     (qpi/map->Field
-                                                       {:field-id     4
-                                                        :field-name   "text"
-                                                        :base-type    :type/Text
-                                                        :special-type :type/Category
-                                                        :table-id     1})
-                                           :direction :ascending}]}
-      :order-by     [{:field     (qpi/map->FieldLiteral {:field-name "text", :base-type :type/Text, :datetime-unit nil})
-                      :direction :ascending}]})))
+             [interface :as qp.i]
+             [store :as qp.store]]
+            [metabase.query-processor.middleware.annotate :as annotate]
+            [metabase.test.data :as data])
+  (:import metabase.driver.h2.H2Driver))
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                             add-native-column-info                                             |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; make sure that `add-native-column-info` can still infer types even if the initial value(s) are `nil` (#4256)
+(expect
+  [{:name "a", :display_name "A", :base_type :type/Integer, :source :native}
+   {:name "b", :display_name "B", :base_type :type/Integer, :source :native}]
+  (:cols (#'annotate/add-native-column-info {:columns [:a :b], :rows [[1 nil]
+                                                                      [2 nil]
+                                                                      [3 nil]
+                                                                      [4   5]
+                                                                      [6   7]]})))
+
+;; make sure that `add-native-column-info` defaults `base_type` to `type/*` if there are no non-nil
+;; values when we peek.
+(expect
+  [{:name "a", :display_name "A", :base_type :type/*, :source :native}]
+  (:cols (#'annotate/add-native-column-info {:columns [:a], :rows [[nil]]})))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                              add-mbql-column-info                                              |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; make sure columns are coming back the way we'd expect
+(expect
+  [(-> (Field (data/id :venues :price))
+       (dissoc :database_type)
+       (assoc :source :fields))]
+  (qp.store/with-store
+    (qp.store/store-field! (Field (data/id :venues :price)))
+    (-> (#'annotate/add-mbql-column-info
+         {:query {:fields [[:field-id (data/id :venues :price)]]}}
+         {:columns [:price]})
+        :cols
+        vec)))
+
+;; when an `fk->` form is used, we should add in `:fk_field_id` info about the source Field
+(expect
+  [(-> (Field (data/id :categories :name))
+       (dissoc :database_type)
+       (assoc :fk_field_id (data/id :venues :category_id), :source :fields))]
+  (qp.store/with-store
+    (qp.store/store-field! (Field (data/id :categories :name)))
+    (-> (#'annotate/add-mbql-column-info
+         {:query {:fields [[:fk->
+                            [:field-id (data/id :venues :category_id)]
+                            [:field-id (data/id :categories :name)]]]}}
+         {:columns [:name]})
+        :cols
+        vec)))
+
+;; when a `:datetime-field` form is used, we should add in info about the `:unit`
+(expect
+  [(-> (Field (data/id :venues :price))
+       (dissoc :database_type)
+       (assoc :unit :month, :source :fields))]
+  (qp.store/with-store
+    (qp.store/store-field! (Field (data/id :venues :price)))
+    (-> (#'annotate/add-mbql-column-info
+         {:query {:fields [[:datetime-field [:field-id (data/id :venues :price)] :month]]}}
+         {:columns [:price]})
+        :cols
+        vec)))
+
+;; datetime unit should work on field literals too
+(expect
+  [{:name         "price"
+    :base_type    :type/Number
+    :display_name "Price"
+    :unit         :month
+    :source       :fields}]
+  (-> (#'annotate/add-mbql-column-info
+       {:query {:fields [[:datetime-field [:field-literal "price" :type/Number] :month]]}}
+       {:columns [:price]})
+      :cols
+      vec))
+
+;; when binning-strategy is used, include `:binning_info`
+(expect
+  [{:name         "price"
+    :base_type    :type/Number
+    :display_name "Price"
+    :unit         :month
+    :source       :fields
+    :binning_info {:num_bins         10
+                   :bin_width        5
+                   :min_value        -100
+                   :max_value        100
+                   :binning_strategy :num-bins}}]
+  (-> (#'annotate/add-mbql-column-info
+       {:query {:fields [[:binning-strategy
+                          [:datetime-field [:field-literal "price" :type/Number] :month]
+                          :num-bins
+                          10
+                          {:num-bins  10
+                           :bin-width 5
+                           :min-value -100
+                           :max-value 100}]]}}
+       {:columns [:price]})
+      :cols
+      vec))
+
+;; test that added information about aggregations looks the way we'd expect
+(defn- aggregation-name [ag-clause]
+  (binding [qp.i/*driver* (H2Driver.)]
+    (annotate/aggregation-name ag-clause)))
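+;; (expression aggregations get infix-style display names; :named clauses use whatever name was supplied)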
+
+(expect
+  "count"
+  (aggregation-name [:count]))
+
+(expect
+  "count"
+  (aggregation-name [:distinct [:field-id 1]]))
+
+(expect
+  "sum"
+  (aggregation-name [:sum [:field-id 1]]))
+
+(expect
+  "count + 1"
+  (aggregation-name [:+ [:count] 1]))
+
+(expect
+  "min + (2 * avg)"
+  (aggregation-name [:+ [:min [:field-id 1]] [:* 2 [:avg [:field-id 2]]]]))
+
+(expect
+  "min + (2 * avg * 3 * (max - 4))"
+  (aggregation-name [:+
+                     [:min [:field-id 1]]
+                     [:*
+                      2
+                      [:avg [:field-id 2]]
+                      3
+                      [:-
+                       [:max [:field-id 3]]
+                       4]]]))
+
+(expect
+  "x"
+  (aggregation-name [:named [:+ [:min [:field-id 1]] [:* 2 [:avg [:field-id 2]]]] "x"]))
+
+(expect
+  "My Cool Aggregation"
+  (aggregation-name [:named [:avg [:field-id 2]] "My Cool Aggregation"]))
+
+;; make sure custom aggregation names get included in the col info
+(defn- col-info-for-aggregation-clause [clause]
+  (binding [qp.i/*driver* (H2Driver.)]
+    (#'annotate/col-info-for-aggregation-clause clause)))
+
+(expect
+  {:base_type    :type/Float
+   :special_type :type/Number
+   :name         "count / 2"
+   :display_name "count / 2"}
+  (col-info-for-aggregation-clause [:/ [:count] 2]))
+
+(expect
+  {:base_type    :type/Float
+   :special_type :type/Number
+   :name         "sum"
+   :display_name "sum"}
+  (qp.store/with-store
+    (data/$ids venues
+      (qp.store/store-field! (Field $price))
+      (col-info-for-aggregation-clause [:sum [:+ [:field-id $price] 1]]))))
+
+;; if a driver is kind enough to supply us with some information about the `:cols` that come back, we should include
+;; that information in the results. The driver's information should be preferred over ours
+(expect
+  {:cols    [{:name         "totalEvents"
+              :display_name "Total Events"
+              :base_type    :type/Text
+              :source       :aggregation}]
+   :columns ["totalEvents"]}
+  (binding [qp.i/*driver* (H2Driver.)]
+    ((annotate/add-column-info (constantly {:cols    [{:name         "totalEvents"
+                                                       :display_name "Total Events"
+                                                       :base_type    :type/Text}]
+                                            :columns ["totalEvents"]}))
+     {:database (data/id)
+      :type     :query
+      :query    {:source-table (data/id :venues)
+                 :aggregation  [[:metric "ga:totalEvents"]]}})))
+
+;; Make sure columns always come back with a unique `:name` key (#8759)
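+;; (a second "count" becomes "count_2"; the literal "count_2" then collides with it, so that one becomes "count_2_2")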
+(expect
+  {:cols
+   [{:base_type    :type/Number
+     :special_type :type/Number
+     :name         "count"
+     :display_name "count"
+     :source       :aggregation}
+    {:source       :aggregation
+     :name         "sum"
+     :display_name "sum"
+     :base_type    :type/Number}
+    {:base_type    :type/Number
+     :special_type :type/Number
+     :name         "count_2"
+     :display_name "count"
+     :source       :aggregation}
+    {:base_type    :type/Number
+     :special_type :type/Number
+     :name         "count_2_2"
+     :display_name "count_2"
+     :source       :aggregation}]
+   :columns ["count" "sum" "count" "count_2"]}
+  (binding [qp.i/*driver* (H2Driver.)]
+    ((annotate/add-column-info (constantly {:cols    [{:name         "count"
+                                                       :display_name "count"
+                                                       :base_type    :type/Number}
+                                                      {:name         "sum"
+                                                       :display_name "sum"
+                                                       :base_type    :type/Number}
+                                                      {:name         "count"
+                                                       :display_name "count"
+                                                       :base_type    :type/Number}
+                                                      {:name         "count_2"
+                                                       :display_name "count_2"
+                                                       :base_type    :type/Number}]
+                                            :columns ["count" "sum" "count" "count_2"]}))
+     {:database (data/id)
+      :type     :query
+      :query    {:source-table (data/id :venues)
+                 :aggregation  [[:count] [:sum] [:count] [:named [:count] "count_2"]]}})))
diff --git a/test/metabase/query_processor/middleware/bind_effective_timezone_test.clj b/test/metabase/query_processor/middleware/bind_effective_timezone_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..4b0e8873de83d8a2121415719f78ad9fe0ae12f4
--- /dev/null
+++ b/test/metabase/query_processor/middleware/bind_effective_timezone_test.clj
@@ -0,0 +1,18 @@
+(ns metabase.query-processor.middleware.bind-effective-timezone-test
+  (:require [expectations :refer [expect]]
+            [metabase.models.database :refer [Database]]
+            [metabase.query-processor.middleware.bind-effective-timezone :as bind-effective-timezone]
+            [metabase.test.util :as tu]
+            [metabase.util :as u]
+            [metabase.util.date :as du]
+            [toucan.util.test :as tt]))
+
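+;; the middleware should bind du/*report-timezone* to the report-timezone setting while the query runs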
+(expect
+  "US/Hawaii"
+  (let [bound-timezone (atom nil)]
+    (tt/with-temp Database [db {:engine :postgres}]
+      (tu/with-temporary-setting-values [report-timezone "US/Hawaii"]
+        ((bind-effective-timezone/bind-effective-timezone (fn [_] (reset! bound-timezone du/*report-timezone*)))
+         {:database (u/get-id db)})))
+    (when-let [^java.util.TimeZone timezone @bound-timezone]
+      (.getID timezone))))
diff --git a/test/metabase/query_processor/middleware/catch_exceptions_test.clj b/test/metabase/query_processor/middleware/catch_exceptions_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..8d5756a7fae532b6716d842565428af8c81fb228
--- /dev/null
+++ b/test/metabase/query_processor/middleware/catch_exceptions_test.clj
@@ -0,0 +1,16 @@
+(ns metabase.query-processor.middleware.catch-exceptions-test
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware.catch-exceptions :as catch-exceptions]))
+
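+;; successful queries should pass through unchanged; a thrown Exception should become a :failed response with the
+;; error message and stacktrace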
+(expect
+  {}
+  ((catch-exceptions/catch-exceptions identity) {}))
+
+(expect
+  {:status     :failed
+   :class      java.lang.Exception
+   :error      "Something went wrong"
+   :stacktrace true
+   :query      {}}
+  (-> ((catch-exceptions/catch-exceptions (fn [_] (throw (Exception. "Something went wrong")))) {})
+      (update :stacktrace boolean)))
diff --git a/test/metabase/query_processor/middleware/cumulative_aggregations_test.clj b/test/metabase/query_processor/middleware/cumulative_aggregations_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..72a3e144a4205e54b4cef9465447006ca7063ad0
--- /dev/null
+++ b/test/metabase/query_processor/middleware/cumulative_aggregations_test.clj
@@ -0,0 +1,113 @@
+(ns metabase.query-processor.middleware.cumulative-aggregations-test
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware.cumulative-aggregations :as cumulative-aggregations]))
+
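+;; `add-rows` sums the two rows' values at each index in the set; positions outside the set keep the second row's values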
+(expect
+  [1 2 3]
+  (#'cumulative-aggregations/add-rows #{} [1 2 3] [1 2 3]))
+
+(expect
+  [2 2 3]
+  (#'cumulative-aggregations/add-rows #{0} [1 2 3] [1 2 3]))
+
+(expect
+  [2 4 3]
+  (#'cumulative-aggregations/add-rows #{0 1} [1 2 3] [1 2 3]))
+
+(expect
+  [1 4 6]
+  (#'cumulative-aggregations/add-rows #{1 2} [1 2 3] [1 2 3]))
+
+(expect
+  IndexOutOfBoundsException
+  (#'cumulative-aggregations/add-rows #{4} [1 2 3] [1 2 3]))
+
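+;; `diff-indecies` returns the set of indexes whose values differ between the two rows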
+(expect
+  #{1}
+  (#'cumulative-aggregations/diff-indecies [:a :b :c] [:a 100 :c]))
+
+(expect
+  #{}
+  (#'cumulative-aggregations/diff-indecies [:a :b :c] [:a :b :c]))
+
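+;; `sum-rows` keeps a running total for each column index in the set, across all the rows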
+(expect
+  [[0] [1] [2] [3] [4] [5] [6] [7] [8] [9]]
+  (#'cumulative-aggregations/sum-rows #{} [[0] [1] [2] [3] [4] [5] [6] [7] [8] [9]]))
+
+(expect
+  [[0] [1] [3] [6] [10] [15] [21] [28] [36] [45]]
+  (#'cumulative-aggregations/sum-rows #{0} [[0] [1] [2] [3] [4] [5] [6] [7] [8] [9]]))
+
+(expect
+  [[0 0] [1 1] [3 2] [6 3] [10 4] [15 5] [21 6] [28 7] [36 8] [45 9]]
+  (#'cumulative-aggregations/sum-rows
+   #{0}
+   [[0 0] [1 1] [2 2] [3 3] [4 4] [5 5] [6 6] [7 7] [8 8] [9 9]]))
+
+(expect
+  [[0 0] [1 1] [3 3] [6 6] [10 10] [15 15] [21 21] [28 28] [36 36] [45 45]]
+  (#'cumulative-aggregations/sum-rows
+   #{0 1}
+   [[0 0] [1 1] [2 2] [3 3] [4 4] [5 5] [6 6] [7 7] [8 8] [9 9]]))
+
+;; make sure `sum-rows` can handle a long sequence of rows (100,000 of them) without a stack overflow
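+;; (the expected values are the running sums (reduce + (range 99999)) = 4999850001 and (reduce + (range 100000)) = 4999950000)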
+(expect
+  [[4999850001] [4999950000]]
+  (drop 99998 (#'cumulative-aggregations/sum-rows
+               #{0}
+               (for [n (range 100000)]
+                 [n]))))
+
+;; does replacing cumulative aggregations work correctly?
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :breakout     [[:field-id 1]]
+              :aggregation  [[:sum [:field-id 1]]]}}
+  (#'cumulative-aggregations/replace-cumulative-ags
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :breakout     [[:field-id 1]]
+               :aggregation  [[:cum-sum [:field-id 1]]]}}))
+
+;; ...even inside expression aggregations?
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1, :aggregation [[:* [:count] 1]]}}
+  (#'cumulative-aggregations/replace-cumulative-ags
+   {:database 1
+    :type     :query
+    :query    {:source-table 1, :aggregation [[:* [:cum-count] 1]]}}))
+
+
+(def ^:private ^{:arglists '([])} return-some-rows
+  (constantly
+   {:rows [[1 1]
+           [2 2]
+           [3 3]
+           [4 4]
+           [5 5]]}))
+
+;; make sure we take breakout fields into account
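+;; (only the :cum-sum column at index 1 gets summed; the breakout column at index 0 passes through unchanged)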
+(expect
+  {:rows [[1 1] [2 3] [3 6] [4 10] [5 15]]}
+  ((cumulative-aggregations/handle-cumulative-aggregations return-some-rows)
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :breakout     [[:field-id 1]]
+               :aggregation  [[:cum-sum [:field-id 1]]]}}))
+
+;; make sure we sum up cumulative aggregations inside expressions correctly
+(expect
+  ;; we shouldn't do anything special with the expressions; let the database figure that out, and just sum the result rows as usual
+  {:rows [[1 1] [2 3] [3 6] [4 10] [5 15]]}
+  ((cumulative-aggregations/handle-cumulative-aggregations return-some-rows)
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :breakout     [[:field-id 1]]
+               :aggregation  [[:+ [:cum-count] 1]]}}))
diff --git a/test/metabase/query_processor/middleware/desugar_test.clj b/test/metabase/query_processor/middleware/desugar_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..1ab56a0849685c1bb145c54a4e0fb15493b5fc43
--- /dev/null
+++ b/test/metabase/query_processor/middleware/desugar_test.clj
@@ -0,0 +1,160 @@
+(ns metabase.query-processor.middleware.desugar-test
+  (:require [metabase.query-processor.middleware.desugar :as desugar]
+            [expectations :refer [expect]]))
+
+(def ^:private ^{:arglists '([query])} desugar
+  (desugar/desugar identity))
+
+;; TODO - test `inside`
+
+;; TODO - test `is-null`
+
+;; TODO - test `not-null`
+
+;;; --------------------------------------- desugaring `time-interval` clauses ---------------------------------------
+
+;; `time-interval` with value > 1 or < -1 should generate a `between` clause
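+;; e.g. [:time-interval [:field-id 1] 2 :month] means "the next 2 months", i.e. between 1 and 2 months from now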
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:between
+                             [:datetime-field [:field-id 1] :month]
+                             [:relative-datetime 1 :month]
+                             [:relative-datetime 2 :month]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:time-interval [:field-id 1] 2 :month]}}))
+
+;; test the `include-current` option -- interval should start or end at `0` instead of `1`
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:between
+                             [:datetime-field [:field-id 1] :month]
+                             [:relative-datetime 0 :month]
+                             [:relative-datetime 2 :month]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:time-interval [:field-id 1] 2 :month {:include-current true}]}}))
+
+;; test using keywords like `:current`
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:=
+                             [:datetime-field [:field-id 1] :week]
+                             [:relative-datetime 0 :week]]}}
+
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:time-interval [:field-id 1] :current :week]}}))
+
+
+;; TODO - test `does-not-contain`
+
+;;; ------------------------------------------ `=` and `!=` with extra args ------------------------------------------
+
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:or
+                             [:= [:field-id 1] 2]
+                             [:= [:field-id 1] 3]
+                             [:= [:field-id 1] 4]
+                             [:= [:field-id 1] 5]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:= [:field-id 1] 2 3 4 5]}}))
+
+;; TODO - test `!=` with extra args
+
+
+;;; ---------------------------- desugaring `:relative-datetime` clauses with `:current` -----------------------------
+
+;; for cases when `:relative-datetime` is being compared to a `:datetime-field` clause, it should take the unit of the
+;; clause it's being compared to
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:=
+                             [:datetime-field [:field-id 1] :minute]
+                             [:relative-datetime 0 :minute]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:=
+                              [:datetime-field [:field-id 1] :minute]
+                              [:relative-datetime :current]]}}))
+
+;; otherwise it should just get a unit of `:default`
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:=
+                             [:field-id 1]
+                             [:relative-datetime 0 :default]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:=
+                              [:field-id 1]
+                              [:relative-datetime :current]]}}))
+
+;; ok, we should be able to handle datetime fields even if they are nested inside another clause
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:=
+                             [:binning-strategy [:datetime-field [:field-id 1] :week] :default]
+                             [:relative-datetime 0 :week]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:=
+                              [:binning-strategy [:datetime-field [:field-id 1] :week] :default]
+                              [:relative-datetime :current]]}}))
+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                            Putting it all together                                             |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+;; filters should get re-simplified after desugaring (if applicable)
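+;; (e.g. [:!= [:field-id 3] "(not set)" "url"] desugars to an :and of two :!= clauses, which is then flattened into
+;; the outer :and)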
+(expect
+  {:database 1
+   :type     :query
+   :query    {:source-table 1
+              :filter       [:and
+                             [:= [:field-id 1] "Run Query"]
+                             [:between
+                              [:datetime-field [:field-id 2] :day]
+                              [:relative-datetime -30 :day]
+                              [:relative-datetime -1 :day]]
+                             [:!= [:field-id 3] "(not set)"]
+                             [:!= [:field-id 3] "url"]]}}
+  (desugar
+   {:database 1
+    :type     :query
+    :query    {:source-table 1
+               :filter       [:and
+                              [:= [:field-id 1] "Run Query"]
+                              [:time-interval [:field-id 2] -30 :day]
+                              [:!= [:field-id 3] "(not set)" "url"]]}}))
diff --git a/test/metabase/query_processor/middleware/fetch_source_query_test.clj b/test/metabase/query_processor/middleware/fetch_source_query_test.clj
index dd550ef05c6166dbf21e792c6f9e5d5193d20824..092cece162dc12ebdada8fa526328b819fd6044b 100644
--- a/test/metabase/query_processor/middleware/fetch_source_query_test.clj
+++ b/test/metabase/query_processor/middleware/fetch_source_query_test.clj
@@ -1,134 +1,109 @@
 (ns metabase.query-processor.middleware.fetch-source-query-test
-  (:require [clj-time.coerce :as tcoerce]
-            [expectations :refer [expect]]
-            [medley.core :as m]
-            [metabase
-             [query-processor :as qp]
-             [util :as u]]
+  (:require [expectations :refer [expect]]
             [metabase.models
              [card :refer [Card]]
              [database :as database]]
             [metabase.query-processor.middleware.fetch-source-query :as fetch-source-query]
             [metabase.test.data :as data]
-            [metabase.util.date :as du]
+            [metabase.util :as u]
             [toucan.util.test :as tt]))
 
 (def ^:private ^{:arglists '([query])} fetch-source-query (fetch-source-query/fetch-source-query identity))
 
-;; make sure that the `fetch-source-query` middleware correctly resolves MBQL queries
-(expect
+(defn- wrap-inner-query [query]
+  {:database     database/virtual-id
+   :type         :query
+   :query        query})
+
+(defn- default-result-with-inner-query [inner-query]
   {:database (data/id)
    :type     :query
-   :query    {:aggregation  [[:count]]
-              :breakout     [[:field-literal "price" :type/Integer]]
-              :source-query {:source-table (data/id :venues)}}}
+   :query    inner-query})
+
+;; make sure that the `fetch-source-query` middleware correctly resolves MBQL queries
+(expect
+  (default-result-with-inner-query
+   {:aggregation  [[:count]]
+    :breakout     [[:field-literal "price" :type/Integer]]
+    :source-query {:source-table (data/id :venues)}})
   (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                             :type     :query
                                             :query    {:source-table (data/id :venues)}}}]
-    (fetch-source-query {:database database/virtual-id
-                         :type     :query
-                         :query    {:source-table (str "card__" (u/get-id card))
-                                    :aggregation  [[:count]]
-                                    :breakout     [[:field-literal "price" :type/Integer]]}})))
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card))
+       :aggregation  [[:count]]
+       :breakout     [[:field-literal "price" :type/Integer]]}))))
 
 ;; make sure that the `fetch-source-query` middleware correctly resolves native queries
 (expect
-  {:database (data/id)
-   :type     :query
-   :query    {:aggregation  [[:count]]
-              :breakout     [[:field-literal "price" :type/Integer]]
-              :source-query {:native (format "SELECT * FROM %s" (data/format-name "venues"))}}}
+  (default-result-with-inner-query
+   {:aggregation  [[:count]]
+    :breakout     [[:field-literal "price" :type/Integer]]
+    :source-query {:native (format "SELECT * FROM %s" (data/format-name "venues"))}})
   (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                             :type     :native
                                             :native   {:query (format "SELECT * FROM %s" (data/format-name "venues"))}}}]
-    (fetch-source-query {:database database/virtual-id
-                         :type     :query
-                         :query    {:source-table (str "card__" (u/get-id card))
-                                    :aggregation  [[:count]]
-                                    :breakout     [[:field-literal "price" :type/Integer]]}})))
-
-(defn- expand-and-scrub [query-map]
-  (-> query-map
-      qp/expand
-      (m/dissoc-in [:database :features])
-      (m/dissoc-in [:database :details])
-      (m/dissoc-in [:database :timezone])
-      (dissoc :driver)))
-
-(defn default-expanded-results [query]
-  {:database     {:name "test-data", :id (data/id), :engine :h2}
-   :type         :query
-   :fk-field-ids #{}
-   :query        query})
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card))
+       :aggregation  [[:count]]
+       :breakout     [[:field-literal "price" :type/Integer]]}))))
 
-;; test that the `metabase.query-processor/expand` function properly handles nested queries (this function should call
-;; `fetch-source-query`)
 (expect
-  (default-expanded-results
-   {:source-query {:source-table (data/id :venues)
-                   :join-tables  nil}})
+  (default-result-with-inner-query
+   {:source-query {:source-table (data/id :venues)}})
   (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                             :type     :query
                                             :query    {:source-table (data/id :venues)}}}]
-    (expand-and-scrub {:database database/virtual-id
-                       :type     :query
-                       :query    {:source-table (str "card__" (u/get-id card))}})))
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card))}))))
 
 (expect
-  (let [date-field-literal {:field-name       "date"
-                            :base-type        :type/Date
-                            :binning-strategy nil
-                            :binning-param    nil
-                            :binning-opts     nil
-                            :fingerprint      nil}]
-    (default-expanded-results
-     {:source-query {:source-table (data/id :checkins)
-                     :join-tables  nil}
-      :filter       {:filter-type :between,
-                     :field       date-field-literal
-                     :min-val     {:value (tcoerce/to-timestamp (du/str->date-time "2015-01-01"))
-                                   :field {:field date-field-literal, :unit :default}},
-                     :max-val     {:value (tcoerce/to-timestamp (du/str->date-time "2015-02-01"))
-                                   :field {:field date-field-literal, :unit :default}}}}))
+  (default-result-with-inner-query
+   {:source-query {:source-table (data/id :checkins)}
+    :filter       [:between [:field-literal "date" :type/Date] "2015-01-01" "2015-02-01"]})
   (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                             :type     :query
                                             :query    {:source-table (data/id :checkins)}}}]
-    (expand-and-scrub {:database database/virtual-id
-                       :type     :query
-                       :query    {:source-table (str "card__" (u/get-id card))
-                                  :filter       ["BETWEEN" ["field-id" ["field-literal" "date" "type/Date"]] "2015-01-01" "2015-02-01"]}})))
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card))
+       :filter       [:between
+                      [:field-literal "date" :type/Date]
+                      "2015-01-01"
+                      "2015-02-01"]}))))
 
 ;; make sure that nested nested queries (i.e. queries whose source query is itself a nested query) work as expected
 (expect
-  {:database (data/id)
-   :type     :query
-   :query    {:limit        25
-              :source-query {:limit        50
-                             :source-query {:source-table (data/id :venues)
-                                            :limit        100}}}}
+  (default-result-with-inner-query
+   {:limit        25
+    :source-query {:limit        50
+                   :source-query {:source-table (data/id :venues)
+                                  :limit        100}}})
   (tt/with-temp* [Card [card-1 {:dataset_query {:database (data/id)
                                                 :type     :query
                                                 :query    {:source-table (data/id :venues), :limit 100}}}]
                   Card [card-2 {:dataset_query {:database database/virtual-id
                                                 :type     :query
                                                 :query    {:source-table (str "card__" (u/get-id card-1)), :limit 50}}}]]
-    ((fetch-source-query/fetch-source-query identity) {:database database/virtual-id
-                                                       :type     :query
-                                                       :query    {:source-table (str "card__" (u/get-id card-2)), :limit 25}})))
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card-2)), :limit 25}))))
 
 (expect
-  (default-expanded-results
+  (default-result-with-inner-query
    {:limit        25
     :source-query {:limit 50
                    :source-query {:source-table (data/id :venues)
-                                  :limit        100
-                                  :join-tables  nil}}})
+                                  :limit        100}}})
   (tt/with-temp* [Card [card-1 {:dataset_query {:database (data/id)
                                                 :type     :query
                                                 :query    {:source-table (data/id :venues), :limit 100}}}]
                   Card [card-2 {:dataset_query {:database database/virtual-id
                                                 :type     :query
                                                 :query    {:source-table (str "card__" (u/get-id card-1)), :limit 50}}}]]
-    (expand-and-scrub {:database database/virtual-id
-                       :type     :query
-                       :query    {:source-table (str "card__" (u/get-id card-2)), :limit 25}})))
+    (fetch-source-query
+     (wrap-inner-query
+      {:source-table (str "card__" (u/get-id card-2)), :limit 25}))))
diff --git a/test/metabase/query_processor/middleware/format_rows_test.clj b/test/metabase/query_processor/middleware/format_rows_test.clj
index 190a1a6f051de08cd14d7b60f22801bb7de29cd8..e7d830e616d390e2123ad00d3204dca526779ace 100644
--- a/test/metabase/query_processor/middleware/format_rows_test.clj
+++ b/test/metabase/query_processor/middleware/format_rows_test.clj
@@ -1,5 +1,8 @@
 (ns metabase.query-processor.middleware.format-rows-test
-  (:require [metabase.query-processor-test :as qpt]
+  (:require [clj-time.coerce :as tc]
+            [expectations :refer [expect]]
+            [metabase.query-processor-test :as qpt]
+            [metabase.query-processor.middleware.format-rows :as format-rows]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -7,7 +10,11 @@
              [dataset-definitions :as defs]
              [datasets :refer [*engine*]]]))
 
-(qpt/expect-with-non-timeseries-dbs-except #{:oracle :mongo :redshift :presto :sparksql}
+(def ^:private dbs-exempt-from-format-rows-tests
+  "DBs to skip the tests below for. TODO - why are so many databases not running these tests?"
+  #{:oracle :mongo :redshift :presto :sparksql :snowflake})
+
+(qpt/expect-with-non-timeseries-dbs-except dbs-exempt-from-format-rows-tests
   (if (= :sqlite *engine*)
     [[1 "Plato Yeshua" "2014-04-01 00:00:00" "08:30:00"]
      [2 "Felipinho Asklepios" "2014-12-05 00:00:00" "15:15:00"]
@@ -26,7 +33,7 @@
             :limit    5}))
        qpt/rows))
 
-(qpt/expect-with-non-timeseries-dbs-except #{:oracle :mongo :redshift :presto :sparksql}
+(qpt/expect-with-non-timeseries-dbs-except dbs-exempt-from-format-rows-tests
   (cond
     (= :sqlite *engine*)
     [[1 "Plato Yeshua" "2014-04-01 00:00:00" "08:30:00"]
@@ -54,3 +61,20 @@
              {:order-by [[:asc $id]]
               :limit    5}))
          qpt/rows)))
+
+
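+;; with no report-timezone setting, timestamps should be formatted as ISO-8601 strings in UTC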
+(expect
+  {:rows [["2011-04-18T10:12:47.232Z"]
+          ["2011-04-18T00:00:00.000Z"]
+          ["2011-04-18T10:12:47.232Z"]]}
+  ((format-rows/format-rows (constantly {:rows [[(tc/to-sql-time 1303121567232)]
+                                                [(tc/to-sql-date "2011-04-18")] ; joda-time assumes this is UTC time when parsing it
+                                                [(tc/to-date 1303121567232)]]}))
+   {:settings {}}))
+
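+;; with a report-timezone, the same instants should be formatted in that zone (UTC+9 for Asia/Tokyo)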
+(expect
+  {:rows [["2011-04-18T19:12:47.232+09:00"]
+          ["2011-04-18T09:00:00.000+09:00"]
+          ["2011-04-18T19:12:47.232+09:00"]]}
+  ((format-rows/format-rows (constantly {:rows [[(tc/to-sql-time 1303121567232)]
+                                                [(tc/to-sql-date "2011-04-18")] ; joda-time assumes this is UTC time when parsing it
+                                                [(tc/to-date 1303121567232)]]}))
+   {:settings {:report-timezone "Asia/Tokyo"}}))
diff --git a/test/metabase/query_processor/middleware/limit_test.clj b/test/metabase/query_processor/middleware/limit_test.clj
index 7548f7b18ff2ac3c327f8417f3a510a9c389d870..2d87d94eef3f890be73541674ac86b96ad99281e 100644
--- a/test/metabase/query_processor/middleware/limit_test.clj
+++ b/test/metabase/query_processor/middleware/limit_test.clj
@@ -1,32 +1,37 @@
 (ns metabase.query-processor.middleware.limit-test
   "Tests for the `:limit` clause and `:max-results` constraints."
-  (:require [expectations :refer :all]
+  (:require [expectations :refer [expect]]
             [metabase.query-processor.interface :as i]
             [metabase.query-processor.middleware.limit :as limit]))
 
 ;;; --------------------------------------------- LIMIT-MAX-RESULT-ROWS ----------------------------------------------
 
+(def ^:private ^{:arglists '([query])} limit (limit/limit identity))
+
 ;; Apply limit-max-result-rows to an infinite sequence and make sure it gets capped at `i/absolute-max-results`
 (expect
   i/absolute-max-results
-  (->> ((limit/limit identity) {:rows (repeat [:ok])})
+  (->> (limit {:type :native
+               :rows (repeat [:ok])})
        :rows
        count))
 
 ;; Apply an arbitrary max-results on the query and ensure our results size is appropriately constrained
 (expect
   1234
-  (->> ((limit/limit identity) {:constraints {:max-results 1234}
-                                :query       {:aggregation [[:count]]}
-                                :rows        (repeat [:ok])})
+  (->> (limit {:constraints {:max-results 1234}
+               :type        :query
+               :query       {:aggregation [[:count]]}
+               :rows        (repeat [:ok])})
        :rows
        count))
 
 ;; Apply a max-results-bare-rows limit specifically on no-aggregation query
 (expect
   [46 46]
-  (let [res ((limit/limit identity) {:constraints {:max-results 46}
-                                     :query       {}
-                                     :rows        (repeat [:ok])})]
+  (let [res (limit {:constraints {:max-results 46}
+                    :type        :query
+                    :query       {}
+                    :rows        (repeat [:ok])})]
     [(->> res :rows count)
      (->> res :query :limit)]))
diff --git a/test/metabase/query_processor/middleware/parameters/mbql_test.clj b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
index cd94f20f240b883c39595c311abd519c0be92be7..75af45f3446485d85e94b8a0fd605a9adcaec154 100644
--- a/test/metabase/query_processor/middleware/parameters/mbql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
@@ -1,12 +1,14 @@
 (ns metabase.query-processor.middleware.parameters.mbql-test
   "Tests for *MBQL* parameter substitution."
-  (:require [expectations :refer :all]
+  (:require [expectations :refer [expect]]
             [metabase
              [query-processor :as qp]
              [query-processor-test :refer [first-row format-rows-by non-timeseries-engines rows]]]
             [metabase.mbql.normalize :as normalize]
             [metabase.query-processor.middleware.parameters.mbql :as mbql-params]
-            [metabase.test.data :as data]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
             [metabase.test.data.datasets :as datasets]
             [metabase.util.date :as du]))
 
@@ -95,7 +97,9 @@
   {:database 1
    :type     :query
    :query    {:source-table 1000
-              :filter       [:= [:field-id (data/id :users :last_login)] [:relative-datetime -1 :day]]
+              :filter       [:=
+                             [:datetime-field [:field-id (data/id :users :last_login)] :day]
+                             [:relative-datetime -1 :day]]
               :breakout     [[:field-id 17]]}}
   (expand-parameters {:database   1
                       :type       :query
@@ -111,7 +115,9 @@
   {:database 1
    :type     :query
    :query    {:source-table 1000
-              :filter       [:between [:field-id (data/id :users :last_login)] "2014-05-10" "2014-05-16"]
+              :filter       [:between [:datetime-field [:field-id (data/id :users :last_login)] :day]
+                             "2014-05-10"
+                             "2014-05-16"]
               :breakout     [[:field-id 17]]}}
   (expand-parameters {:database   1
                       :type       :query
@@ -129,22 +135,25 @@
 ;;; +----------------------------------------------------------------------------------------------------------------+
 
 ;; for some reason param substitution tests fail on Redshift & (occasionally) Crate so just don't run those for now
-(def ^:private ^:const params-test-engines (disj non-timeseries-engines :redshift :crate))
+(def ^:private params-test-engines (disj non-timeseries-engines :redshift :crate))
 
 ;; check that date ranges work correctly
 (datasets/expect-with-engines params-test-engines
   [29]
-  (first-row
-    (format-rows-by [int]
-      (qp/process-query {:database   (data/id)
-                         :type       :query
-                         :query      {:source-table (data/id :checkins)
-                                      :aggregation  [[:count]]}
-                         :parameters [{:hash   "abc123"
-                                       :name   "foo"
-                                       :type   "date"
-                                       :target [:dimension [:field-id (data/id :checkins :date)]]
-                                       :value  "2015-04-01~2015-05-01"}]}))))
+  (do
+    ;; Prevent an issue with Snowflake where a previous connection's report-timezone setting can affect this test's results
+    (when (= :snowflake datasets/*engine*) (tu/clear-connection-pool (data/id)))
+    (first-row
+      (format-rows-by [int]
+        (qp/process-query {:database   (data/id)
+                           :type       :query
+                           :query      {:source-table (data/id :checkins)
+                                        :aggregation  [[:count]]}
+                           :parameters [{:hash   "abc123"
+                                         :name   "foo"
+                                         :type   "date"
+                                         :target [:dimension [:field-id (data/id :checkins :date)]]
+                                         :value  "2015-04-01~2015-05-01"}]})))))
 
 ;; check that IDs work correctly (passed in as numbers)
 (datasets/expect-with-engines params-test-engines
@@ -223,33 +232,37 @@
                 "FROM \"PUBLIC\".\"VENUES\" "
                 "WHERE (\"PUBLIC\".\"VENUES\".\"PRICE\" = 3 OR \"PUBLIC\".\"VENUES\".\"PRICE\" = 4)")
    :params nil}
-  (let [outer-query (-> (data/mbql-query venues
-                          {:aggregation [[:count]]})
-                        (assoc :parameters [{:name   "price"
-                                             :type   :category
-                                             :target [:field-id (data/id :venues :price)]
-                                             :value  [3 4]}]))]
-    (-> (qp/process-query outer-query)
-        :data :native_form)))
+  (let [query (-> (data/mbql-query venues
+                    {:aggregation [[:count]]})
+                  (assoc :parameters [{:name   "price"
+                                       :type   :category
+                                       :target [:field-id (data/id :venues :price)]
+                                       :value  [3 4]}]))]
+    (-> query qp/process-query :data :native_form)))
 
 ;; try it with date params as well. Even though there's no way to do this in the frontend AFAIK there's no reason we
 ;; can't handle it on the backend
+;;
+;; TODO - If we actually wanted to generate efficient queries we should be doing something like
+;;
+;;    WHERE cast(DATE AS date) IN (cast(? AS date), cast(? AS date))
+;;
+;; instead of all these BETWEENs
 (datasets/expect-with-engine :h2
   {:query  (str "SELECT count(*) AS \"count\" FROM \"PUBLIC\".\"CHECKINS\" "
-                "WHERE (CAST(\"PUBLIC\".\"CHECKINS\".\"DATE\" AS date) BETWEEN CAST(? AS date) AND CAST(? AS date) "
-                "OR CAST(\"PUBLIC\".\"CHECKINS\".\"DATE\" AS date) BETWEEN CAST(? AS date) AND CAST(? AS date))")
+                "WHERE (CAST(\"PUBLIC\".\"CHECKINS\".\"DATE\" AS date) BETWEEN CAST(? AS date) AND CAST(? AS date)"
+                " OR CAST(\"PUBLIC\".\"CHECKINS\".\"DATE\" AS date) BETWEEN CAST(? AS date) AND CAST(? AS date))")
    :params [(du/->Timestamp #inst "2014-06-01")
             (du/->Timestamp #inst "2014-06-30")
             (du/->Timestamp #inst "2015-06-01")
             (du/->Timestamp #inst "2015-06-30")]}
-  (let [outer-query (-> (data/mbql-query checkins
-                          {:aggregation [[:count]]})
-                        (assoc :parameters [{:name   "date"
-                                             :type   "date/month"
-                                             :target [:field-id (data/id :checkins :date)]
-                                             :value  ["2014-06" "2015-06"]}]))]
-    (-> (qp/process-query outer-query)
-        :data :native_form)))
+  (let [query (-> (data/mbql-query checkins
+                    {:aggregation [[:count]]})
+                  (assoc :parameters [{:name   "date"
+                                       :type   "date/month"
+                                       :target [:field-id (data/id :checkins :date)]
+                                       :value  ["2014-06" "2015-06"]}]))]
+    (-> query qp/process-query :data :native_form)))
 
 ;; make sure that "ID" type params get converted to numbers when appropriate
 (expect
diff --git a/test/metabase/query_processor/middleware/parameters/sql_test.clj b/test/metabase/query_processor/middleware/parameters/sql_test.clj
index bddb98f46a76e756b7b6003ecd50541c5cb25ca5..8773547852ab2e6f3e9747d056a1838ec8045089 100644
--- a/test/metabase/query_processor/middleware/parameters/sql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/sql_test.clj
@@ -13,11 +13,11 @@
             [metabase.test
              [data :as data]
              [util :as tu]]
-            [metabase.test.data
-             [datasets :as datasets]
-             [generic-sql :as generic-sql]]
-            [metabase.util.date :as du]
-            [toucan.db :as db]))
+            [metabase.test.data.datasets :as datasets]
+            [metabase.util
+             [date :as du]
+             [schema :as su]]
+            [schema.core :as s]))
 
 ;;; ----------------------------------------------- basic parser tests -----------------------------------------------
 
@@ -319,6 +319,26 @@
   (into {} (#'sql/value-for-tag {:name "checkin_date", :display-name "Checkin Date", :type :dimension, :dimension [:field-id (data/id :checkins :date)]}
                                 nil)))
 
+;; dimension -- required but unspecified
+(expect Exception
+  (into {} (#'sql/value-for-tag {:name "checkin_date", :display-name "Checkin Date", :type "dimension", :required true,
+                                 :dimension ["field-id" (data/id :checkins :date)]}
+                                nil)))
+
+;; dimension -- required and default specified
+(expect
+  {:field {:name      "DATE"
+           :parent_id nil
+           :table_id  (data/id :checkins)
+           :base_type :type/Date}
+   :param {:type   :dimension
+           :target [:dimension [:template-tag "checkin_date"]]
+           :value  "2015-04-01~2015-05-01"}}
+  (into {} (#'sql/value-for-tag {:name "checkin_date", :display-name "Checkin Date", :type :dimension, :required true, :default "2015-04-01~2015-05-01",
+                                 :dimension [:field-id (data/id :checkins :date)]}
+                                nil)))
+
+
 ;; multiple values for the same tag should return a vector with multiple params instead of a single param
 (expect
   {:field {:name      "DATE"
@@ -534,19 +554,12 @@
 
 ;;; -------------------------------------------- "REAL" END-TO-END-TESTS ---------------------------------------------
 
-(defn- quote-name [identifier]
-  (generic-sql/quote-name datasets/*driver* identifier))
-
-(defn- checkins-identifier []
-  ;; HACK ! I don't have all day to write protocol methods to make this work the "right" way so for BigQuery and
-  ;; Presto we will just hackily return the correct identifier here
-  (case datasets/*engine*
-    :bigquery "`test_data.checkins`"
-    :presto   "\"default\".\"checkins\""
-    (let [{table-name :name, schema :schema} (db/select-one ['Table :name :schema], :id (data/id :checkins))]
-      (str (when (seq schema)
-             (str (quote-name schema) \.))
-           (quote-name table-name)))))
+(s/defn ^:private checkins-identifier :- su/NonBlankString
+  "Get the identifier used for `checkins` for the current driver by looking at what the driver uses when converting MBQL
+  to SQL. Different drivers qualify to different degrees (e.g. `table` vs `schema.table` vs `database.schema.table`)."
+  []
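+  ;; e.g. on H2 this returns something like "PUBLIC"."CHECKINS"; the regex grabs the first token following FROM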
+  (let [sql (:query (qp/query->native (data/mbql-query checkins)))]
+    (second (re-find #"FROM\s([^\s()]+)" sql))))
 
 ;; as with the MBQL parameters tests Redshift and Crate fail for unknown reasons; disable their tests for now
 (def ^:private ^:const sql-parameters-engines
@@ -584,8 +597,8 @@
                                                      :dimension    [:field-id (data/id :checkins :date)]}}}
         :parameters []))))
 
-;; test that relative dates work correctly. It should be enough to try just one type of relative date here,
-;; since handling them gets delegated to the functions in `metabase.query-processor.parameters`, which is fully-tested :D
+;; test that relative dates work correctly. It should be enough to try just one type of relative date here, since
+;; handling them gets delegated to the functions in `metabase.query-processor.parameters`, which is fully-tested :D
 (datasets/expect-with-engines sql-parameters-engines
   [0]
   (first-row
@@ -619,6 +632,13 @@
      (= :presto datasets/*engine*)
      "2018-04-18"
 
+     ;; Snowflake appears to have a bug in its JDBC driver when including the target timezone along with the SQL
+     ;; date parameter. The value below is not correct, but it is what the driver returns right now. This bug is
+     ;; written up as https://github.com/metabase/metabase/issues/8804; once it is fixed, this case should be
+     ;; removed, since Snowflake should then return the same value as the other drivers that support a report timezone
+     (= :snowflake datasets/*engine*)
+     "2018-04-16T17:00:00.000-07:00"
+
      (qpt/supports-report-timezone? datasets/*engine*)
      "2018-04-18T00:00:00.000-07:00"
 
@@ -627,12 +647,13 @@
   (tu/with-temporary-setting-values [report-timezone "America/Los_Angeles"]
     (first-row
       (process-native
-        :native     {:query         (cond
-                                      (= :bigquery datasets/*engine*)
+        :native     {:query         (case datasets/*engine*
+                                      :bigquery
                                       "SELECT {{date}} as date"
-                                      (= :oracle datasets/*engine*)
+
+                                      :oracle
                                       "SELECT cast({{date}} as date) from dual"
-                                      :else
+
                                       "SELECT cast({{date}} as date)")
                      :template-tags {"date" {:name "date" :display-name "Date" :type :date}}}
         :parameters [{:type :date/single :target [:variable [:template-tag "date"]] :value "2018-04-18"}]))))
@@ -640,6 +661,13 @@
 
 ;;; -------------------------------------------- SQL PARAMETERS 2.0 TESTS --------------------------------------------
 
+;; make sure we handle quotes inside names correctly!
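+;; (the `qp.i/*driver*` binding below determines the identifier quoting style used in the generated snippet)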
+(expect
+  {:replacement-snippet     "\"test-data\".\"PUBLIC\".\"checkins\".\"date\"",
+   :prepared-statement-args nil}
+  (binding [qp.i/*driver* (driver/engine->driver :postgres)]
+    (#'sql/honeysql->replacement-snippet-info :test-data.PUBLIC.checkins.date)))
+
 ;; Some random end-to-end param expansion tests added as part of the SQL Parameters 2.0 rewrite
 
 (expect
diff --git a/test/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing_test.clj b/test/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..7c02302d7c28d44f580cbdde11fd3f635f789ce0
--- /dev/null
+++ b/test/metabase/query_processor/middleware/reconcile_breakout_and_order_by_bucketing_test.clj
@@ -0,0 +1,118 @@
+(ns metabase.query-processor.middleware.reconcile-breakout-and-order-by-bucketing-test
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware.reconcile-breakout-and-order-by-bucketing :as reconcile-bucketing]))
+
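+;; `mbql-query` wraps the given clauses in a minimal MBQL query; the middleware is applied with `identity` as the
+;; next QP function, so calling `reconcile-breakout-and-order-by-bucketing` returns the rewritten query directly
+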
+(defn- mbql-query {:style/indent 0} [& clauses]
+  {:database 1
+   :type     :query
+   :query    (apply assoc {:source-table 1} clauses)})
+
+(def ^:private ^{:arglists '([query]), :style/indent 0} reconcile-breakout-and-order-by-bucketing
+  (comp (reconcile-bucketing/reconcile-breakout-and-order-by-bucketing identity) mbql-query))
+
+;; will unbucketed datetime order-bys get bucketed if the Field they reference is bucketed in a `breakout` clause?
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :day]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:field-id 1]]]))
+
+;; should also work with FKs
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:fk-> [:field-id 1] [:field-id 2]] :day]]
+    :order-by [[:asc [:datetime-field [:fk-> [:field-id 1] [:field-id 2]] :day]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:fk-> [:field-id 1] [:field-id 2]] :day]]
+    :order-by [[:asc [:fk-> [:field-id 1] [:field-id 2]]]]))
+
+;; ...and with field literals
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-literal "Corn Field" :type/Text] :day]]
+    :order-by [[:asc [:datetime-field [:field-literal "Corn Field" :type/Text] :day]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-literal "Corn Field" :type/Text] :day]]
+    :order-by [[:asc [:field-literal "Corn Field" :type/Text]]]))
+
+;; unbucketed datetimes in order-bys should be left undisturbed if they are not referenced in the breakout clause;
+;; this is likely an invalid query, but that isn't this middleware's problem
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 2] :day]]
+    :order-by [[:asc [:field-id 1]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 2] :day]]
+    :order-by [[:asc [:field-id 1]]]))
+
+;; similarly, if a datetime field is already bucketed in a different way in the order-by than the same Field in a
+;; breakout clause, we should not do anything, even though the query is likely invalid (we assume you know what you're
+;; doing if you explicitly specify a bucketing)
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :month]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :month]]]))
+
+;; we should be able to fix multiple order-bys
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]
+               [:datetime-field [:field-id 2] :month]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :day]]
+               [:desc [:datetime-field [:field-id 2] :month]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]
+               [:datetime-field [:field-id 2] :month]]
+    :order-by [[:asc  [:field-id 1]]
+               [:desc [:field-id 2]]]))
+
+;; if for some reason a Field is referenced twice in the order bys, we should only bucket unbucketed references
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc  [:datetime-field [:field-id 1] :day]]
+               [:desc [:datetime-field [:field-id 1] :month]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc  [:field-id 1]]
+               [:desc [:datetime-field [:field-id 1] :month]]]))
+
+;; if a Field is referenced twice and we bucket an unbucketed reference, creating duplicate order-by clauses, we
+;; should remove them, as it is illegal in MBQL 2000
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :day]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]]
+    :order-by [[:asc [:field-id 1]]
+               [:asc [:datetime-field [:field-id 1] :day]]]))
+
+;; if there are two breakouts of the same Field with different bucketing, let's just use the bucketing for the first
+;; breakout (?)
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]
+               [:datetime-field [:field-id 1] :month]]
+    :order-by [[:asc [:datetime-field [:field-id 1] :day]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]
+               [:datetime-field [:field-id 1] :month]]
+    :order-by [[:asc [:field-id 1]]]))
+
+;; don't add order bys if there are none
+(expect
+  (mbql-query
+    :breakout [[:datetime-field [:field-id 1] :day]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:datetime-field [:field-id 1] :day]]))
+
+;; we also need to be able to handle bucketing via binning-strategy
+(expect
+  (mbql-query
+    :breakout [[:binning-strategy [:field-id 1] :num-bins 10]]
+    :order-by [[:asc [:binning-strategy [:field-id 1] :num-bins 10]]])
+  (reconcile-breakout-and-order-by-bucketing
+    :breakout [[:binning-strategy [:field-id 1] :num-bins 10]]
+    :order-by [[:asc [:field-id 1]]]))
diff --git a/test/metabase/query_processor/middleware/resolve_joined_tables_test.clj b/test/metabase/query_processor/middleware/resolve_joined_tables_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..e78d114e56cbdb1ce5b2deafcf2e6e6dad74f0fd
--- /dev/null
+++ b/test/metabase/query_processor/middleware/resolve_joined_tables_test.clj
@@ -0,0 +1,35 @@
+(ns metabase.query-processor.middleware.resolve-joined-tables-test
+  (:require [expectations :refer :all]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.query-processor.middleware.resolve-joined-tables :as resolve-joined-tables]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.test.data :as data]))
+
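+;; resolving an `fk->` clause should add a matching `:join-tables` entry, built from the Table/Field metadata placed
+;; in the QP store below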
+(expect
+  {:database (data/id)
+   :type     :query
+   :query    {:source-table (data/id :venues)
+              :fields       [[:field-id (data/id :venues :name)]
+                             [:fk->
+                              [:field-id (data/id :venues :category_id)]
+                              [:field-id (data/id :categories :name)]]]
+              :join-tables  [{:join-alias  "CATEGORIES__via__CATEGORY_ID"
+                              :table-id    (data/id :categories)
+                              :fk-field-id (data/id :venues :category_id)
+                              :pk-field-id (data/id :categories :id)}]}}
+  (qp.store/with-store
+    (qp.store/store-table! (Table (data/id :venues)))
+    (doseq [field-id [(data/id :venues :name)
+                      (data/id :categories :name)
+                      (data/id :venues :category_id)]]
+      (qp.store/store-field! (Field field-id)))
+    ((resolve-joined-tables/resolve-joined-tables identity)
+     {:database (data/id)
+      :type     :query
+      :query    {:source-table (data/id :venues)
+                 :fields       [[:field-id (data/id :venues :name)]
+                                [:fk->
+                                 [:field-id (data/id :venues :category_id)]
+                                 [:field-id (data/id :categories :name)]]]}})))
diff --git a/test/metabase/query_processor/middleware/resolve_test.clj b/test/metabase/query_processor/middleware/resolve_test.clj
deleted file mode 100644
index df54d410271863ae0948dec67414db608ecfa943..0000000000000000000000000000000000000000
--- a/test/metabase/query_processor/middleware/resolve_test.clj
+++ /dev/null
@@ -1,66 +0,0 @@
-(ns metabase.query-processor.middleware.resolve-test
-  (:require [expectations :refer :all]
-            [metabase.models.field :refer [Field]]
-            [metabase.query-processor.middleware
-             [expand :as ql]
-             [resolve :as resolve]]
-            [metabase.test
-             [data :as data]
-             [util :as tu]]
-            [metabase.test.data.dataset-definitions :as defs]
-            [metabase.util :as u]))
-
-(defn- resolved? [field]
-  (not (#'resolve/unresolved-field-id field)))
-
-;; Resolving already resolved fields is a noop
-(expect
-  {:resolved-before? true
-   :fields-same?     true}
-  (data/with-db (data/get-or-create-database! defs/test-data)
-    (let [fields [(Field (u/get-id (data/id :venues :name)))
-                  (Field (u/get-id (data/id :venues :category_id)))]]
-      {:resolved-before? (every? resolved? fields)
-       :fields-same?     (= fields (resolve/resolve-fields-if-needed fields))})))
-
-;; Resolving placholders will return resolved fields
-(expect
-  {:resolved-before? false
-   :resolved-after?  true}
-  (data/with-db (data/get-or-create-database! defs/test-data)
-    (let [field-placeholders [(ql/field-id (data/id :venues :name))
-                              (ql/field-id (data/id :venues :category_id))]]
-      {:resolved-before? (every? resolved? field-placeholders)
-       :resolved-after?  (every? resolved? (resolve/resolve-fields-if-needed field-placeholders))})))
-
-;; Resolving a mixed list of placeholders and fields will only resolve the unresolved-fields
-(expect
-  {:resolved-fields-count   1
-   :unresolved-fields-count 1
-   :all-resolved?           true
-   :resolved-field-same?    true}
-  (data/with-db (data/get-or-create-database! defs/test-data)
-    (let [resolved-field   (Field (u/get-id (data/id :venues :category_id)))
-          both-field-types [(ql/field-id (data/id :venues :name))
-                            resolved-field]
-          result           (resolve/resolve-fields-if-needed both-field-types)]
-      {:resolved-fields-count   (count (filter resolved? both-field-types))
-       :unresolved-fields-count (count (remove resolved? both-field-types))
-       :all-resolved?           (every? resolved? result)
-       :resolved-field-same?    (= resolved-field (second result))})))
-
-;; Resolving the fields should include any relevant dimensions along with the field
-(expect
-  {:field-resolved-before? false
-   :field-resolved-after?  true
-   :dimension-values       [{:dimension-id true, :field-id true, :dimension-name "Foo",
-                              :human-readable-field-id true, :dimension-type :external,
-                              :created-at true, :updated-at true}]}
-  (data/with-db (data/get-or-create-database! defs/test-data)
-    (data/with-data
-      (data/create-venue-category-fk-remapping "Foo")
-      (let [field-with-dimension (ql/field-id (data/id :venues :category_id))
-            result (resolve/resolve-fields-if-needed [field-with-dimension])]
-        {:field-resolved-before? (resolved? field-with-dimension)
-         :field-resolved-after?  (first (map resolved? result))
-         :dimension-values       (tu/boolean-ids-and-timestamps (map :dimensions result))}))))
diff --git a/test/metabase/query_processor/middleware/results_metadata_test.clj b/test/metabase/query_processor/middleware/results_metadata_test.clj
index 46eebf5410355b18b82e6e3758c8bf824492c5b4..a2eb0578f5a8023017dbed26b63417e4d684b787 100644
--- a/test/metabase/query_processor/middleware/results_metadata_test.clj
+++ b/test/metabase/query_processor/middleware/results_metadata_test.clj
@@ -47,34 +47,41 @@
     :special_type "type/Longitude", :fingerprint  (:longitude mutil/venue-fingerprints)}])
 
 (def ^:private default-card-results-native
-  (update-in default-card-results [3 :fingerprint] assoc :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98}}))
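+  ;; number fingerprints now also include the first/third quartiles (:q1/:q3) and standard deviation (:sd)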
+  (update-in default-card-results [3 :fingerprint] assoc :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98, :q1 7.0, :q3 49.0 :sd 23.06}}))
 
 ;; test that Card result metadata is saved after running a Card
 (expect
   default-card-results-native
   (tt/with-temp Card [card]
-    (qp/process-query (assoc (native-query "SELECT ID, NAME, PRICE, CATEGORY_ID, LATITUDE, LONGITUDE FROM VENUES")
-                        :info {:card-id    (u/get-id card)
-                               :query-hash (qputil/query-hash {})}))
-    (round-to-2-decimals (card-metadata card))))
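+    ;; `u/prog1` returns the result of its first form, binding it to `<>` in the remaining forms, so we can assert
+    ;; that the query actually completed before checking the metadata saved on the Card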
+    (u/prog1
+     (qp/process-query (assoc (native-query "SELECT ID, NAME, PRICE, CATEGORY_ID, LATITUDE, LONGITUDE FROM VENUES")
+                         :info {:card-id    (u/get-id card)
+                                :query-hash (qputil/query-hash {})}))
+     (assert (= (:status <>) :completed)))
+    (-> card
+        card-metadata
+        round-to-2-decimals
+        tu/round-fingerprint-cols)))
 
 ;; check that using a Card as your source doesn't overwrite the results metadata...
 (expect
-  {:name "NAME", :display_name "Name", :base_type "type/Text"}
+  [{:name "NAME", :display_name "Name", :base_type "type/Text"}]
   (tt/with-temp Card [card {:dataset_query   (native-query "SELECT * FROM VENUES")
-                            :result_metadata {:name "NAME", :display_name "Name", :base_type "type/Text"}}]
-    (qp/process-query {:database database/virtual-id
-                       :type     :query
-                       :query    {:source-table (str "card__" (u/get-id card))}})
+                            :result_metadata [{:name "NAME", :display_name "Name", :base_type "type/Text"}]}]
+    (u/prog1
+     (qp/process-query {:database database/virtual-id
+                        :type     :query
+                        :query    {:source-table (str "card__" (u/get-id card))}})
+     (assert (= (:status <>) :completed)))
     (card-metadata card)))
 
 ;; ...even when running via the API endpoint
 (expect
-  {:name "NAME", :display_name "Name", :base_type "type/Text"}
+  [{:name "NAME", :display_name "Name", :base_type "type/Text"}]
   (tt/with-temp* [Collection [collection]
                   Card       [card {:collection_id   (u/get-id collection)
                                     :dataset_query   (native-query "SELECT * FROM VENUES")
-                                    :result_metadata {:name "NAME", :display_name "Name", :base_type "type/Text"}}]]
+                                    :result_metadata [{:name "NAME", :display_name "Name", :base_type "type/Text"}]}]]
     (perms/grant-collection-read-permissions! (group/all-users) collection)
     ((users/user->client :rasta) :post 200 "dataset" {:database database/virtual-id
                                                       :type     :query
@@ -104,7 +111,8 @@
                          :native   {:query "SELECT ID, NAME, PRICE, CATEGORY_ID, LATITUDE, LONGITUDE FROM VENUES"}})
       (get-in [:data :results_metadata])
       (update :checksum class)
-      round-to-2-decimals))
+      round-to-2-decimals
+      (->> (tu/round-fingerprint-cols [:columns]))))
 
 ;; make sure that a Card where a DateTime column is broken out by year advertises that column as Text, since you can't
 ;; do datetime breakouts on years
@@ -114,14 +122,15 @@
     :name         "DATE"
     :unit         nil
     :special_type nil
-    :fingerprint  {:global {:distinct-count 618}, :type {:type/DateTime {:earliest "2013-01-03T00:00:00.000Z"
+    :fingerprint  {:global {:distinct-count 618 :nil% 0.0}, :type {:type/DateTime {:earliest "2013-01-03T00:00:00.000Z"
                                                                          :latest   "2015-12-29T00:00:00.000Z"}}}}
    {:base_type    "type/Integer"
     :display_name "count"
     :name         "count"
     :special_type "type/Quantity"
-    :fingerprint  {:global {:distinct-count 3}
-                   :type   {:type/Number {:min 235.0, :max 498.0, :avg 333.33}}}}]
+    :fingerprint  {:global {:distinct-count 3
+                            :nil%           0.0},
+                   :type   {:type/Number {:min 235.0, :max 498.0, :avg 333.33 :q1 243.0, :q3 440.0 :sd 143.5}}}}]
   (tt/with-temp Card [card]
     (qp/process-query {:database (data/id)
                        :type     :query
@@ -130,4 +139,7 @@
                                   :breakout     [[:datetime-field [:field-id (data/id :checkins :date)] :year]]}
                        :info     {:card-id    (u/get-id card)
                                   :query-hash (qputil/query-hash {})}})
-    (round-to-2-decimals (card-metadata card))))
+    (-> card
+        card-metadata
+        round-to-2-decimals
+        tu/round-fingerprint-cols)))
diff --git a/test/metabase/query_processor/middleware/wrap_value_literals_test.clj b/test/metabase/query_processor/middleware/wrap_value_literals_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..9813e69a74960ee847cb98e0f721620cdd7ef00e
--- /dev/null
+++ b/test/metabase/query_processor/middleware/wrap_value_literals_test.clj
@@ -0,0 +1,121 @@
+(ns metabase.query-processor.middleware.wrap-value-literals-test
+  (:require [expectations :refer :all]
+            [metabase.models.field :refer [Field]]
+            [metabase.query-processor.middleware.wrap-value-literals :as wrap-value-literals]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.test.data :as data]
+            [metabase.util.date :as du]))
+
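+;; helper: puts the given Fields in the QP store (where the middleware looks them up), binds the report timezone to
+;; UTC so parsed datetime literals are deterministic, runs the middleware with `identity` as the next QP function,
+;; and returns just the rewritten inner query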
+(defn- wrap-value-literals {:style/indent 1} [field-ids-to-put-in-store inner-query]
+  (qp.store/with-store
+    (doseq [field-id field-ids-to-put-in-store]
+      (qp.store/store-field! (Field field-id)))
+    (binding [du/*report-timezone* (java.util.TimeZone/getTimeZone "UTC")]
+      (-> ((wrap-value-literals/wrap-value-literals identity)
+           {:database (data/id)
+            :type     :query
+            :query    inner-query})
+          :query))))
+
+(expect
+  (data/$ids venues
+    {:source-table (data/id :venues)
+     :filter       [:>
+                    [:field-id $id]
+                    [:value 50 {:base_type     :type/BigInteger
+                                :special_type  :type/PK
+                                :database_type "BIGINT"}]]})
+  (data/$ids venues
+    (wrap-value-literals [$id]
+      {:source-table (data/id :venues)
+       :filter       [:> [:field-id $id] 50]})))
+
+(expect
+  (data/$ids venues
+    {:source-table (data/id :venues)
+     :filter       [:and
+                    [:> [:field-id $id] [:value 50 {:base_type     :type/BigInteger
+                                                    :special_type  :type/PK
+                                                    :database_type "BIGINT"}]]
+                    [:< [:field-id $price] [:value 5 {:base_type     :type/Integer
+                                                      :special_type  :type/Category
+                                                      :database_type "INTEGER"}]]]})
+  (data/$ids venues
+    (wrap-value-literals [$id $price]
+      {:source-table (data/id :venues)
+       :filter       [:and
+                      [:> [:field-id $id] 50]
+                      [:< [:field-id $price] 5]]})))
+
+;; do datetime literal strings get wrapped in `absolute-datetime` clauses when in appropriate filters?
+(expect
+  (data/$ids checkins
+    {:source-table (data/id :checkins)
+     :filter       [:=
+                    [:datetime-field [:field-id $date] :month]
+                    [:absolute-datetime (du/->Timestamp "2018-10-01" "UTC") :month]]})
+  (data/$ids checkins
+    (wrap-value-literals [$date]
+      {:source-table (data/id :checkins)
+       :filter       [:= [:datetime-field [:field-id $date] :month] "2018-10-01"]})))
+
+;; even if the Field in question is not wrapped in a datetime-field clause, we should still auto-bucket the value, but
+;; we should give it a `:default` unit
+(expect
+  (data/$ids checkins
+    {:source-table (data/id :checkins)
+     :filter       [:=
+                    [:field-id $date]
+                    [:absolute-datetime (du/->Timestamp "2018-10-01" "UTC") :default]]})
+  (data/$ids checkins
+    (wrap-value-literals [$date]
+      {:source-table (data/id :checkins)
+       :filter       [:= [:field-id $date] "2018-10-01"]})))
+
+;; should also apply if the Fields are UNIX timestamps or other things with special type of :type/Datetime
+(expect
+  (data/dataset sad-toucan-incidents
+    (data/$ids incidents
+      {:source-table (data/id :incidents)
+       :filter       [:and
+                      [:>
+                       [:datetime-field [:field-id $timestamp] :day]
+                       [:absolute-datetime #inst "2015-06-01T00:00:00.000000000-00:00" :day]]
+                      [:<
+                       [:datetime-field [:field-id $timestamp] :day]
+                       [:absolute-datetime #inst "2015-06-03T00:00:00.000000000-00:00" :day]]]}))
+
+  (data/dataset sad-toucan-incidents
+    (data/$ids incidents
+      (wrap-value-literals [$timestamp]
+        {:source-table (data/id :incidents)
+         :filter       [:and
+                        [:> [:datetime-field [:field-id $timestamp] :day] "2015-06-01"]
+                        [:< [:datetime-field [:field-id $timestamp] :day] "2015-06-03"]]}))))
+
+;; string filters like `starts-with` should not parse datetime strings for obvious reasons
+(expect
+  (data/$ids checkins
+    {:source-table (data/id :checkins)
+     :filter        [:starts-with
+                     [:datetime-field [:field-id $date] :month]
+                     [:value "2018-10-01" {:base_type     :type/Date
+                                           :special_type  nil
+                                           :database_type "DATE"
+                                           :unit          :month}]]})
+  (data/$ids checkins
+    (wrap-value-literals [$date]
+      {:source-table (data/id :checkins)
+       :filter       [:starts-with [:datetime-field [:field-id $date] :month] "2018-10-01"]})))
+
+;; does wrapping value literals work recursively on source queries as well?
+(expect
+  (data/$ids checkins
+    {:source-query {:source-table (data/id :checkins)
+                    :filter       [:>
+                                   [:field-id $date]
+                                   [:absolute-datetime #inst "2014-01-01T00:00:00.000000000-00:00" :default]]}})
+  (data/$ids checkins
+    (wrap-value-literals [$date]
+      {:source-query {:source-table (data/id :checkins)
+                      :filter       [:> [:field-id $date] "2014-01-01"]}})))
diff --git a/test/metabase/query_processor/qp_middleware_test.clj b/test/metabase/query_processor/qp_middleware_test.clj
deleted file mode 100644
index 4153a73a87f7098ba3baff63bb9a4e2536028b5a..0000000000000000000000000000000000000000
--- a/test/metabase/query_processor/qp_middleware_test.clj
+++ /dev/null
@@ -1,97 +0,0 @@
-(ns metabase.query_processor.qp-middleware-test
-  (:require [clj-time.coerce :as tc]
-            [expectations :refer :all]
-            [metabase.driver :as driver]
-            [metabase.models.setting :as setting]
-            [metabase.query-processor.middleware
-             [add-row-count-and-status :as add-row-count-and-status]
-             [add-settings :as add-settings]
-             [catch-exceptions :as catch-exceptions]
-             [format-rows :as format-rows]]))
-
-(defrecord ^:private TestDriver []
-  clojure.lang.Named
-  (getName [_] "TestDriver"))
-
-(extend TestDriver
-  driver/IDriver
-  {:features (constantly #{:set-timezone})})
-
-
-;; catch-exceptions
-
-(expect
-  {}
-  ((catch-exceptions/catch-exceptions identity) {}))
-
-(expect
-  {:status        :failed
-   :class         java.lang.Exception
-   :error         "Something went wrong"
-   :stacktrace    true
-   :query          {}
-   :expanded-query nil}
-  (-> ((catch-exceptions/catch-exceptions (fn [_] (throw (Exception. "Something went wrong")))) {})
-      (update :stacktrace boolean)))
-
-
-;; add-settings/add-settings
-
-(expect
-  [{:settings {}}
-   {:settings {}}
-   {:settings {:report-timezone "US/Mountain"}}]
-  (let [original-tz (setting/get :report-timezone)
-        response1   ((add-settings/add-settings identity) {:driver (TestDriver.)})]
-    ;; make sure that if the timezone is an empty string we skip it in settings
-    (setting/set! :report-timezone "")
-    (let [response2 ((add-settings/add-settings identity) {:driver (TestDriver.)})]
-      ;; if the timezone is something valid it should show up in the query settings
-      (setting/set! :report-timezone "US/Mountain")
-      (let [response3 ((add-settings/add-settings identity) {:driver (TestDriver.)})]
-        (setting/set! :report-timezone original-tz)
-        [(dissoc response1 :driver)
-         (dissoc response2 :driver)
-         (dissoc response3 :driver)]))))
-
-
-;; add-row-count-and-status
-
-(expect
-  {:row_count 5
-   :status    :completed
-   :data      {:rows           [[1] [1] [1] [1] [1]]
-               :rows_truncated 5}}
-  ;; NOTE: the default behavior is to treat the query as no aggregation and use :max-results-bare-rows
-  ((add-row-count-and-status/add-row-count-and-status (constantly {:rows [[1] [1] [1] [1] [1]]}))
-    {:constraints {:max-results           10
-                   :max-results-bare-rows 5}}))
-
-(expect
-  {:row_count      5
-   :status         :completed
-   :data           {:rows [[1] [1] [1] [1] [1]]}}
-  ;; when we aren't a no-aggregation query the then we use :max-results for our limit
-  ((add-row-count-and-status/add-row-count-and-status (constantly {:rows [[1] [1] [1] [1] [1]]}))
-    {:query       {:aggregation [[:count]]}
-     :constraints {:max-results           10
-                   :max-results-bare-rows 5}}))
-
-
-;; format-rows/format-rows
-
-(expect
-  {:rows [["2011-04-18T10:12:47.232Z"]
-          ["2011-04-18T00:00:00.000Z"]
-          ["2011-04-18T10:12:47.232Z"]]}
-  ((format-rows/format-rows (constantly {:rows [[(tc/to-sql-time 1303121567232)]
-                                         [(tc/to-sql-date "2011-04-18")] ; joda-time assumes this is UTC time when parsing it
-                                         [(tc/to-date 1303121567232)]]})) {:settings {}}))
-
-(expect
-  {:rows [["2011-04-18T19:12:47.232+09:00"]
-          ["2011-04-18T09:00:00.000+09:00"]
-          ["2011-04-18T19:12:47.232+09:00"]]}
-  ((format-rows/format-rows (constantly {:rows [[(tc/to-sql-time 1303121567232)]
-                                         [(tc/to-sql-date "2011-04-18")] ; joda-time assumes this is UTC time when parsing it
-                                         [(tc/to-date 1303121567232)]]})) {:settings {:report-timezone "Asia/Tokyo"}}))
diff --git a/test/metabase/query_processor/util_test.clj b/test/metabase/query_processor/util_test.clj
index fa714b08c74d58b2b85c4964dfb54c2b9f6f4092..68d5fa188ae05f1d611eeda3e626a0a74a168c8b 100644
--- a/test/metabase/query_processor/util_test.clj
+++ b/test/metabase/query_processor/util_test.clj
@@ -118,35 +118,6 @@
    :c (TestRecord2. 1)
    :d [1 2 3 4]})
 
-;; Test that we can change only the items matching the `instance?` predicate
-(expect
-  (-> test-tree
-      (update-in [:a :aa :x] inc)
-      (update-in [:b :x] inc))
-  (qputil/postwalk-pred #(instance? TestRecord1 %)
-                        #(update % :x inc)
-                        test-tree))
-
-;; If nothing matches, the original tree should be returned
-(expect
-  test-tree
-  (qputil/postwalk-pred set?
-                        #(set (map inc %))
-                        test-tree))
-
-;; We should be able to collect items matching the predicate
-(expect
-  [(TestRecord1. 1) (TestRecord1. 1)]
-  (qputil/postwalk-collect #(instance? TestRecord1 %)
-                           identity
-                           test-tree))
-
-;; Not finding any of the items should just return an empty seq
-(expect
-  []
-  (qputil/postwalk-collect set?
-                           identity
-                           test-tree))
 
 (def ^:private test-inner-map
   {:test {:value 10}})
diff --git a/test/metabase/query_processor_test.clj b/test/metabase/query_processor_test.clj
index a8ebd8f0bc9d8c1e49d9d79f60a5de4e5eb0ec03..469eb3ddd34ebceed2f402c9c4733371f0a8ff79 100644
--- a/test/metabase/query_processor_test.clj
+++ b/test/metabase/query_processor_test.clj
@@ -94,20 +94,15 @@
 ;; #### categories
 
 (defn- col-defaults []
-  {:extra_info      {}
-   :target          nil
-   :description     nil
+  {:description     nil
    :visibility_type :normal
-   :schema_name     (data/default-schema)
-   :source          :fields
-   :fk_field_id     nil
-   :remapped_from   nil
-   :remapped_to     nil
-   :settings        nil})
+   :settings        nil
+   :parent_id       nil
+   :source          :fields})
 
 (defn- target-field [field]
   (when (data/fks-supported?)
-    (dissoc field :target :extra_info :schema_name :source :fk_field_id :remapped_from :remapped_to :fingerprint)))
+    (dissoc field :target :schema_name :fk_field_id :remapped_from :remapped_to :fingerprint)))
 
 (defn categories-col
   "Return column information for the `categories` column named by keyword COL."
@@ -125,11 +120,12 @@
             :base_type    (data/expected-base-type->actual :type/Text)
             :name         (data/format-name "name")
             :display_name "Name"
-            :fingerprint  {:global {:distinct-count 75}
+            :fingerprint  {:global {:distinct-count 75
+                                    :nil%           0.0}
                            :type   {:type/Text {:percent-json   0.0
                                                 :percent-url    0.0
                                                 :percent-email  0.0
-                                                :average-length 8.333}}}})))
+                                                :average-length 8.33}}}})))
 
 ;; #### users
 (defn users-col
@@ -149,19 +145,21 @@
                   :base_type    (data/expected-base-type->actual :type/Text)
                   :name         (data/format-name "name")
                   :display_name "Name"
-                  :fingerprint  {:global {:distinct-count 15}
+                  :fingerprint  {:global {:distinct-count 15
+                                          :nil%           0.0}
                                  :type   {:type/Text {:percent-json   0.0
                                                       :percent-url    0.0
                                                       :percent-email  0.0
-                                                      :average-length 13.267}}}}
+                                                      :average-length 13.27}}}}
      :last_login {:special_type nil
                   :base_type    (data/expected-base-type->actual :type/DateTime)
                   :name         (data/format-name "last_login")
                   :display_name "Last Login"
                   :unit         :default
-                  :fingerprint  {:global {:distinct-count 11}
-                                 :type   {:type/DateTime {:earliest "2014-01-01T00:00:00.000Z"
-                                                          :latest   "2014-12-05T00:00:00.000Z"}}}})))
+                  :fingerprint  {:global {:distinct-count 15
+                                          :nil%           0.0}
+                                 :type   {:type/DateTime {:earliest "2014-01-01T08:30:00.000Z"
+                                                          :latest   "2014-12-05T15:15:00.000Z"}}}})))
 
 ;; #### venues
 (defn venues-columns
@@ -182,39 +180,46 @@
                    :name         (data/format-name "id")
                    :display_name "ID"
                    :fingerprint  nil}
-     :category_id {:extra_info   (if (data/fks-supported?)
-                                   {:target_table_id (data/id :categories)}
-                                   {})
-                   :target       (target-field (categories-col :id))
-                   :special_type (if (data/fks-supported?)
+     :category_id {:special_type (if (data/fks-supported?)
                                    :type/FK
                                    :type/Category)
                    :base_type    (data/expected-base-type->actual :type/Integer)
                    :name         (data/format-name "category_id")
                    :display_name "Category ID"
                    :fingerprint  (if (data/fks-supported?)
-                                   {:global {:distinct-count 28}}
-                                   {:global {:distinct-count 28}, :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98}}})}
+                                   {:global {:distinct-count 28
+                                             :nil%           0.0}}
+                                   {:global {:distinct-count 28
+                                             :nil%           0.0},
+                                    :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98, :q1 7.0, :q3 49.0 :sd 23.06}}})}
      :price       {:special_type :type/Category
                    :base_type    (data/expected-base-type->actual :type/Integer)
                    :name         (data/format-name "price")
                    :display_name "Price"
-                   :fingerprint  {:global {:distinct-count 4}, :type {:type/Number {:min 1.0, :max 4.0, :avg 2.03}}}}
+                   :fingerprint  {:global {:distinct-count 4
+                                           :nil%           0.0},
+                                  :type {:type/Number {:min 1.0, :max 4.0, :avg 2.03, :q1 1.0, :q3 2.0 :sd 0.77}}}}
      :longitude   {:special_type :type/Longitude
                    :base_type    (data/expected-base-type->actual :type/Float)
                    :name         (data/format-name "longitude")
-                   :fingerprint  {:global {:distinct-count 84}, :type {:type/Number {:min -165.374, :max -73.953, :avg -115.998}}}
+                   :fingerprint  {:global {:distinct-count 84
+                                           :nil%           0.0},
+                                  :type {:type/Number {:min -165.37, :max -73.95, :avg -116.0 :q1 -122.0, :q3 -118.0 :sd 14.16}}}
                    :display_name "Longitude"}
      :latitude    {:special_type :type/Latitude
                    :base_type    (data/expected-base-type->actual :type/Float)
                    :name         (data/format-name "latitude")
                    :display_name "Latitude"
-                   :fingerprint  {:global {:distinct-count 94}, :type {:type/Number {:min 10.065, :max 40.779, :avg 35.506}}}}
+                   :fingerprint  {:global {:distinct-count 94
+                                           :nil%           0.0},
+                                  :type {:type/Number {:min 10.06, :max 40.78, :avg 35.51, :q1 34.0, :q3 38.0 :sd 3.43}}}}
      :name        {:special_type :type/Name
                    :base_type    (data/expected-base-type->actual :type/Text)
                    :name         (data/format-name "name")
                    :display_name "Name"
-                   :fingerprint  {:global {:distinct-count 100}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 15.63}}}})))
+                   :fingerprint  {:global {:distinct-count 100
+                                           :nil%           0.0},
+                                  :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 15.63}}}})))
 
 (defn venues-cols
   "`cols` information for all the columns in `venues`."
@@ -234,30 +239,29 @@
                 :base_type    (data/id-field-type)
                 :name         (data/format-name "id")
                 :display_name "ID"}
-     :venue_id {:extra_info   (if (data/fks-supported?)
-                                {:target_table_id (data/id :venues)}
-                                {})
-                :target       (target-field (venues-col :id))
-                :special_type (when (data/fks-supported?)
+     :venue_id {:special_type (when (data/fks-supported?)
                                 :type/FK)
                 :base_type    (data/expected-base-type->actual :type/Integer)
                 :name         (data/format-name "venue_id")
                 :display_name "Venue ID"
                 :fingerprint  (if (data/fks-supported?)
-                                {:global {:distinct-count 100}}
-                                {:global {:distinct-count 100}, :type {:type/Number {:min 1.0, :max 100.0, :avg 51.965}}})}
-     :user_id  {:extra_info   (if (data/fks-supported?) {:target_table_id (data/id :users)}
-                                  {})
-                :target       (target-field (users-col :id))
-                :special_type (if (data/fks-supported?)
+                                {:global {:distinct-count 100
+                                          :nil%           0.0}}
+                                {:global {:distinct-count 100
+                                          :nil%           0.0},
+                                 :type {:type/Number {:min 1.0, :max 100.0, :avg 51.97, :q1 28.0, :q3 76.0 :sd 28.51}}})}
+     :user_id  {:special_type (if (data/fks-supported?)
                                 :type/FK
                                 :type/Category)
                 :base_type    (data/expected-base-type->actual :type/Integer)
                 :name         (data/format-name "user_id")
                 :display_name "User ID"
                 :fingerprint  (if (data/fks-supported?)
-                                {:global {:distinct-count 15}}
-                                {:global {:distinct-count 15}, :type {:type/Number {:min 1.0, :max 15.0, :avg 7.929}}})})))
+                                {:global {:distinct-count 15
+                                          :nil%           0.0}}
+                                {:global {:distinct-count 15
+                                          :nil%           0.0},
+                                 :type {:type/Number {:min 1.0, :max 15.0, :avg 7.93 :q1 4.0, :q3 11.0 :sd 3.99}}})})))
 
 
 ;;; #### aggregate columns
@@ -269,35 +273,29 @@
     (aggregate-col :count)
     (aggregate-col :avg (venues-col :id))"
   {:arglists '([ag-col-kw] [ag-col-kw field])}
+  ;; TODO - cumulative count doesn't require a FIELD!
   ([ag-col-kw]
-   (case ag-col-kw
-     :count {:base_type    :type/Integer
-             :special_type :type/Number
-             :name         "count"
-             :display_name "count"
-             :id           nil
-             :table_id     nil
-             :description  nil
-             :settings     nil
-             :source       :aggregation
-             :extra_info   {}
-             :target       nil}))
+   (assert (= ag-col-kw :count))
+   {:base_type    :type/Integer
+    :special_type :type/Number
+    :name         "count"
+    :display_name "count"
+    :source       :aggregation})
   ([ag-col-kw {:keys [base_type special_type]}]
    {:pre [base_type special_type]}
-   {:base_type    base_type
-    :special_type special_type
-    :id           nil
-    :table_id     nil
-    :description  nil
-    :settings     nil
-    :source       :aggregation
-    :extra_info   {}
-    :target       nil
-    :name         (name ag-col-kw)
-    :display_name (name ag-col-kw)}))
-
-(defn breakout-col [column]
-  (assoc column :source :breakout))
+   (merge
+    {:base_type    base_type
+     :special_type special_type
+     :settings     nil
+     :name         (name ag-col-kw)
+     :display_name (name ag-col-kw)
+     :source       :aggregation}
+    ;; count always gets the same special type regardless
+    (when (= ag-col-kw :count)
+      (aggregate-col :count)))))
+
+(defn breakout-col [col]
+  (assoc col :source :breakout))
 
 ;; TODO - maybe this needs a new name now that it also removes the results_metadata
 (defn booleanize-native-form
diff --git a/test/metabase/query_processor_test/aggregation_test.clj b/test/metabase/query_processor_test/aggregation_test.clj
index a4620dcf75c210a110d56351c6430df52d8a8efd..258ca94ff0c32aef429062e976b068ab81b0cf76 100644
--- a/test/metabase/query_processor_test/aggregation_test.clj
+++ b/test/metabase/query_processor_test/aggregation_test.clj
@@ -52,7 +52,7 @@
 (qp-expect-with-all-engines
   {:rows        [[15]]
    :columns     ["count"]
-   :cols        [(aggregate-col :count)]
+   :cols        [(aggregate-col :count (Field (data/id :checkins :user_id)))]
    :native_form true}
   (->> (data/run-mbql-query checkins
          {:aggregation [[:distinct $user_id]]})
@@ -159,16 +159,12 @@
 
 ;; make sure that multiple aggregations of the same type have the correct metadata (#4003)
 ;;
-;; (TODO - this isn't tested against Mongo or BigQuery because those drivers don't currently work correctly with
-;; multiple columns with the same name)
-(datasets/expect-with-engines (disj non-timeseries-engines :mongo :bigquery)
+;; TODO - this isn't tested against Mongo because that driver doesn't currently work correctly with multiple columns
+;; with the same name. It seems like it would be pretty easy to take the stuff we have for BigQuery and generalize it
+;; so we can use it with Mongo
+(datasets/expect-with-engines (disj non-timeseries-engines :mongo)
   [(aggregate-col :count)
-   (-> (aggregate-col :count)
-       (dissoc :settings)
-       (assoc
-         :display_name    "Count 2"
-         :name            "count_2"
-         :preview_display true))]
+   (assoc (aggregate-col :count) :name "count_2")]
   (-> (data/run-mbql-query venues
         {:aggregation [[:count] [:count]]})
       :data :cols))
@@ -262,7 +258,8 @@
          {:aggregation [[:cum-sum $id]]
           :breakout    [$price]})
        booleanize-native-form
-       (format-rows-by [int int])))
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 
 ;;; ------------------------------------------------ CUMULATIVE COUNT ------------------------------------------------
@@ -328,8 +325,8 @@
          {:aggregation [[:cum-count $id]]
           :breakout    [$price]})
        booleanize-native-form
-       (format-rows-by [int int])))
-
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 ;; Does Field.settings show up for aggregate Fields?
 (expect
diff --git a/test/metabase/query_processor_test/breakout_test.clj b/test/metabase/query_processor_test/breakout_test.clj
index fd027c8646af935e5a1d118b6a6a98b95ed9a48f..a1143bfacdcfd2de3f34fbfb8cce7da64c37b899 100644
--- a/test/metabase/query_processor_test/breakout_test.clj
+++ b/test/metabase/query_processor_test/breakout_test.clj
@@ -14,10 +14,10 @@
             [metabase.test
              [data :as data]
              [util :as tu]]
-            [metabase.util.i18n :refer [tru]]
             [metabase.test.data
              [dataset-definitions :as defs]
              [datasets :as datasets]]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db]
             [toucan.util.test :as tt]))
 
@@ -34,7 +34,8 @@
           :breakout    [$user_id]
           :order-by    [[:asc $user_id]]})
        booleanize-native-form
-       (format-rows-by [int int])))
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 ;;; BREAKOUT w/o AGGREGATION
 ;; This should act as a "distinct values" query and return ordered results
@@ -47,7 +48,8 @@
          {:breakout [$user_id]
           :limit    10})
        booleanize-native-form
-       (format-rows-by [int])))
+       (format-rows-by [int])
+       tu/round-fingerprint-cols))
 
 
 ;;; "BREAKOUT" - MULTIPLE COLUMNS W/ IMPLICT "ORDER_BY"
@@ -66,7 +68,8 @@
           :breakout    [$user_id $venue_id]
           :limit       10})
        booleanize-native-form
-       (format-rows-by [int int int])))
+       (format-rows-by [int int int])
+       tu/round-fingerprint-cols))
 
 ;;; "BREAKOUT" - MULTIPLE COLUMNS W/ EXPLICIT "ORDER_BY"
 ;; `breakout` should not implicitly order by any fields specified in `order-by`
@@ -85,7 +88,8 @@
           :order-by    [[:desc $user_id]]
           :limit       10})
        booleanize-native-form
-       (format-rows-by [int int int])))
+       (format-rows-by [int int int])
+       tu/round-fingerprint-cols))
 
 (qp-expect-with-all-engines
   {:rows        [[2 8 "Artisan"]
@@ -115,7 +119,8 @@
             :breakout    [$category_id]
             :limit       5})
          booleanize-native-form
-         (format-rows-by [int int str]))))
+         (format-rows-by [int int str])
+         tu/round-fingerprint-cols)))
 
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :foreign-keys)
   [["Wine Bar" "Thai" "Thai" "Thai" "Thai" "Steakhouse" "Steakhouse" "Steakhouse" "Steakhouse" "Southern"]
@@ -214,9 +219,7 @@
 ;;Validate binning info is returned with the binning-strategy
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :binning)
   (assoc (breakout-col (venues-col :latitude))
-    :binning_info {:binning_strategy :bin-width, :bin_width 10.0,
-                   :num_bins         4,          :min_value 10.0
-                   :max_value        50.0})
+    :binning_info {:min_value 10.0, :max_value 50.0, :num_bins 4, :bin_width 10.0, :binning_strategy :bin-width})
   (-> (data/run-mbql-query venues
         {:aggregation [[:count]]
          :breakout    [[:binning-strategy $latitude :default]]})
@@ -226,9 +229,7 @@
 
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :binning)
   (assoc (breakout-col (venues-col :latitude))
-    :binning_info {:binning_strategy :num-bins, :bin_width 7.5,
-                   :num_bins         5,         :min_value 7.5,
-                   :max_value        45.0})
+    :binning_info {:min_value 7.5, :max_value 45.0, :num_bins 5, :bin_width 7.5, :binning_strategy :num-bins})
   (-> (data/run-mbql-query venues
         {:aggregation [[:count]]
          :breakout    [[:binning-strategy $latitude :num-bins 5]]})
@@ -242,9 +243,10 @@
    :class  Exception
    :error  "Unable to bin Field without a min/max value"}
   (tu/with-temp-vals-in-db Field (data/id :venues :latitude) {:fingerprint {:type {:type/Number {:min nil, :max nil}}}}
-    (-> (data/run-mbql-query venues
-          {:aggregation [[:count]]
-           :breakout    [[:binning-strategy $latitude :default]]})
+    (-> (tu.log/suppress-output
+          (data/run-mbql-query venues
+            {:aggregation [[:count]]
+             :breakout    [[:binning-strategy $latitude :default]]}))
         (select-keys [:status :class :error]))))
 
 (defn- field->result-metadata [field]
@@ -272,9 +274,22 @@
 ;; Binning is not supported when there is no fingerprint to determine boundaries
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :binning :nested-queries)
   Exception
-  (tt/with-temp Card [card {:dataset_query {:database (data/id)
-                                            :type     :query
-                                            :query    {:source-query {:source-table (data/id :venues)}}}}]
-    (-> (nested-venues-query card)
-        qp/process-query
-        rows)))
+  (tu.log/suppress-output
+    (tt/with-temp Card [card {:dataset_query {:database (data/id)
+                                              :type     :query
+                                              :query    {:source-query {:source-table (data/id :venues)}}}}]
+      (-> (nested-venues-query card)
+          qp/process-query
+          rows))))
+
+;; if we include a Field in both breakout and fields, does the query still work? (Normalization should be taking care
+;; of this) (#8760)
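+;;
+;; (for illustration: normalization is expected to canonicalize the legacy string clause form into the keyword
+;; form before the query is compiled, roughly ["field_id" 10] -> [:field-id 10], making the :fields entry below
+;; the same shape as the :breakout entry)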
+(expect-with-non-timeseries-dbs
+  :completed
+  (-> (qp/process-query
+        {:database (data/id)
+         :type     :query
+         :query    {:source-table (data/id :venues)
+                    :breakout     [[:field-id (data/id :venues :price)]]
+                    :fields       [["field_id" (data/id :venues :price)]]}})
+      :status))
diff --git a/test/metabase/query_processor_test/date_bucketing_test.clj b/test/metabase/query_processor_test/date_bucketing_test.clj
index e9bff4bc20f1976b793791325cfdbb492faa24f0..084c73570ffe259ee3b9f295f0b03b98e4260486 100644
--- a/test/metabase/query_processor_test/date_bucketing_test.clj
+++ b/test/metabase/query_processor_test/date_bucketing_test.clj
@@ -35,13 +35,14 @@
     (long x)
     x))
 
-(defn- oracle-or-redshift?
-  "We currently have a bug in how report-timezone is used in Oracle. The timeone is applied correctly, but the date
-  operations that we use aren't using that timezone. It's written up as
-  https://github.com/metabase/metabase/issues/5789. This function is used to differentiate Oracle from the other
-  report-timezone databases until that bug can get fixed. Redshift also has this issue."
+(defn tz-shifted-engine-bug?
+  "Returns true if `engine` is affected by the bug originally observed in
+  Oracle (https://github.com/metabase/metabase/issues/5789) but later found in Redshift and Snowflake. The timezone is
+  applied correctly, but the date operations that we use aren't using that timezone. This function is used to
+  differentiate the engines affected by this bug from the other report-timezone databases until it can get fixed."
   [engine]
-  (contains? #{:oracle :redshift} engine))
+  (contains? #{:snowflake :oracle :redshift} engine))
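+;;
+;; e.g., reading straight off the set above:
+;;
+;;    (tz-shifted-engine-bug? :oracle)   ;=> true
+;;    (tz-shifted-engine-bug? :postgres) ;=> false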
 
 (defn- sad-toucan-incidents-with-bucketing
   "Returns 10 sad toucan incidents grouped by `UNIT`"
@@ -128,7 +129,7 @@
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
     ;; There's a bug here where we are reading in the UTC time as pacific, so we're 7 hours off
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     ;; When the reporting timezone is applied, the same datetime value is returned, but set in the pacific timezone
@@ -148,7 +149,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter eastern-tz) (result-date-formatter eastern-tz))
 
     ;; The time instant is the same as UTC (or pacific) but should be offset by the eastern timezone
@@ -172,7 +173,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter eastern-tz) (result-date-formatter eastern-tz))
 
     ;; The JVM timezone should have no impact on a database that uses a report timezone
@@ -197,7 +198,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     (supports-report-timezone? *engine*)
@@ -261,7 +262,7 @@
     (results-by-hour (source-date-formatter utc-tz)
                      result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-hour (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     (supports-report-timezone? *engine*)
@@ -283,7 +284,7 @@
 ;; first three results of the pacific results to the last three of the
 ;; UTC results (i.e. pacific is 7 hours back of UTC at that time)
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]
     [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]])
@@ -387,7 +388,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time pacific-tz)
                     (result-date-formatter pacific-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -420,7 +421,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time eastern-tz)
                     (result-date-formatter eastern-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -453,7 +454,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time pacific-tz)
                     (result-date-formatter pacific-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -478,7 +479,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]
     [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]])
@@ -495,7 +496,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]
     [[1 6] [2 10] [3 4] [4 9] [5  9] [6 8] [7 8] [8  9] [9 7] [10  9]])
@@ -512,7 +513,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]
     [[152 6] [153 10] [154 4] [155 9] [156  9] [157  8] [158 8] [159  9] [160 7] [161  9]])
@@ -582,7 +583,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time pacific-tz)
                      (result-date-formatter pacific-tz)
                      [46 47 40 60 7])
@@ -614,7 +615,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time eastern-tz)
                      (result-date-formatter eastern-tz)
                      [46 47 40 60 7])
@@ -643,7 +644,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time pacific-tz)
                      (result-date-formatter pacific-tz)
                      [46 47 40 60 7])
@@ -669,6 +670,8 @@
 (expect-with-non-timeseries-dbs
   ;; Not really sure why different drivers have different opinions on these </3
   (cond
+    (= :snowflake *engine*)
+    [[22 46] [23 47] [24 40] [25 60] [26 7]]
 
     (contains? #{:sqlserver :sqlite :crate :oracle :sparksql} *engine*)
     [[23 54] [24 46] [25 39] [26 61]]
@@ -807,29 +810,29 @@
                          (cons :relative-datetime relative-datetime-args)]}))
       first-row first int))
 
-;; HACK - Don't run these tests against BigQuery because the databases need to be loaded every time the tests are ran
-;;        and loading data into BigQuery is mind-bogglingly slow. Don't worry, I promise these work though!
+;; HACK - Don't run these tests against BigQuery/etc. because the databases need to be loaded every time the tests
+;;        are run, and loading data into BigQuery/etc. is mind-bogglingly slow. Don't worry, I promise these work though!
 
 ;; Don't run the minute tests against Oracle because the Oracle tests are kind of slow and cause CI to fail randomly
 ;; when it takes so long to load the data that the times are no longer current (these tests pass locally if your
 ;; machine isn't as slow as the CircleCI ones)
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute "current"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute"))
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute  1 "minute"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute  1 "minute"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour "current"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour -1 "hour"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour  1 "hour"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour -1 "hour"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour  1 "hour"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day "current"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day -1 "day"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day  1 "day"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day -1 "day"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day  1 "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 7 (count-of-grouping (checkins:1-per-day) :week "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 7 (count-of-grouping (checkins:1-per-day) :week "current"))
 
 ;; SYNTACTIC SUGAR
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   1
   (-> (data/with-temp-db [_ (checkins:1-per-day)]
         (data/run-mbql-query checkins
@@ -837,7 +840,7 @@
            :filter      [:time-interval $timestamp :current :day]}))
       first-row first int))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   7
   (-> (data/with-temp-db [_ (checkins:1-per-day)]
         (data/run-mbql-query checkins
@@ -859,32 +862,32 @@
                (throw (ex-info "Query failed!" results)))
      :unit (-> results :data :cols first :unit)}))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 7, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "week"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :week}
   (date-bucketing-unit-when-you :breakout-by "week", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :quarter}
   (date-bucketing-unit-when-you :breakout-by "quarter", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :hour}
   (date-bucketing-unit-when-you :breakout-by "hour", :filter-by "day"))
 
 ;; make sure if you use a relative date bucket in the past (e.g. "past 2 months") you get the correct amount of rows
 ;; (#3910)
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 2, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval -2))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 2, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval 2))
diff --git a/test/metabase/query_processor_test/expressions_test.clj b/test/metabase/query_processor_test/expressions_test.clj
index e40b776ca2cd655b0bbf4b8714ae43fe47487fc4..7982c9948ac9bc25b0e7c571634bb32af84f3573 100644
--- a/test/metabase/query_processor_test/expressions_test.clj
+++ b/test/metabase/query_processor_test/expressions_test.clj
@@ -86,9 +86,11 @@
 ;; Custom aggregation expressions should include their type
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :expressions)
   (conj #{{:name "x" :base_type :type/Float}}
-        (if (= datasets/*engine* :oracle)
-          {:name (data/format-name "category_id") :base_type :type/Decimal}
-          {:name (data/format-name "category_id") :base_type :type/Integer}))
+        {:name      (data/format-name "category_id")
+         :base_type (case datasets/*engine*
+                      :oracle    :type/Decimal
+                      :snowflake :type/Number
+                      :type/Integer)})
   (set (map #(select-keys % [:name :base_type])
             (-> (data/run-mbql-query venues
                   {:aggregation [:named [:sum [:* $price -1]] "x"]
diff --git a/test/metabase/query_processor_test/failure_test.clj b/test/metabase/query_processor_test/failure_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..6bec2791803c0e3571a06e251d6aea7d26fe1304
--- /dev/null
+++ b/test/metabase/query_processor_test/failure_test.clj
@@ -0,0 +1,63 @@
+(ns metabase.query-processor-test.failure-test
+  "Tests for how the query processor as a whole handles failures."
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor :as qp]
+            [metabase.query-processor.interface :as qp.i]
+            [metabase.test.data :as data])
+  (:import metabase.driver.h2.H2Driver))
+
+(defn- bad-query []
+  {:database (data/id)
+   :type     :query
+   :query    {:source-table (data/id :venues)
+              :fields       [["datetime_field" (data/id :venues :id) "MONTH"]]}})
+
+(defn- bad-query:preprocessed []
+  {:database (data/id)
+   :type     :query
+   :query    {:source-table (data/id :venues)
+              :fields       [[:datetime-field [:field-id (data/id :venues :id)] :month]]
+              :limit        qp.i/absolute-max-results}
+   :driver   (H2Driver.)
+   :settings {}})
+
+(def ^:private bad-query:native
+  {:query  (str "SELECT parsedatetime(formatdatetime(\"PUBLIC\".\"VENUES\".\"ID\", 'yyyyMM'), 'yyyyMM') AS \"ID\" "
+                "FROM \"PUBLIC\".\"VENUES\" "
+                "LIMIT 1048576")
+   :params nil})
+
+(def ^:private ^{:arglists '([stacktrace])} valid-stacktrace? (every-pred seq (partial every? (every-pred string? seq))))
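+;;
+;; i.e. a stacktrace is considered valid when it is a non-empty sequence of non-empty strings:
+;;
+;;    (valid-stacktrace? ["at metabase.query-processor"]) ;=> true
+;;    (valid-stacktrace? [])                              ;=> false
+;;    (valid-stacktrace? ["at metabase.driver" ""])       ;=> false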
+
+;; running a bad query via `process-query` should return stacktrace, query, preprocessed query, and native query
+(expect
+  {:status       :failed
+   :class        java.util.concurrent.ExecutionException
+   :error        true
+   :stacktrace   true
+   ;; `:database` is removed by the catch-exceptions middleware for historical reasons
+   :query        (dissoc (bad-query) :database)
+   :preprocessed (bad-query:preprocessed)
+   :native       bad-query:native}
+  (-> (qp/process-query (bad-query))
+      (update :error (every-pred string? seq))
+      (update :stacktrace valid-stacktrace?)))
+
+;; running via `process-query-and-save-execution!` should return similar info and a bunch of other nonsense too
+(expect
+  {:started_at   true
+   :json_query   (bad-query)
+   :native       bad-query:native
+   :status       :failed
+   :stacktrace   true
+   :context      :question
+   :error        true
+   :row_count    0
+   :running_time true
+   :preprocessed (bad-query:preprocessed)
+   :data         {:rows [], :cols [], :columns []}}
+  (-> (qp/process-query-and-save-execution! (bad-query) {:context :question})
+      (update :error (every-pred string? seq))
+      (update :started_at (partial instance? java.util.Date))
+      (update :stacktrace valid-stacktrace?)
+      (update :running_time (complement neg?))))
diff --git a/test/metabase/query_processor_test/filter_test.clj b/test/metabase/query_processor_test/filter_test.clj
index f3451f446810cf3590530771881d67524ec93c43..184ee0aba0e9887c14c228219060ea5603c35ab6 100644
--- a/test/metabase/query_processor_test/filter_test.clj
+++ b/test/metabase/query_processor_test/filter_test.clj
@@ -1,7 +1,9 @@
 (ns metabase.query-processor-test.filter-test
   "Tests for the `:filter` clause."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.test.data :as data]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
             [metabase.test.data.datasets :as datasets]))
 
 ;;; ------------------------------------------------ "FILTER" CLAUSE -------------------------------------------------
@@ -92,11 +94,14 @@
    :columns     ["count"]
    :cols        [(aggregate-col :count)]
    :native_form true}
-  (->> (data/run-mbql-query checkins
-         {:aggregation [[:count]]
-          :filter      [:between $date "2015-04-01" "2015-05-01"]})
-       booleanize-native-form
-       (format-rows-by [int])))
+  (do
+    ;; Prevent an issue with Snowflake where a previous connection's report-timezone setting can affect this test's results
+    (when (= :snowflake datasets/*engine*) (tu/clear-connection-pool (data/id)))
+    (->> (data/run-mbql-query checkins
+           {:aggregation [[:count]]
+            :filter      [:between [:datetime-field $date :day] "2015-04-01" "2015-05-01"]})
+         booleanize-native-form
+         (format-rows-by [int]))))
 
 ;;; FILTER -- "OR", "<=", "="
 (expect-with-non-timeseries-dbs
@@ -143,9 +148,7 @@
                             {:aggregation [[:count]]
                              :filter      [:is-null $date]}))]
     ;; Some DBs like Mongo don't return any results at all in this case, and there's no easy workaround
-    (or (= result [0])
-        (= result [0M])
-        (nil? result))))
+    (contains? #{[0] [0M] [nil] nil} result)))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -283,8 +286,6 @@
 ;; equivalent expressions but I already wrote them so in this case it doesn't hurt to have a little more test coverage
 ;; than we need
 ;;
-;; TODO - maybe it makes sense to have a separate namespace to test the Query eXpander so we don't need to run all
-;; these extra queries?
 
 ;;; =
 (expect-with-non-timeseries-dbs
diff --git a/test/metabase/query_processor_test/nested_queries_test.clj b/test/metabase/query_processor_test/nested_queries_test.clj
index 0522e0f4bbf3bae2dfe5faf82fd88bb352f35d32..a7f6705523c6cbc6b1cc8619c86c37a4c1307cae 100644
--- a/test/metabase/query_processor_test/nested_queries_test.clj
+++ b/test/metabase/query_processor_test/nested_queries_test.clj
@@ -23,6 +23,7 @@
              [util :as tu]]
             [metabase.test.data
              [dataset-definitions :as defs]
+             [generic-sql :as sql.test]
              [datasets :as datasets]
              [users :refer [create-users-if-needed! user->client]]]
             [toucan.db :as db]
@@ -62,26 +63,6 @@
                                    :limit        10}
                     :limit        5}}))))
 
-;; TODO - `identifier`, `quoted-identifier` might belong in some sort of shared util namespace
-(defn- identifier
-  "Return a properly formatted *UNQUOTED* identifier for a Table or Field.
-  (This handles DBs like H2 who require uppercase identifiers, or databases like Redshift do clever hacks
-   like prefixing table names with a unique schema for each test run because we're not
-   allowed to create new databases.)"
-  (^String [table-kw]
-   (let [{schema :schema, table-name :name} (db/select-one [Table :name :schema] :id (data/id table-kw))]
-     (name (hsql/qualify schema table-name))))
-  (^String [table-kw field-kw]
-   (db/select-one-field :name Field :id (data/id table-kw field-kw))))
-
-(defn- quote-identifier [identifier]
-  (first (hsql/format (keyword identifier)
-           :quoting (generic-sql/quote-style datasets/*driver*))))
-
-(def ^:private ^{:arglists '([table-kw] [table-kw field-kw])} ^String quoted-identifier
-  "Return a *QUOTED* identifier for a Table or Field. (This behaves just like `identifier`, but quotes the result)."
-  (comp quote-identifier identifier))
-
 ;; make sure we can do a basic query with a SQL source-query
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :nested-queries)
   {:rows [[1 -165.374  4 3 "Red Medicine"                 10.0646]
@@ -89,10 +70,11 @@
           [3 -118.428 11 2 "The Apple Pan"                34.0406]
           [4 -118.465 29 2 "Wurstküche"                   33.9997]
           [5 -118.261 20 2 "Brite Spot Family Restaurant" 34.0778]]
-   :cols [{:name "id",          :base_type :type/Integer}
+   ;; Oracle doesn't have Integer types, they always come back as DECIMAL
+   :cols [{:name "id",          :base_type (case datasets/*engine* :oracle :type/Decimal :type/Integer)}
           {:name "longitude",   :base_type :type/Float}
-          {:name "category_id", :base_type (data/expected-base-type->actual :type/Integer)}
-          {:name "price",       :base_type (data/expected-base-type->actual :type/Integer)}
+          {:name "category_id", :base_type (case datasets/*engine* :oracle :type/Decimal :type/Integer)}
+          {:name "price",       :base_type (case datasets/*engine* :oracle :type/Decimal :type/Integer)}
           {:name "name",        :base_type :type/Text}
           {:name "latitude",    :base_type :type/Float}]}
   (format-rows-by [int (partial u/round-to-decimals 4) int int str (partial u/round-to-decimals 4)]
@@ -100,19 +82,20 @@
       (qp/process-query
         {:database (data/id)
          :type     :query
-         :query    {:source-query {:native (format "SELECT %s, %s, %s, %s, %s, %s FROM %s"
-                                                   (quoted-identifier :venues :id)
-                                                   (quoted-identifier :venues :longitude)
-                                                   (quoted-identifier :venues :category_id)
-                                                   (quoted-identifier :venues :price)
-                                                   (quoted-identifier :venues :name)
-                                                   (quoted-identifier :venues :latitude)
-                                                   (quoted-identifier :venues))}
+         :query    {:source-query {:native (:query
+                                            (qp/query->native
+                                              (data/mbql-query venues
+                                                {:fields [[:field-id $id]
+                                                          [:field-id $longitude]
+                                                          [:field-id $category_id]
+                                                          [:field-id $price]
+                                                          [:field-id $name]
+                                                          [:field-id $latitude]]})))}
                     :order-by     [[:asc [:field-literal (data/format-name :id) :type/Integer]]]
                     :limit        5}}))))
 
 
-(def ^:private ^:const breakout-results
+(def ^:private breakout-results
   {:rows [[1 22]
           [2 59]
           [3 13]
@@ -197,13 +180,13 @@
 
 ;; make sure we can do a query with breakout and aggregation using a SQL source query
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :nested-queries)
-  breakout-results
+  breakout-results
   (rows+cols
     (format-rows-by [int int]
       (qp/process-query
         {:database (data/id)
          :type     :query
-         :query    {:source-query {:native (format "SELECT * FROM %s" (quoted-identifier :venues))}
+         :query    {:source-query {:native (:query (qp/query->native (data/mbql-query venues)))}
                     :aggregation  [:count]
                     :breakout     [[:field-literal (keyword (data/format-name :price)) :type/Integer]]}}))))
 
@@ -333,7 +316,7 @@
 ;; e.g. the ORDER BY in the source-query should refer the 'stddev' aggregation, NOT the 'avg' aggregation
 (expect
   {:query (str "SELECT avg(\"stddev\") AS \"avg\" FROM ("
-                   "SELECT STDDEV(\"PUBLIC\".\"VENUES\".\"ID\") AS \"stddev\", \"PUBLIC\".\"VENUES\".\"PRICE\" AS \"PRICE\" "
+                   "SELECT \"PUBLIC\".\"VENUES\".\"PRICE\" AS \"PRICE\", stddev(\"PUBLIC\".\"VENUES\".\"ID\") AS \"stddev\" "
                    "FROM \"PUBLIC\".\"VENUES\" "
                    "GROUP BY \"PUBLIC\".\"VENUES\".\"PRICE\" "
                    "ORDER BY \"stddev\" DESC, \"PUBLIC\".\"VENUES\".\"PRICE\" ASC"
@@ -351,12 +334,13 @@
 (def ^:private ^:const ^String venues-source-with-category-sql
   (str "(SELECT \"PUBLIC\".\"VENUES\".\"ID\" AS \"ID\", \"PUBLIC\".\"VENUES\".\"NAME\" AS \"NAME\", "
        "\"PUBLIC\".\"VENUES\".\"CATEGORY_ID\" AS \"CATEGORY_ID\", \"PUBLIC\".\"VENUES\".\"LATITUDE\" AS \"LATITUDE\", "
-       "\"PUBLIC\".\"VENUES\".\"LONGITUDE\" AS \"LONGITUDE\", \"PUBLIC\".\"VENUES\".\"PRICE\" AS \"PRICE\", \"category_id\" AS \"category_id\" "
+       "\"PUBLIC\".\"VENUES\".\"LONGITUDE\" AS \"LONGITUDE\", \"PUBLIC\".\"VENUES\".\"PRICE\" AS \"PRICE\" "
        "FROM \"PUBLIC\".\"VENUES\") \"source\""))
 
 ;; make sure that we handle [field-id [field-literal ...]] forms gracefully, despite that not making any sense
 (expect
-  {:query  (format "SELECT \"category_id\" AS \"category_id\" FROM %s GROUP BY \"category_id\" ORDER BY \"category_id\" ASC LIMIT 10" venues-source-with-category-sql)
+  {:query  (format "SELECT \"category_id\" FROM %s GROUP BY \"category_id\" ORDER BY \"category_id\" ASC LIMIT 10"
+                   venues-source-with-category-sql)
    :params nil}
   (qp/query->native
     {:database (data/id)
@@ -403,51 +387,47 @@
        :query    {:source-table (str "card__" (u/get-id card))}})))
 
 (defn results-metadata {:style/indent 0} [results]
+  (when (= :failed (:status results))
+    (throw (ex-info "No results metadata." results)))
   (for [col (get-in results [:data :cols])]
-    (u/select-non-nil-keys col [:base_type :display_name :id :name :source :special_type :table_id :unit :datetime-unit])))
+    (u/select-non-nil-keys col [:base_type :display_name :id :name :special_type :table_id :unit :datetime-unit])))
 
 ;; make sure a query using a source query comes back with the correct columns metadata
 (expect
   [{:base_type    :type/BigInteger
     :display_name "ID"
-    :id           [:field-literal "ID" :type/BigInteger]
+    :id           (data/id :venues :id)
     :name         "ID"
-    :source       :fields
     :special_type :type/PK
     :table_id     (data/id :venues)}
    {:base_type    :type/Text
     :display_name "Name"
-    :id           [:field-literal "NAME" :type/Text]
+    :id           (data/id :venues :name)
     :name         "NAME"
-    :source       :fields
     :special_type :type/Name
     :table_id     (data/id :venues)}
    {:base_type    :type/Integer
     :display_name "Category ID"
-    :id           [:field-literal "CATEGORY_ID" :type/Integer]
+    :id           (data/id :venues :category_id)
     :name         "CATEGORY_ID"
-    :source       :fields
     :special_type :type/FK
     :table_id     (data/id :venues)}
    {:base_type    :type/Float
     :display_name "Latitude"
-    :id           [:field-literal "LATITUDE" :type/Float]
+    :id           (data/id :venues :latitude)
     :name         "LATITUDE"
-    :source       :fields
     :special_type :type/Latitude
     :table_id     (data/id :venues)}
    {:base_type    :type/Float
     :display_name "Longitude"
-    :id           [:field-literal "LONGITUDE" :type/Float]
+    :id           (data/id :venues :longitude)
     :name         "LONGITUDE"
-    :source       :fields
     :special_type :type/Longitude
     :table_id     (data/id :venues)}
    {:base_type    :type/Integer
     :display_name "Price"
-    :id           [:field-literal "PRICE" :type/Integer]
+    :id           (data/id :venues :price)
     :name         "PRICE"
-    :source       :fields
     :special_type :type/Category
     :table_id     (data/id :venues)}]
   (-> (tt/with-temp Card [card (venues-mbql-card-def)]
@@ -457,14 +437,11 @@
 ;; make sure a breakout/aggregate query using a source query comes back with the correct columns metadata
 (expect
   [{:base_type    :type/Text
-    :id           [:field-literal "PRICE" :type/Text]
     :name         "PRICE"
-    :display_name "Price"
-    :source       :breakout}
+    :display_name "Price"}
    {:base_type    :type/Integer
     :display_name "count"
     :name         "count"
-    :source       :aggregation
     :special_type :type/Number}]
   (-> (tt/with-temp Card [card (venues-mbql-card-def)]
         (qp/process-query (query-with-source-card card
@@ -476,14 +453,11 @@
 (expect
   [{:base_type    :type/DateTime
     :display_name "Date"
-    :id           [:field-literal "DATE" :type/DateTime]
     :name         "DATE"
-    :source       :breakout
     :unit         :day}
    {:base_type    :type/Integer
     :display_name "count"
     :name         "count"
-    :source       :aggregation
     :special_type :type/Number}]
   (-> (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                                 :type     :native
@@ -495,17 +469,16 @@
 
 ;; make sure when doing a nested query we give you metadata that would suggest you should be able to break out a *YEAR*
 (expect
-  [{:base_type    :type/Text
+  [{:base_type    :type/Date
     :display_name "Date"
-    :id           [:field-literal "DATE" :type/Text]
+    :id           (data/id :checkins :date)
     :name         "DATE"
-    :source       :breakout
-    :table_id     (data/id :checkins)}
+    :table_id     (data/id :checkins)
+    :unit         :year}
    {:base_type    :type/Integer
-    :display_name "Count"
-    :id           [:field-literal :count :type/Integer]
+    :display_name "count"
     :name         "count"
-    :source       :fields}]
+    :special_type :type/Number}]
   (-> (tt/with-temp Card [card (mbql-card-def
                                  :source-table (data/id :checkins)
                                  :aggregation  [[:count]]
@@ -513,6 +486,9 @@
         (qp/process-query (query-with-source-card card)))
       results-metadata))
 
+(defn- identifier [table-kw field-kw]
+  (db/select-one-field :name Field :id (data/id table-kw field-kw)))
+
 ;; make sure using a time interval filter works
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :nested-queries)
   :completed
diff --git a/test/metabase/query_processor_test/order_by_test.clj b/test/metabase/query_processor_test/order_by_test.clj
index 97babc3e6a0f80b99c74e6d4c0fb838419a50040..d3edcadb6a15eca87cb98704d83991e83a947bf4 100644
--- a/test/metabase/query_processor_test/order_by_test.clj
+++ b/test/metabase/query_processor_test/order_by_test.clj
@@ -1,9 +1,11 @@
 (ns metabase.query-processor-test.order-by-test
   "Tests for the `:order-by` clause."
   (:require [clojure.math.numeric-tower :as math]
+            [metabase.models.field :refer [Field]]
             [metabase.query-processor-test :refer :all]
             [metabase.test.data :as data]
-            [metabase.test.data.datasets :as datasets :refer [*engine*]]))
+            [metabase.test.data.datasets :as datasets :refer [*engine*]]
+            [metabase.test.util :as tu]))
 
 (expect-with-non-timeseries-dbs
   [[1 12 375]
@@ -43,7 +45,8 @@
           :breakout    [$price]
           :order-by    [[:asc [:aggregation 0]]]})
        booleanize-native-form
-       (format-rows-by [int int])))
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 
 ;;; order-by aggregate ["sum" field-id]
@@ -62,7 +65,8 @@
           :breakout    [$price]
           :order-by    [[:desc [:aggregation 0]]]})
        booleanize-native-form
-       (format-rows-by [int int])))
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 
 ;;; order-by aggregate ["distinct" field-id]
@@ -74,14 +78,15 @@
                  [1 22]
                  [2 59]]
    :cols        [(breakout-col (venues-col :price))
-                 (aggregate-col :count)]
+                 (aggregate-col :count (Field (data/id :venues :id)))]
    :native_form true}
   (->> (data/run-mbql-query venues
          {:aggregation [[:distinct $id]]
           :breakout    [$price]
           :order-by    [[:asc [:aggregation 0]]]})
        booleanize-native-form
-       (format-rows-by [int int])))
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 
 ;;; order-by aggregate ["avg" field-id]
@@ -100,7 +105,9 @@
           :breakout    [$price]
           :order-by    [[:asc [:aggregation 0]]]})
        booleanize-native-form
-       :data (format-rows-by [int int])))
+       data
+       (format-rows-by [int int])
+       tu/round-fingerprint-cols))
 
 ;;; ### order-by aggregate ["stddev" field-id]
 ;; SQRT calculations are always NOT EXACT (normal behavior) so round everything to the nearest int.
@@ -120,4 +127,6 @@
           :breakout    [$price]
           :order-by    [[:desc [:aggregation 0]]]})
        booleanize-native-form
-       :data (format-rows-by [int (comp int math/round)])))
+       data
+       (format-rows-by [int (comp int math/round)])
+       tu/round-fingerprint-cols))
\ No newline at end of file
diff --git a/test/metabase/query_processor_test/remapping_test.clj b/test/metabase/query_processor_test/remapping_test.clj
index e7da4f509681880abb93bfc1019e56f58895a3c8..aae49ff298885b2ae05532c1ecb45187327e15f4 100644
--- a/test/metabase/query_processor_test/remapping_test.clj
+++ b/test/metabase/query_processor_test/remapping_test.clj
@@ -34,7 +34,8 @@
             :order-by [[:asc $name]]
             :limit    4})
          booleanize-native-form
-         (format-rows-by [str int str]))))
+         (format-rows-by [str int str])
+         tu/round-fingerprint-cols)))
 
 (defn- select-columns
   "Focuses the given resultset to columns that return true when passed to `columns-pred`. Typically this would be done
@@ -67,11 +68,10 @@
    :cols        [(venues-col :name)
                  (venues-col :price)
                  (assoc (categories-col :name)
-                   :fk_field_id (data/id :venues :category_id)
-                   :display_name "Foo"
-                   :name (data/format-name "name_2")
-                   :remapped_from (data/format-name "category_id")
-                   :schema_name nil)]
+                   :fk_field_id   (data/id :venues :category_id)
+                   :display_name  "Foo"
+                   :name          (data/format-name "name_2")
+                   :remapped_from (data/format-name "category_id"))]
    :native_form true}
   (data/with-data
     (data/create-venue-category-fk-remapping "Foo")
@@ -96,11 +96,10 @@
    :cols        [(venues-col :name)
                  (venues-col :price)
                  (assoc (categories-col :name)
-                   :fk_field_id (data/id :venues :category_id)
-                   :display_name "Foo"
-                   :name (data/format-name "name_2")
-                   :remapped_from (data/format-name "category_id")
-                   :schema_name nil)]
+                   :fk_field_id   (data/id :venues :category_id)
+                   :display_name  "Foo"
+                   :name          (data/format-name "name_2")
+                   :remapped_from (data/format-name "category_id"))]
    :native_form true}
   (data/with-data
     (data/create-venue-category-fk-remapping "Foo")
diff --git a/test/metabase/query_processor_test/time_field_test.clj b/test/metabase/query_processor_test/time_field_test.clj
index ef2c40ac2d06c82d968a11c8cf0596ee00758246..4d419fc3cc2f3394d1a0ad0e802cb80b744078e2 100644
--- a/test/metabase/query_processor_test/time_field_test.clj
+++ b/test/metabase/query_processor_test/time_field_test.clj
@@ -79,6 +79,12 @@
     (= :mysql *engine*)
     []
 
+    ;; It looks like Snowflake is doing this conversion correctly. Snowflake's time field is stored as wall clock time
+    ;; (vs. PG and others storing it without a timezone). Originally, this time is 16:15 in UTC, which is 8:15 in
+    ;; pacific time. The other report timezone databases are not doing this timezone conversion.
+    (= :snowflake *engine*)
+    [[3 "Kaneonuskatew Eiran" "08:15:00.000-08:00"]]
+
     ;; Databases like PostgreSQL ignore timezone information when
     ;; using a time field, the result below is what happens when the
     ;; 08:00 time is interpreted as UTC, then not adjusted to Pacific
diff --git a/test/metabase/query_processor_test/timezones_test.clj b/test/metabase/query_processor_test/timezones_test.clj
index 9f13ddfd2d7bd371925443205ccc5c74a4572c46..88f6a8a4f078d08a8977872293aeb646f9bf728c 100644
--- a/test/metabase/query_processor_test/timezones_test.clj
+++ b/test/metabase/query_processor_test/timezones_test.clj
@@ -30,29 +30,17 @@
   `(call-with-timezones-db (fn [] ~@body)))
 
 (def ^:private default-utc-results
-  #{[6 "Shad Ferdynand" "2014-08-02T12:30:00.000Z"]
-    [7 "Conchúr Tihomir" "2014-08-02T09:30:00.000Z"]})
+  #{[6 "Shad Ferdynand" "2014-08-02T12:30:00.000Z"]})
 
 (def ^:private default-pacific-results
+  #{[6 "Shad Ferdynand" "2014-08-02T05:30:00.000-07:00"]})
+
+;; parameters always get `date` bucketing, so doing something like the `between` filtering we do below is basically
+;; just going to match anything with a `2014-08-02` date
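+;; (e.g., with day bucketing a filter like [:between $last_login "2014-08-02" "2014-08-02"] would match both of
+;; the rows below, since both timestamps fall on that date)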
+(def ^:private default-pacific-results-for-params
   #{[6 "Shad Ferdynand" "2014-08-02T05:30:00.000-07:00"]
     [7 "Conchúr Tihomir" "2014-08-02T02:30:00.000-07:00"]})
 
-;; Test querying a database that does NOT support report timezones
-;;
-;; The report-timezone of Europe/Brussels is UTC+2, our tests use a JVM timezone of UTC. If the timestamps below are
-;; interpretted incorrectly as Europe/Brussels, it would adjust that back 2 hours to UTC
-;; (i.e. 2014-07-01T22:00:00.000Z). We then cast that time to a date, which truncates it to 2014-07-01, which is then
-;; querying the day before. This reproduces the bug found in https://github.com/metabase/metabase/issues/7584
-(expect-with-engine :bigquery
-  #{[10 "Frans Hevel" "2014-07-03T19:30:00.000Z"]
-    [12 "Kfir Caj" "2014-07-03T01:30:00.000Z"]}
-  (with-tz-db
-    (tu/with-temporary-setting-values [report-timezone "Europe/Brussels"]
-      (-> (data/run-mbql-query users
-            {:filter [:between $last_login "2014-07-02" "2014-07-03"]})
-          qpt/rows
-          set))))
-
 ;; Query PG using a report-timezone set to pacific time. Should adjust the query parameter using that report timezone
 ;; and should return the timestamp in pacific time as well
 (expect-with-engines [:postgres :mysql]
@@ -87,7 +75,7 @@
 
 ;; Test that native dates are parsed with the report timezone (when supported)
 (expect-with-engines [:postgres :mysql]
-  default-pacific-results
+  default-pacific-results-for-params
   (with-tz-db
     (tu/with-temporary-setting-values [report-timezone "America/Los_Angeles"]
       (process-query'
@@ -105,7 +93,7 @@
 
 ;; This does not currently work for MySQL
 (expect-with-engines [:postgres :mysql]
-  default-pacific-results
+  default-pacific-results-for-params
   (with-tz-db
     (tu/with-temporary-setting-values [report-timezone "America/Los_Angeles"]
       (process-query'
@@ -122,7 +110,7 @@
 
 ;; Querying using a single date
 (expect-with-engines [:postgres :mysql]
-  default-pacific-results
+  default-pacific-results-for-params
   (with-tz-db
     (tu/with-temporary-setting-values [report-timezone "America/Los_Angeles"]
       (process-query'
diff --git a/test/metabase/query_processor_test/unix_timestamp_test.clj b/test/metabase/query_processor_test/unix_timestamp_test.clj
index d9e6e69e897b78cbe6f5550029ba6234ad530ede..f09189292c396932cf5bc28fa7ae210a542eb706 100644
--- a/test/metabase/query_processor_test/unix_timestamp_test.clj
+++ b/test/metabase/query_processor_test/unix_timestamp_test.clj
@@ -1,6 +1,7 @@
 (ns metabase.query-processor-test.unix-timestamp-test
   "Tests for UNIX timestamp support."
   (:require [metabase.query-processor-test :refer :all]
+            [metabase.query-processor-test.date-bucketing-test :as dbt]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -18,9 +19,7 @@
   (tu/with-temporary-setting-values [report-timezone "UTC"]
     (count (rows (data/dataset sad-toucan-incidents
                    (data/run-mbql-query incidents
-                     {:filter   [:and
-                                 [:> $timestamp "2015-06-01"]
-                                 [:< $timestamp "2015-06-03"]]
+                     {:filter   [:= [:datetime-field $timestamp :day] "2015-06-02"]
                       :order-by [[:asc $timestamp]]}))))))
 
 (expect-with-non-timeseries-dbs
@@ -37,7 +36,7 @@
      ["2015-06-09"  7]
      ["2015-06-10"  9]]
 
-    (contains? #{:oracle :redshift} *engine*)
+    (dbt/tz-shifted-engine-bug? *engine*)
     [["2015-06-01T00:00:00.000-07:00" 6]
      ["2015-06-02T00:00:00.000-07:00" 10]
      ["2015-06-03T00:00:00.000-07:00" 4]
diff --git a/test/metabase/sample_dataset_test.clj b/test/metabase/sample_dataset_test.clj
index 4edb2a20eb0b1f56d98bb07ab2a682b61e84829e..271063a9269f06a55ddb54a622a49586e0ad2b7c 100644
--- a/test/metabase/sample_dataset_test.clj
+++ b/test/metabase/sample_dataset_test.clj
@@ -57,7 +57,8 @@
    :visibility_type  :normal
    :preview_display  true
    :display_name     "Name"
-   :fingerprint      {:global {:distinct-count 2499}
+   :fingerprint      {:global {:distinct-count 2499
+                               :nil%           0.0}
                       :type   {:type/Text {:percent-json   0.0
                                            :percent-url    0.0
                                            :percent-email  0.0
diff --git a/test/metabase/sync/analyze/fingerprint/fingerprinters_test.clj b/test/metabase/sync/analyze/fingerprint/fingerprinters_test.clj
index 9214e52df4ad16ed9d1de4615c2275c6ea8e96ee..01aebb842e14b8413bed8697c746c26476a243ad 100644
--- a/test/metabase/sync/analyze/fingerprint/fingerprinters_test.clj
+++ b/test/metabase/sync/analyze/fingerprint/fingerprinters_test.clj
@@ -5,7 +5,8 @@
             [metabase.util.date :as du]))
 
 (expect
-  {:global {:distinct-count 3}
+  {:global {:distinct-count 3
+            :nil%           0.0}
    :type {:type/DateTime {:earliest (du/date->iso-8601 #inst "2013")
                           :latest   (du/date->iso-8601 #inst "2018")}}}
   (transduce identity
@@ -13,7 +14,8 @@
              [#inst "2013" #inst "2018" #inst "2015"]))
 
 (expect
-  {:global {:distinct-count 1}
+  {:global {:distinct-count 1
+            :nil%           1.0}
    :type {:type/DateTime {:earliest nil
                           :latest   nil}}}
   (transduce identity
@@ -21,16 +23,21 @@
              (repeat 10 nil)))
 
 (expect
-  {:global {:distinct-count 3}
+  {:global {:distinct-count 3
+            :nil%           0.0}
    :type {:type/Number {:avg 2.0
                         :min 1.0
-                        :max 3.0}}}
+                        :max 3.0
+                        :q1 1.25
+                        :q3 2.75
+                        :sd 1.0}}}
   (transduce identity
              (fingerprinter (field/map->FieldInstance {:base_type :type/Number}))
              [1.0 2.0 3.0]))
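+;; (`:sd` above is the sample standard deviation of [1.0 2.0 3.0], i.e. exactly 1.0; `:q1`/`:q3` come from the
+;; fingerprinter's single-pass quantile estimate, hence the slight difference from the classic interpolated
+;; quartiles of 1.5 and 2.5)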
 
 (expect
-  {:global {:distinct-count 5}
+  {:global {:distinct-count 5
+            :nil%           0.0}
    :type   {:type/Text {:percent-json 0.2,
                         :percent-url 0.0,
                         :percent-email 0.0,
diff --git a/test/metabase/sync/analyze/fingerprint/insights_test.clj b/test/metabase/sync/analyze/fingerprint/insights_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..f3f58333f001eb0fdce47cf93166953a697c5857
--- /dev/null
+++ b/test/metabase/sync/analyze/fingerprint/insights_test.clj
@@ -0,0 +1,35 @@
+(ns metabase.sync.analyze.fingerprint.insights-test
+  (:require [expectations :refer :all]
+            [metabase.sync.analyze.fingerprint.insights :refer :all]))
+
+(def ^:private cols [{:base_type :type/DateTime} {:base_type :type/Number}])
+
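+;; `:last-value` should be taken from the final row; rows containing nils must not break the computation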
+(expect
+  700
+  (-> (transduce identity (insights cols) [["2014" 100]
+                                           ["2015" 200]
+                                           ["2016" nil]
+                                           [nil 300]
+                                           [nil nil]
+                                           ["2017" 700]])
+      first
+      :last-value))
+
+(expect
+  700
+  (-> (transduce identity (insights cols) [["2017" 700]])
+      first
+      :last-value))
+
+;; Here we just make sure we don't blow up on empty input
+(expect
+  nil
+  (-> (transduce identity (insights cols) [])
+      first
+      :last-value))
+
+(expect
+  nil
+  (-> (transduce identity (insights cols) [[nil nil]])
+      first
+      :last-value))
diff --git a/test/metabase/sync/analyze/query_results_test.clj b/test/metabase/sync/analyze/query_results_test.clj
index a9cf399b86af3a6883a9a642ead58052a4c3ece2..727d50d01b3e0fc6162641f68d3bd64fe325870b 100644
--- a/test/metabase/sync/analyze/query_results_test.clj
+++ b/test/metabase/sync/analyze/query_results_test.clj
@@ -23,7 +23,7 @@
 
 (defn- name->fingerprints [field-or-metadata]
   (zipmap (map column->name-keyword field-or-metadata)
-          (map :fingerprint field-or-metadata)))
+          (map :fingerprint (tu/round-fingerprint-cols field-or-metadata))))
 
 (defn- name->special-type [field-or-metadata]
   (zipmap (map column->name-keyword field-or-metadata)
@@ -74,10 +74,11 @@
 ;; Native queries don't know what the associated Fields are for the results, so we need to compute the fingerprints, but
 ;; they should still be the same except for some of the optimizations we do when we have all the information.
 (expect
-  (update mutil/venue-fingerprints :category_id assoc :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98}})
+  (update mutil/venue-fingerprints :category_id assoc :type {:type/Number {:min 2.0, :max 74.0, :avg 29.98, :q1 7.0, :q3 49.0, :sd 23.06}})
   (tt/with-temp Card [card {:dataset_query {:database (data/id)
                                             :type     :native
                                             :native   {:query "select * from venues"}}}]
     (name->fingerprints
      (query->result-metadata (query-for-card card)))))
 
diff --git a/test/metabase/sync/sync_metadata/comments_test.clj b/test/metabase/sync/sync_metadata/comments_test.clj
index 2954c56320e2efa6d6b1aac3b9661eb2651d7564..b28c818afa4c23f7be344e556786922745153d8d 100644
--- a/test/metabase/sync/sync_metadata/comments_test.clj
+++ b/test/metabase/sync/sync_metadata/comments_test.clj
@@ -61,7 +61,7 @@
     {:name (data/format-name "comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db comment-after-sync]
     ;; modify the source DB to add the comment and resync
-    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment") true)
+    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment"), {:skip-drop-db? true})
     (sync/sync-table! (Table (data/id "comment_after_sync")))
     (db->fields db)))
 
@@ -98,7 +98,7 @@
 (ds/expect-with-engines #{:h2 :postgres}
   #{{:name (data/format-name "table_with_comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db (basic-table "table_with_comment_after_sync" nil)]
-     ;; modify the source DB to add the comment and resync
-     (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") true)
-     (metabase.sync.sync-metadata.tables/sync-tables! db)
-     (db->tables db)))
+    ;; modify the source DB to add the comment and resync
+    (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") {:skip-drop-db? true})
+    (metabase.sync.sync-metadata.tables/sync-tables! db)
+    (db->tables db)))
diff --git a/test/metabase/sync/sync_metadata/sync_timezone_test.clj b/test/metabase/sync/sync_metadata/sync_timezone_test.clj
index 1e1eeef9b3de276743554e45f313c2cbcc9dda3d..162c892471beaebce1fabe4c1568fb7b314aaeaa 100644
--- a/test/metabase/sync/sync_metadata/sync_timezone_test.clj
+++ b/test/metabase/sync/sync_metadata/sync_timezone_test.clj
@@ -39,16 +39,15 @@
        ;; Check that the value was set again after sync
        (boolean (time/time-zone-for-id (db-timezone db)))])))
 
+;; TODO - this works for me ok with Postgres 9.6 & Java 10. Returns Australia/Hobart
 (datasets/expect-with-engines #{:postgres}
   ["UTC" "UTC"]
   (data/dataset test-data
     (let [db (data/db)]
       (sync-tz/sync-timezone! db)
       [(db-timezone db)
-       ;; This call fails as the dates on PostgreSQL return 'AEST'
-       ;; for the time zone name. The exception is logged, but the
-       ;; timezone column should be left alone and processing should
-       ;; continue
+       ;; This call fails as the dates on PostgreSQL return 'AEST' for the time zone name. The exception is logged,
+       ;; but the timezone column should be left alone and processing should continue
        (tu/with-temporary-setting-values [report-timezone "Australia/Sydney"]
          (do
            (sync-tz/sync-timezone! db)
diff --git a/test/metabase/sync_database_test.clj b/test/metabase/sync_database_test.clj
index a38656c0beb2f127a555db7f8ce2199d7616b731..d6fe48c65218640934d294a0aa0d913e4cf3b345 100644
--- a/test/metabase/sync_database_test.clj
+++ b/test/metabase/sync_database_test.clj
@@ -87,9 +87,7 @@
 (defn- table-details [table]
   (into {} (-> (dissoc table :db :pk_field :field_values)
                (assoc :fields (for [field (db/select Field, :table_id (:id table), {:order-by [:name]})]
-                                (into {} (-> (dissoc field
-                                                     :table :db :children :qualified-name :qualified-name-components
-                                                     :values :target)
+                                (into {} (-> field
                                              (update :fingerprint map?)
                                              (update :fingerprint_version (complement zero?))))))
                tu/boolean-ids-and-timestamps)))
@@ -132,45 +130,73 @@
    :visibility_type     :normal
    :settings            nil})
 
+(def ^:private field-defaults-with-fingerprint
+  (assoc field-defaults
+    :last_analyzed       true
+    :fingerprint_version true
+    :fingerprint         true))
+
+(def ^:private field:movie-id
+  (merge
+   field-defaults
+   {:name          "id"
+    :display_name  "ID"
+    :database_type "SERIAL"
+    :base_type     :type/Integer
+    :special_type  :type/PK}))
+
+(def ^:private field:movie-studio
+  (merge
+   field-defaults-with-fingerprint
+   {:name               "studio"
+    :display_name       "Studio"
+    :database_type      "VARCHAR"
+    :base_type          :type/Text
+    :fk_target_field_id true
+    :special_type       :type/FK}))
+
+(def ^:private field:movie-title
+  (merge
+   field-defaults-with-fingerprint
+   {:name          "title"
+    :display_name  "Title"
+    :database_type "VARCHAR"
+    :base_type     :type/Text
+    :special_type  :type/Title}))
+
+(def ^:private field:studio-name
+  (merge
+   field-defaults-with-fingerprint
+   {:name          "name"
+    :display_name  "Name"
+    :database_type "VARCHAR"
+    :base_type     :type/Text
+    :special_type  :type/Name}))
+
+;; `studio.studio`? huh?
+(def ^:private field:studio-studio
+  (merge
+   field-defaults
+   {:name          "studio"
+    :display_name  "Studio"
+    :database_type "VARCHAR"
+    :base_type     :type/Text
+    :special_type  :type/PK}))
+
 ;; ## SYNC DATABASE
 (expect
   [(merge table-defaults
           {:schema       "default"
            :name         "movie"
            :display_name "Movie"
-           :fields       [(merge field-defaults
-                                 {:name          "id"
-                                  :display_name  "ID"
-                                  :database_type "SERIAL"
-                                  :base_type     :type/Integer
-                                  :special_type  :type/PK})
-                          (merge field-defaults
-                                 {:name               "studio"
-                                  :display_name       "Studio"
-                                  :database_type      "VARCHAR"
-                                  :base_type          :type/Text
-                                  :fk_target_field_id true
-                                  :special_type       :type/FK})
-                          (merge field-defaults
-                                 {:name          "title"
-                                  :display_name  "Title"
-                                  :database_type "VARCHAR"
-                                  :base_type     :type/Text
-                                  :special_type  :type/Title})]})
+           :fields       [field:movie-id
+                          field:movie-studio
+                          field:movie-title]})
    (merge table-defaults
           {:name         "studio"
            :display_name "Studio"
-           :fields       [(merge field-defaults
-                                 {:name          "name"
-                                  :display_name  "Name"
-                                  :database_type "VARCHAR"
-                                  :base_type     :type/Text})
-                          (merge field-defaults
-                                 {:name          "studio"
-                                  :display_name  "Studio"
-                                  :database_type "VARCHAR"
-                                  :base_type     :type/Text
-                                  :special_type  :type/PK})]})]
+           :fields       [field:studio-name
+                          field:studio-studio]})]
   (tt/with-temp Database [db {:engine :sync-test}]
     (sync/sync-database! db)
     ;; we are purposely running the sync twice to test for possible logic issues which only manifest on resync of a
@@ -186,23 +212,10 @@
          {:schema       "default"
           :name         "movie"
           :display_name "Movie"
-          :fields       [(merge field-defaults
-                                {:name          "id"
-                                 :display_name  "ID"
-                                 :database_type "SERIAL"
-                                 :base_type     :type/Integer
-                                 :special_type  :type/PK})
-                         (merge field-defaults
-                                {:name          "studio"
-                                 :display_name  "Studio"
-                                 :database_type "VARCHAR"
-                                 :base_type     :type/Text})
-                         (merge field-defaults
-                                {:name          "title"
-                                 :display_name  "Title"
-                                 :database_type "VARCHAR"
-                                 :base_type     :type/Text
-                                 :special_type  :type/Title})]})
+          :fields       [field:movie-id
+                         ;; FKs only get synced when you sync the whole DB
+                         (assoc field:movie-studio :fk_target_field_id false, :special_type nil)
+                         field:movie-title]})
   (tt/with-temp* [Database [db    {:engine :sync-test}]
                   Table    [table {:name   "movie"
                                    :schema "default"
diff --git a/test/metabase/task/sync_databases_test.clj b/test/metabase/task/sync_databases_test.clj
index 4be392e908260537049dd1e442a744ac8dce8a65..5ba76457ea49c779effc39a81e8fca8c344973e6 100644
--- a/test/metabase/task/sync_databases_test.clj
+++ b/test/metabase/task/sync_databases_test.clj
@@ -7,6 +7,7 @@
             [metabase.models.database :refer [Database]]
             [metabase.task.sync-databases :as sync-db]
             [metabase.test.util :as tu]
+            [metabase.test.util.log :as tu.log]
             [metabase.util :as u]
             [metabase.util.date :as du]
             [toucan.db :as db]
@@ -126,14 +127,16 @@
 (expect
   Exception
   (tt/with-temp Database [database {:engine :postgres}]
-    (db/update! Database (u/get-id database)
-      :metadata_sync_schedule "2 CANS PER DAY")))
+    (tu.log/suppress-output
+      (db/update! Database (u/get-id database)
+        :metadata_sync_schedule "2 CANS PER DAY"))))
 
 (expect
   Exception
   (tt/with-temp Database [database {:engine :postgres}]
-    (db/update! Database (u/get-id database)
-      :cache_field_values_schedule "2 CANS PER DAY")))
+    (tu.log/suppress-output
+      (db/update! Database (u/get-id database)
+        :cache_field_values_schedule "2 CANS PER DAY"))))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
diff --git a/test/metabase/test/automagic_dashboards.clj b/test/metabase/test/automagic_dashboards.clj
index 73a2f9ce3a33e79416409f1f17004629d64ed847..81e006db2cee914379069ef10c99e1472d03c1a6 100644
--- a/test/metabase/test/automagic_dashboards.clj
+++ b/test/metabase/test/automagic_dashboards.clj
@@ -1,10 +1,13 @@
 (ns metabase.test.automagic-dashboards
   "Helper functions and macros for writing tests for automagic dashboards."
   (:require [metabase.api.common :as api]
+            [metabase.mbql
+             [normalize :as normalize]
+             [schema :as mbql.s]]
             [metabase.models.user :as user]
-            [metabase.query-processor :as qp]
             [metabase.test.data.users :as test-users]
-            [metabase.test.util :as tu]))
+            [metabase.test.util :as tu]
+            [schema.core :as s]))
 
 (defmacro with-rasta
   "Execute body with rasta as the current user."
@@ -38,8 +41,8 @@
                  ((test-users/user->client :rasta) :get 200 (format "automagic-dashboards/%s"
                                                                     (subs url 16)))))))
 
-(def ^:private valid-card?
-  (comp qp/expand :dataset_query))
+(defn- valid-card? [{query :dataset_query}]
+  (nil? (s/check mbql.s/Query (normalize/normalize query))))
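+
+;; e.g. a card with a minimal well-formed query passes (hypothetical database/table IDs):
+;;   (valid-card? {:dataset_query {:database 1, :type :query, :query {:source-table 2}}}) ; -> true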
 
 (defn valid-dashboard?
   "Is generated dashboard valid?
diff --git a/test/metabase/test/data.clj b/test/metabase/test/data.clj
index 92eae516de6cbefafa6dbfc773ac8480e33ca936..b34ed7cd45fb2017dc99cb65d036a80dd4181c22 100644
--- a/test/metabase/test/data.clj
+++ b/test/metabase/test/data.clj
@@ -20,6 +20,7 @@
              [dataset-definitions :as defs]
              [datasets :refer [*driver*]]
              [interface :as i]]
+            [metabase.test.util.log :as tu.log]
             [toucan.db :as db])
   (:import [metabase.test.data.interface DatabaseDefinition TableDefinition]))
 
@@ -119,8 +120,8 @@
 
     $$table -> (id :venues)"
   {:style/indent 1}
-  [table-name body & {:keys [wrap-field-ids?], :or {wrap-field-ids? false}}]
-  ($->id (keyword table-name) body, :wrap-field-ids? wrap-field-ids?))
+  [table-name & body]
+  ($->id (keyword table-name) `(do ~@body) :wrap-field-ids? false))
 
 
 (defn wrap-inner-mbql-query
@@ -147,7 +148,7 @@
   [table & [query]]
   `(wrap-inner-mbql-query
      ~(merge `{:source-table (id ~(keyword table))}
-             ($->id (keyword table) query))))
+             ($->id table query))))
 
 (defmacro run-mbql-query
   "Like `mbql-query`, but runs the query as well."
@@ -189,7 +190,7 @@
 
 (defn id
   "Get the ID of the current database or one of its `Tables` or `Fields`.
-   Relies on the dynamic variable `*get-db`, which can be rebound with `with-db`."
+   Relies on the dynamic variable `*get-db*`, which can be rebound with `with-db`."
   ([]
    {:post [(integer? %)]}
    (:id (db)))
@@ -286,12 +287,15 @@
                           (or (i/metabase-instance database-definition engine)
                               (create-database! database-definition engine driver)))]
      (try
-       (get-or-create!)
+       ;; it's ok to suppress output here because it's usually just the IllegalArgumentException described
+       ;; below; if it fails again after reloading, we don't suppress it
+       (tu.log/suppress-output
+         (get-or-create!))
        ;; occasionally we'll see an error like
        ;;   java.lang.IllegalArgumentException: No implementation of method: :database->connection-details
        ;;   of protocol: IDriverTestExtensions found for class: metabase.driver.h2.H2Driver
        ;; to fix this we just need to reload a couple namespaces and then try again
-       (catch IllegalArgumentException _
+       (catch Exception _
          (reload-test-extensions engine)
          (get-or-create!))))))
 
diff --git a/test/metabase/test/data/bigquery.clj b/test/metabase/test/data/bigquery.clj
index 65833494aa337e3e7fc07f67962a843943adbb4f..23e0e38d253b1e68b8b1071a8f9c95a4f3385902 100644
--- a/test/metabase/test/data/bigquery.clj
+++ b/test/metabase/test/data/bigquery.clj
@@ -200,35 +200,38 @@
 (def ^:private existing-datasets
   (atom #{}))
 
-(defn- create-db! [{:keys [database-name table-definitions]}]
-  {:pre [(seq database-name) (sequential? table-definitions)]}
-    ;; fetch existing datasets if we haven't done so yet
-  (when-not (seq @existing-datasets)
-    (reset! existing-datasets (set (existing-dataset-names)))
-    (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
-  ;; now check and see if we need to create the requested one
-  (let [database-name (normalize-name database-name)]
-    (when-not (contains? @existing-datasets database-name)
-      (try
-        (u/auto-retry 10
-          ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
-          ;; again from a blank slate
-          (u/ignore-exceptions
-            (destroy-dataset! database-name))
-          (create-dataset! database-name)
-          ;; do this in parallel because otherwise it can literally take an hour to load something like
-          ;; fifty_one_different_tables
-          (u/pdoseq [tabledef table-definitions]
-            (load-tabledef! database-name tabledef))
-          (swap! existing-datasets conj database-name)
-          (println (u/format-color 'green "[OK]")))
-        ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
-        ;; around
-        (catch Throwable e
-          (u/ignore-exceptions
-            (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
-            (destroy-dataset! database-name))
-          (throw e))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
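+  ;; the second arg is the options map from the updated `create-db!` interface; BigQuery ignores it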
+  ([{:keys [database-name table-definitions]} _]
+   {:pre [(seq database-name) (sequential? table-definitions)]}
+   ;; fetch existing datasets if we haven't done so yet
+   (when-not (seq @existing-datasets)
+     (reset! existing-datasets (set (existing-dataset-names)))
+     (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
+   ;; now check and see if we need to create the requested one
+   (let [database-name (normalize-name database-name)]
+     (when-not (contains? @existing-datasets database-name)
+       (try
+         (u/auto-retry 10
+           ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
+           ;; again from a blank slate
+           (u/ignore-exceptions
+             (destroy-dataset! database-name))
+           (create-dataset! database-name)
+           ;; do this in parallel because otherwise it can literally take an hour to load something like
+           ;; fifty_one_different_tables
+           (u/pdoseq [tabledef table-definitions]
+             (load-tabledef! database-name tabledef))
+           (swap! existing-datasets conj database-name)
+           (println (u/format-color 'green "[OK]")))
+         ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
+         ;; around
+         (catch Throwable e
+           (u/ignore-exceptions
+             (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
+             (destroy-dataset! database-name))
+           (throw e)))))))
 
 
 ;;; --------------------------------------------- IDriverTestExtensions ----------------------------------------------
diff --git a/test/metabase/test/data/datasets.clj b/test/metabase/test/data/datasets.clj
index 0cb4095c124bba2d534736e2d9be739049349e0b..60b329ecbf75b0a050d3083ceecde28314487d89 100644
--- a/test/metabase/test/data/datasets.clj
+++ b/test/metabase/test/data/datasets.clj
@@ -6,9 +6,10 @@
             [colorize.core :as color]
             [environ.core :refer [env]]
             [expectations :refer [expect]]
-            (metabase [config :as config]
-                      [driver :as driver]
-                      [plugins :as plugins])
+            [metabase
+             [config :as config]
+             [driver :as driver]
+             [plugins :as plugins]]
             [metabase.test.data.interface :as i]))
 
 ;; When running tests, we need to make sure plugins (i.e., the Oracle JDBC driver) are loaded because otherwise the
@@ -75,6 +76,7 @@
   [engine]
   (try (i/engine (driver/engine->driver engine))
        (catch IllegalArgumentException _
+         (println "Reloading test extensions: (require " (engine->test-extensions-ns-symbol engine) ":reload)")
          (require (engine->test-extensions-ns-symbol engine) :reload)))
   (driver/engine->driver engine))
 
diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj
index 265055d8d45dd6353c0e50d3b0497d2bebc119e1..b70afbf65e9e92ce70f62f3b7eae316dbe276025 100644
--- a/test/metabase/test/data/generic_sql.clj
+++ b/test/metabase/test/data/generic_sql.clj
@@ -68,6 +68,7 @@
   (pk-field-name ^String [this]
     "*Optional* Name of a PK field. Defaults to `\"id\"`.")
 
+  ;; TODO - WHAT ABOUT SCHEMA NAME???
   (qualified-name-components [this, ^String database-name]
                              [this, ^String database-name, ^String table-name]
                              [this, ^String database-name, ^String table-name, ^String field-name]
@@ -222,13 +223,19 @@
                                     (du/->Timestamp v du/utc)
                                     v))))))
 
+(defn add-ids
+  "Add an `:id` column to each row in `rows`, for databases that should have data inserted with the ID explicitly
+  specified."
+  [rows]
+  (for [[i row] (m/indexed rows)]
+    (assoc row :id (inc i))))
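+;; e.g. (add-ids [{:name "a"} {:name "b"}]) ;; -> ({:name "a", :id 1} {:name "b", :id 2})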
+
 (defn load-data-add-ids
   "Add IDs to each row, presumabily for doing a parallel insert. This arg should go before `load-data-chunked` or
   `load-data-one-at-a-time`."
   [insert!]
   (fn [rows]
-    (insert! (vec (for [[i row] (m/indexed rows)]
-                    (assoc row :id (inc i)))))))
+    (insert! (vec (add-ids rows)))))
 
 (defn load-data-chunked
   "Insert rows in chunks, which default to 200 rows each."
@@ -290,6 +297,7 @@
 (def load-data-all-at-once!            "Insert all rows at once."                             (make-load-data-fn))
 (def load-data-chunked!                "Insert rows in chunks of 200 at a time."              (make-load-data-fn load-data-chunked))
 (def load-data-one-at-a-time!          "Insert rows one at a time."                           (make-load-data-fn load-data-one-at-a-time))
+(def load-data-add-ids!                "Insert all rows at once; add IDs."                    (make-load-data-fn load-data-add-ids))
 (def load-data-chunked-parallel!       "Insert rows in chunks of 200 at a time, in parallel." (make-load-data-fn load-data-add-ids (partial load-data-chunked pmap)))
 (def load-data-one-at-a-time-parallel! "Insert rows one at a time, in parallel."              (make-load-data-fn load-data-add-ids (partial load-data-one-at-a-time pmap)))
 ;; ^ the parallel versions aren't necessarily faster than the sequential versions for all drivers so make sure to do some profiling in order to pick the appropriate implementation
@@ -353,46 +361,49 @@
       (when (seq statement)
         (execute! driver context dbdef (s/replace statement #"⅋" ";"))))))
 
-(defn- create-db!
-  ([driver database-definition]
-    (create-db! driver database-definition false))
-  ([driver {:keys [table-definitions], :as dbdef} skip-drop-db?]
-    (when-not skip-drop-db?
-      ;; Exec SQL for creating the DB
-      (execute-sql! driver :server dbdef (str (drop-db-if-exists-sql driver dbdef) ";\n"
-                                              (create-db-sql driver dbdef))))
-    ;; Build combined statement for creating tables + FKs + comments
-    (let [statements (atom [])]
-      ;; Add the SQL for creating each Table
-      (doseq [tabledef table-definitions]
-        (swap! statements conj (drop-table-if-exists-sql driver dbdef tabledef)
-               (create-table-sql driver dbdef tabledef)))
-      ;; Add the SQL for adding FK constraints
-      (doseq [{:keys [field-definitions], :as tabledef} table-definitions]
-        (doseq [{:keys [fk], :as fielddef} field-definitions]
-          (when fk
-            (swap! statements conj (add-fk-sql driver dbdef tabledef fielddef)))))
-      ;; Add the SQL for adding table comments
-      (doseq [{:keys [table-comment], :as tabledef} table-definitions]
-        (when table-comment
-          (swap! statements conj (standalone-table-comment-sql driver dbdef tabledef))))
-      ;; Add the SQL for adding column comments
-      (doseq [{:keys [field-definitions], :as tabledef} table-definitions]
-        (doseq [{:keys [field-comment], :as fielddef} field-definitions]
-          (when field-comment
-            (swap! statements conj (standalone-column-comment-sql driver dbdef tabledef fielddef)))))
-      ;; exec the combined statement
-      (execute-sql! driver :db dbdef (s/join ";\n" (map hx/unescape-dots @statements))))
+(defn default-create-db!
+  "Default implementation of `create-db!` for SQL drivers."
+  ([driver db-def]
+   (default-create-db! driver db-def nil))
+  ([driver {:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?]
+                                                   :or   {skip-drop-db? false}}]
+   (when-not skip-drop-db?
+     ;; Exec SQL for creating the DB
+     (execute-sql! driver :server dbdef (str (drop-db-if-exists-sql driver dbdef) ";\n"
+                                             (create-db-sql driver dbdef))))
+   ;; Build combined statement for creating tables + FKs + comments
+   (let [statements (atom [])]
+     ;; Add the SQL for creating each Table
+     (doseq [tabledef table-definitions]
+       (swap! statements conj (drop-table-if-exists-sql driver dbdef tabledef)
+              (create-table-sql driver dbdef tabledef)))
+
+     ;; Add the SQL for adding FK constraints
+     (doseq [{:keys [field-definitions], :as tabledef} table-definitions]
+       (doseq [{:keys [fk], :as fielddef} field-definitions]
+         (when fk
+           (swap! statements conj (add-fk-sql driver dbdef tabledef fielddef)))))
+     ;; Add the SQL for adding table comments
+     (doseq [{:keys [table-comment], :as tabledef} table-definitions]
+       (when table-comment
+         (swap! statements conj (standalone-table-comment-sql driver dbdef tabledef))))
+     ;; Add the SQL for adding column comments
+     (doseq [{:keys [field-definitions], :as tabledef} table-definitions]
+       (doseq [{:keys [field-comment], :as fielddef} field-definitions]
+         (when field-comment
+           (swap! statements conj (standalone-column-comment-sql driver dbdef tabledef fielddef)))))
+     ;; exec the combined statement
+     (execute-sql! driver :db dbdef (s/join ";\n" (map hx/unescape-dots @statements))))
    ;; Now load the data for each Table
    (doseq [tabledef table-definitions]
      (du/profile (format "load-data for %s %s %s" (name driver) (:database-name dbdef) (:table-name tabledef))
-        (load-data! driver dbdef tabledef)))))
+       (load-data! driver dbdef tabledef)))))
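+
+;; callers can now skip the initial DROP/CREATE step, e.g. (hypothetical call):
+;;   (default-create-db! driver dbdef {:skip-drop-db? true})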
 
 
 (def IDriverTestExtensionsMixin
   "Mixin for `IGenericSQLTestExtensions` types to implement `create-db!` from `IDriverTestExtensions`."
   (merge i/IDriverTestExtensionsDefaultsMixin
-         {:create-db! create-db!}))
+         {:create-db! default-create-db!}))
 
 
 ;;; ## Various Util Fns
diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj
index 32d6a0960d9a9d1204a04e2c3afb7fbdd0fda01e..7d538d13269fcaeb4ad7324355fc920dbad84453 100644
--- a/test/metabase/test/data/interface.clj
+++ b/test/metabase/test/data/interface.clj
@@ -110,17 +110,21 @@
     "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database`
      details map). CONTEXT is one of:
 
-     *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
-                    creating/destroying databases)
-     *  `:db`     - Return details for connecting specifically to the DB.")
+ *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
+                creating/destroying databases)
+ *  `:db`     - Return details for connecting specifically to the DB.")
 
-  (create-db! [this, ^DatabaseDefinition database-definition]
-              [this, ^DatabaseDefinition database-definition, ^Boolean skip-drop-db?]
+  (create-db!
+    [this, ^DatabaseDefinition database-definition]
+    [this, ^DatabaseDefinition database-definition, {:keys [skip-drop-db?]}]
     "Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints,
-     and add the appropriate data. This method should drop existing databases with the same name if applicable,
-     unless the skip-drop-db? arg is true. This is to workaround a scenario where the postgres driver terminates
-     the connection before dropping the DB and causes some tests to fail.
-     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)")
+     and add the appropriate data. This method should drop existing databases with the same name if applicable, unless
+     the skip-drop-db? arg is true. This is to work around a scenario where the postgres driver terminates the
+     connection before dropping the DB and causes some tests to fail.
+     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)
+
+ Optional `options` map as third param. Currently supported options include `skip-drop-db?`. If unspecified,
+ `skip-drop-db?` should default to `false`.")
 
   ;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter
   (default-schema ^String [this]
@@ -129,7 +133,7 @@
   (expected-base-type->actual [this base-type]
     "*OPTIONAL*. Return the base type type that is actually used to store `Fields` of BASE-TYPE.
      The default implementation of this method is an identity fn. This is provided so DBs that don't support a given
-     BASE-TYPE used in the test data can specifiy what type we should expect in the results instead.  For example,
+     BASE-TYPE used in the test data can specify what type we should expect in the results instead. For example,
      Oracle has no `INTEGER` data types, so `:type/Integer` test values are instead stored as `NUMBER`, which we map
      to `:type/Decimal`.")
 
@@ -166,17 +170,19 @@
 (defn create-table-definition
   "Convenience for creating a `TableDefinition`."
   ^TableDefinition [^String table-name, field-definition-maps rows]
-  (s/validate TableDefinition (map->TableDefinition {:table-name        table-name
-                                                     :rows              rows
-                                                     :field-definitions (mapv create-field-definition field-definition-maps)})))
+  (s/validate TableDefinition (map->TableDefinition
+                               {:table-name        table-name
+                                :rows              rows
+                                :field-definitions (mapv create-field-definition field-definition-maps)})))
 
 (defn create-database-definition
   "Convenience for creating a new `DatabaseDefinition`."
   {:style/indent 1}
   ^DatabaseDefinition [^String database-name & table-name+field-definition-maps+rows]
-  (s/validate DatabaseDefinition (map->DatabaseDefinition {:database-name     database-name
-                                                           :table-definitions (mapv (partial apply create-table-definition)
-                                                                                    table-name+field-definition-maps+rows)})))
+  (s/validate DatabaseDefinition (map->DatabaseDefinition
+                                  {:database-name     database-name
+                                   :table-definitions (mapv (partial apply create-table-definition)
+                                                            table-name+field-definition-maps+rows)})))
 
 (def ^:private ^:const edn-definitions-dir "./test/metabase/test/data/dataset_definitions/")
 
@@ -184,12 +190,9 @@
   (edn/read-string (slurp (str edn-definitions-dir dbname ".edn"))))
 
 (defn update-table-def
-  "Function useful for modifying a table definition before it's
-  applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector of column
-  definitions and `UPDATE-ROWS-FN` with the vector of rows in the
-  database definition. `TABLE-DEF` is the database
-  definition (typically used directly in a `def-database-definition`
-  invocation)."
+  "Function useful for modifying a table definition before it's applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector
+  of column definitions and `UPDATE-ROWS-FN` with the vector of rows in the database definition. `TABLE-DEF` is the
+  database definition (typically used directly in a `def-database-definition` invocation)."
   [table-name-to-update update-table-def-fn update-rows-fn table-def]
   (vec
    (for [[table-name table-def rows :as orig-table-def] table-def]
diff --git a/test/metabase/test/data/mongo.clj b/test/metabase/test/data/mongo.clj
index 1317bd9b629718122dadb775818b76da5a02b482..fba2247223d1ce5cd385322c2bf47d459331d996 100644
--- a/test/metabase/test/data/mongo.clj
+++ b/test/metabase/test/data/mongo.clj
@@ -18,26 +18,30 @@
   (with-open [mongo-connection (mg/connect (database->connection-details dbdef))]
     (mg/drop-db mongo-connection (i/escaped-name dbdef))))
 
-(defn- create-db! [{:keys [table-definitions], :as dbdef}]
-  (destroy-db! dbdef)
-  (with-mongo-connection [mongo-db (database->connection-details dbdef)]
-    (doseq [{:keys [field-definitions table-name rows]} table-definitions]
-      (let [field-names (for [field-definition field-definitions]
-                          (keyword (:field-name field-definition)))]
-        ;; Use map-indexed so we can get an ID for each row (index + 1)
-        (doseq [[i row] (map-indexed (partial vector) rows)]
-          (let [row (for [v row]
-                      ;; Conver all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
-                      ;; using Timestamps in 2.x to Dates in 3.x
-                      (if (instance? java.sql.Timestamp v)
-                        (java.util.Date. (.getTime ^java.sql.Timestamp v))
-                        v))]
-            (try
-              ;; Insert each row
-              (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
-                                                      :_id (inc i)))
-              ;; If row already exists then nothing to do
-              (catch com.mongodb.MongoException _))))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (when-not skip-drop-db?
+     (destroy-db! dbdef))
+   (with-mongo-connection [mongo-db (database->connection-details dbdef)]
+     (doseq [{:keys [field-definitions table-name rows]} table-definitions]
+       (let [field-names (for [field-definition field-definitions]
+                           (keyword (:field-name field-definition)))]
+         ;; Use map-indexed so we can get an ID for each row (index + 1)
+         (doseq [[i row] (map-indexed (partial vector) rows)]
+           (let [row (for [v row]
+                       ;; Convert all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
+                       ;; using Timestamps in 2.x to Dates in 3.x
+                       (if (instance? java.sql.Timestamp v)
+                         (java.util.Date. (.getTime ^java.sql.Timestamp v))
+                         v))]
+             (try
+               ;; Insert each row
+               (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
+                                                       :_id (inc i)))
+               ;; If row already exists then nothing to do
+               (catch com.mongodb.MongoException _)))))))))
 
 
 (u/strict-extend MongoDriver
diff --git a/test/metabase/test/data/presto.clj b/test/metabase/test/data/presto.clj
index 21f4b6550dd5d952f533380ee2b0dc87d3ba8618..2c0f02794614dabdcbd8ab5f368f7b8c223a4289 100644
--- a/test/metabase/test/data/presto.clj
+++ b/test/metabase/test/data/presto.clj
@@ -71,18 +71,22 @@
       query
       (unprepare/unprepare (cons query params) :quote-escape "'", :iso-8601-fn :from_iso8601_timestamp))))
 
-(defn- create-db! [{:keys [table-definitions] :as dbdef}]
-  (let [details (database->connection-details :db dbdef)]
-    (doseq [tabledef table-definitions
-            :let [rows       (:rows tabledef)
-                  ;; generate an ID for each row because we don't have auto increments
-                  keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
-                  ;; make 100 rows batches since we have to inline everything
-                  batches    (partition 100 100 nil keyed-rows)]]
-      (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef))
-      (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
-      (doseq [batch batches]
-        (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions] :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (let [details (database->connection-details :db dbdef)]
+     (doseq [tabledef table-definitions
+             :let     [rows       (:rows tabledef)
+                       ;; generate an ID for each row because we don't have auto increments
+                       keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
+                       ;; make batches of 100 rows since we have to inline everything
+                       batches    (partition 100 100 nil keyed-rows)]]
+       (when-not skip-drop-db?
+         (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef)))
+       (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
+       (doseq [batch batches]
+         (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch)))))))
 
 ;;; IDriverTestExtensions implementation
 
diff --git a/test/metabase/test/data/snowflake.clj b/test/metabase/test/data/snowflake.clj
new file mode 100644
index 0000000000000000000000000000000000000000..27444b1c0d5dd55cd9e46fc7b4daa8f3fa7cfc6b
--- /dev/null
+++ b/test/metabase/test/data/snowflake.clj
@@ -0,0 +1,117 @@
+(ns metabase.test.data.snowflake
+  (:require [clojure.java.jdbc :as jdbc]
+            [clojure.string :as str]
+            [metabase.driver.generic-sql :as sql]
+            [metabase.test.data
+             [generic-sql :as generic]
+             [interface :as i]]
+            [metabase.util :as u]
+            [honeysql.core :as hsql]
+            [honeysql.helpers :as h]
+            [metabase.util.honeysql-extensions :as hx]
+            [honeysql.format :as hformat])
+  (:import metabase.driver.snowflake.SnowflakeDriver))
+
+(def ^:private ^SnowflakeDriver snowflake-driver (SnowflakeDriver.))
+
+(def ^:private ^:const field-base-type->sql-type
+  {:type/BigInteger "BIGINT"
+   :type/Boolean    "BOOLEAN"
+   :type/Date       "DATE"
+   :type/DateTime   "TIMESTAMPLTZ"
+   :type/Decimal    "DECIMAL"
+   :type/Float      "FLOAT"
+   :type/Integer    "INTEGER"
+   :type/Text       "TEXT"
+   :type/Time       "TIME"})
+
+(defn- database->connection-details [context {:keys [database-name]}]
+  (merge
+   {:account   (i/db-test-env-var-or-throw :snowflake :account)
+    :user      (i/db-test-env-var-or-throw :snowflake :user)
+    :password  (i/db-test-env-var-or-throw :snowflake :password)
+    :warehouse (i/db-test-env-var-or-throw :snowflake :warehouse)
+    ;; SESSION parameters
+    :timezone "UTC"}
+   ;; Snowflake JDBC driver ignores this, but we do use it in the `query-db-name` function in
+   ;; `metabase.driver.snowflake`
+   (when (= context :db)
+     {:db database-name})))
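+;; e.g. (database->connection-details :db {:database-name "test-data"}) returns the shared
+;; account/user/password/warehouse settings plus {:db "test-data"}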
+
+
+;; Snowflake requires you identify an object with db-name.schema-name.table-name
+(defn- qualified-name-components
+  ([_ db-name table-name]            [db-name "PUBLIC" table-name])
+  ([_ db-name]                       [db-name])
+  ([_ db-name table-name field-name] [db-name "PUBLIC" table-name field-name]))
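+;; e.g. (qualified-name-components nil "test-data" "venues" "id") ;; -> ["test-data" "PUBLIC" "venues" "id"]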
+
+(defn- create-db-sql [driver {:keys [database-name]}]
+  (let [db (generic/qualify+quote-name driver database-name)]
+    (format "DROP DATABASE IF EXISTS %s; CREATE DATABASE %s;" db db)))
+
+(defn- expected-base-type->actual [base-type]
+  (if (isa? base-type :type/Integer)
+    :type/Number
+    base-type))
+
+(defn- drop-database [_]) ; no-op since we shouldn't be trying to drop any databases anyway
+
+(defn- no-db-connection-spec
+  "Connection spec for connecting to our Snowflake instance without specifying a DB."
+  []
+  (sql/connection-details->spec snowflake-driver (database->connection-details nil nil)))
+
+(defn- existing-dataset-names []
+  (let [db-spec (no-db-connection-spec)]
+    (jdbc/with-db-metadata [metadata db-spec]
+      ;; for whatever dumb reason the Snowflake JDBC driver always returns these as uppercase despite us making them
+      ;; all lower-case
+      (set (map str/lower-case (sql/get-catalogs metadata))))))
+
+(let [datasets (atom nil)]
+  (defn- existing-datasets []
+    (when-not (seq @datasets)
+      (reset! datasets (existing-dataset-names))
+      (println "These Snowflake datasets have already been loaded:\n" (u/pprint-to-str (sort @datasets))))
+    @datasets)
+
+  (defn- add-existing-dataset! [database-name]
+    (swap! datasets conj database-name)))
+
+(defn- create-db!
+  ([db-def]
+   (create-db! snowflake-driver db-def))
+  ([driver {:keys [database-name] :as db-def}]
+   ;; ok, now check if already created. If already created, no-op
+   (when-not (contains? (existing-datasets) database-name)
+     ;; if not created, create the DB...
+     (try
+       (generic/default-create-db! driver db-def)
+       ;; and add it to the set of DBs that have been created
+       (add-existing-dataset! database-name)
+       ;; if creating the DB failed, DROP it so we don't leave behind a DB full of bad data that would make us
+       ;; skip reloading it next time around
+       (catch Throwable e
+         (let [drop-db-sql (format "DROP DATABASE \"%s\";" database-name)]
+           (println "Creating DB failed; executing" drop-db-sql)
+           (jdbc/execute! (no-db-connection-spec) [drop-db-sql]))
+         (throw e))))))
+
+(u/strict-extend SnowflakeDriver
+  generic/IGenericSQLTestExtensions
+  (merge generic/DefaultsMixin
+         {:field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)
+          :create-db-sql             create-db-sql
+          :execute-sql!              generic/sequentially-execute-sql!
+          :pk-sql-type               (constantly "INTEGER AUTOINCREMENT")
+          :qualified-name-components qualified-name-components
+          :load-data!                generic/load-data-add-ids!})
+
+  i/IDriverTestExtensions
+  (merge generic/IDriverTestExtensionsMixin
+         {:database->connection-details (u/drop-first-arg database->connection-details)
+          :default-schema               (constantly "PUBLIC")
+          :engine                       (constantly :snowflake)
+          :id-field-type                (constantly :type/Number)
+          :expected-base-type->actual   (u/drop-first-arg expected-base-type->actual)
+          :create-db!                   create-db!}))
diff --git a/test/metabase/test/mock/util.clj b/test/metabase/test/mock/util.clj
index fe592beaf3496d8edc4b7f4fbb3192f172114c6a..f3c0fa45b8bc826858b7c7fdb59c62ecf5da6d44 100644
--- a/test/metabase/test/mock/util.clj
+++ b/test/metabase/test/mock/util.clj
@@ -1,5 +1,5 @@
 (ns metabase.test.mock.util
-  (:require [metabase.query-processor :as qp]))
+  (:require [metabase.mbql.util :as mbql.u]))
 
 (def table-defaults
   {:description             nil
@@ -43,25 +43,32 @@
 
 (def venue-fingerprints
   "Fingerprints for the full venues table"
-  {:name        {:global {:distinct-count 100},
+  {:name        {:global {:distinct-count 100
+                          :nil%           0.0},
                  :type   {:type/Text {:percent-json  0.0, :percent-url    0.0,
                                       :percent-email 0.0, :average-length 15.63}}}
    :id          nil
-   :price       {:global {:distinct-count 4},
-                 :type   {:type/Number {:min 1.0, :max 4.0, :avg 2.03}}}
-   :latitude    {:global {:distinct-count 94},
-                 :type   {:type/Number {:min 10.06, :max 40.78, :avg 35.51}}}
-   :category_id {:global {:distinct-count 28}}
-   :longitude   {:global {:distinct-count 84},
-                 :type   {:type/Number {:min -165.37, :max -73.95, :avg -116.0}}}})
+   :price       {:global {:distinct-count 4
+                          :nil%           0.0},
+                 :type   {:type/Number {:min 1.0, :max 4.0, :avg 2.03, :q1 1.0, :q3 2.0, :sd 0.77}}}
+   :latitude    {:global {:distinct-count 94
+                          :nil%           0.0},
+                 :type   {:type/Number {:min 10.06, :max 40.78, :avg 35.51, :q1 34.0, :q3 38.0, :sd 3.43}}}
+   :category_id {:global {:distinct-count 28
+                          :nil%           0.0}}
+   :longitude   {:global {:distinct-count 84
+                          :nil%           0.0},
+                 :type   {:type/Number {:min -165.37, :max -73.95, :avg -116.0, :q1 -122.0, :q3 -118.0, :sd 14.16}}}})
 
 ;; This is just a fake implementation that just swoops in and returns somewhat-correct looking results for different
 ;; queries we know will get ran as part of sync
-(defn- is-table-row-count-query? [expanded-query]
-  (= :count (get-in expanded-query [:query :aggregation 0 :aggregation-type])))
+(defn- is-table-row-count-query? [query]
+  (boolean
+   (mbql.u/match (-> query :query :aggregation)
+     [:count & _])))
 
-(defn- is-table-sample-query? [expanded-query]
-  (seq (get-in expanded-query [:query :fields])))
+(defn- is-table-sample-query? [query]
+  (seq (get-in query [:query :fields])))
 
 (defn process-query-in-context
   "QP mock that will return some 'appropriate' fake answers to the questions we know are ran during the sync process
@@ -69,12 +76,16 @@
    for any other queries, including ones for determining FieldValues."
   [_ _]
   (fn [query]
-    (let [expanded-query (qp/expand query)]
-      {:data
-       {:rows
-        (cond
-          (is-table-row-count-query? expanded-query) [[1000]]
-          (is-table-sample-query? expanded-query)    (let [fields-count (count (get-in query [:query :fields]))]
-                                                       (for [i (range 500)]
-                                                         (repeat fields-count i)))
-          :else                                      nil)}})))
+    {:data
+     {:rows
+      (cond
+        (is-table-row-count-query? query)
+        [[1000]]
+
+        (is-table-sample-query? query)
+        (let [fields-count (count (get-in query [:query :fields]))]
+          (for [i (range 500)]
+            (repeat fields-count i)))
+
+        :else
+        nil)}}))
diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj
index 864f40c08e99bd17a134d5671d4626b7aff86a63..ad4dc0e724d7d86094a623e2961e1186c6bc4eda 100644
--- a/test/metabase/test/util.clj
+++ b/test/metabase/test/util.clj
@@ -1,13 +1,11 @@
 (ns metabase.test.util
   "Helper functions and macros for writing unit tests."
   (:require [cheshire.core :as json]
-            [clj-time
-             [coerce :as tcoerce]
-             [core :as time]]
-            [clojure.string :as s]
             [clj-time.core :as time]
+            [clojure
+             [string :as s]
+             [walk :as walk]]
             [clojure.tools.logging :as log]
-            [clojure.walk :as walk]
             [clojurewerkz.quartzite.scheduler :as qs]
             [expectations :refer :all]
             [metabase
@@ -35,7 +33,6 @@
              [table :refer [Table]]
              [task-history :refer [TaskHistory]]
              [user :refer [User]]]
-            [metabase.query-processor.util :as qputil]
             [metabase.test.data :as data]
             [metabase.test.data
              [dataset-definitions :as defs]
@@ -49,7 +46,6 @@
            org.joda.time.DateTimeZone
            [org.quartz CronTrigger JobDetail JobKey Scheduler Trigger]))
 
-
 ;;; ---------------------------------------------------- match-$ -----------------------------------------------------
 
 (defn- $->prop
@@ -410,33 +406,49 @@
     m
     (apply update-in m ks f args)))
 
-(defn- round-fingerprint-fields [fprint-type-map fields]
+(defn- round-fingerprint-fields [fprint-type-map decimal-places fields]
   (reduce (fn [fprint field]
             (update-in-if-present fprint [field] (fn [num]
                                                    (if (integer? num)
                                                      num
-                                                     (u/round-to-decimals 3 num)))))
+                                                     (u/round-to-decimals decimal-places num)))))
           fprint-type-map fields))
 
 (defn round-fingerprint
-  "Rounds the numerical fields of a fingerprint to 4 decimal places"
+  "Rounds the numerical fields of a fingerprint to 2 decimal places"
   [field]
   (-> field
-      (update-in-if-present [:fingerprint :type :type/Number] round-fingerprint-fields [:min :max :avg])
-      (update-in-if-present [:fingerprint :type :type/Text] round-fingerprint-fields [:percent-json :percent-url :percent-email :average-length])))
-
-(defn round-fingerprint-cols [query-results]
-  (let [maybe-data-cols (if (contains? query-results :data)
-                          [:data :cols]
-                          [:cols])]
-    (update-in query-results maybe-data-cols #(map round-fingerprint %))))
+      (update-in-if-present [:fingerprint :type :type/Number] round-fingerprint-fields 2 [:min :max :avg :sd])
+      ;; quartile estimation is order-dependent and the ordering is not stable across different DB engines, hence the more aggressive rounding
+      (update-in-if-present [:fingerprint :type :type/Number] round-fingerprint-fields 0 [:q1 :q3])
+      (update-in-if-present [:fingerprint :type :type/Text] round-fingerprint-fields 2 [:percent-json :percent-url :percent-email :average-length])))
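+;; e.g. (hypothetical fingerprint values) :avg 2.034 rounds to 2.03, while quartile estimates like :q1 1.4 round to 1.0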
+
+(defn round-fingerprint-cols
+  ([query-results]
+   (if (map? query-results)
+     (let [maybe-data-cols (if (contains? query-results :data)
+                             [:data :cols]
+                             [:cols])]
+       (round-fingerprint-cols maybe-data-cols query-results))
+     (map round-fingerprint query-results)))
+  ([k query-results]
+   (update-in query-results k #(map round-fingerprint %))))
+
+(defn postwalk-pred
+  "Transform `form` by applying `f` to each node where `pred` returns true"
+  [pred f form]
+  (walk/postwalk (fn [node]
+                   (if (pred node)
+                     (f node)
+                     node))
+                 form))
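+
+;; e.g. (postwalk-pred double? int {:a 1.5, :b [2.5 "x"]}) ;; -> {:a 1, :b [2 "x"]}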
 
 (defn round-all-decimals
   "Uses `walk/postwalk` to crawl `data`, looking for any double values, will round any it finds"
   [decimal-place data]
-  (qputil/postwalk-pred double?
-                        #(u/round-to-decimals decimal-place %)
-                        data))
+  (postwalk-pred double?
+                 #(u/round-to-decimals decimal-place %)
+                 data))
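+
+;; e.g. (round-all-decimals 2 {:avg 35.50516, :max 40.0}) ;; -> {:avg 35.51, :max 40.0}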
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -646,3 +658,30 @@
   admin has removed them."
   [& body]
   `(do-with-non-admin-groups-no-root-collection-perms (fn [] ~@body)))
+
+
+(defn doall-recursive
+  "Like `doall`, but recursively calls doall on map values and nested sequences, giving you a fully non-lazy object.
+  Useful for tests when you need the entire object to be realized in the body of a `binding`, `with-redefs`, or
+  `with-temp` form."
+  [x]
+  (cond
+    (map? x)
+    (into {} (for [[k v] (doall x)]
+               [k (doall-recursive v)]))
+
+    (sequential? x)
+    (mapv doall-recursive (doall x))
+
+    :else
+    x))
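+
+;; e.g. (doall-recursive {:a (map inc [1 2])}) ;; -> {:a [2 3]}, with the lazy seq fully realized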
+
+(defmacro exception-and-message
+  "Invokes `body`, catches the exception and returns a map with the exception class, message and data"
+  [& body]
+  `(try
+     ~@body
+     (catch Exception e#
+       {:ex-class (class e#)
+        :msg      (.getMessage e#)
+        :data     (ex-data e#)})))
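+
+;; e.g. (exception-and-message (throw (ex-info "boom" {:reason :test})))
+;; ;; -> {:ex-class clojure.lang.ExceptionInfo, :msg "boom", :data {:reason :test}}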
diff --git a/test/metabase/test/util/log.clj b/test/metabase/test/util/log.clj
new file mode 100644
index 0000000000000000000000000000000000000000..fb00572c9b65208583dc57553dd4cc6a334e0c18
--- /dev/null
+++ b/test/metabase/test/util/log.clj
@@ -0,0 +1,44 @@
+(ns metabase.test.util.log
+  "Utils for controlling the logging that goes on when running tests."
+  (:require [clojure.tools.logging :as log]
+            [metabase.util :as u])
+  (:import [org.apache.log4j Level Logger LogManager]
+           [org.apache.commons.io.output NullOutputStream NullWriter]
+           java.io.PrintStream))
+
+(defn do-with-suppressed-output
+  "Impl for `suppress-output` macro; don't use this directly."
+  [f]
+  ;; yes, swapping out *out*/*err*, swapping out System.out/System.err, and setting all the log levels to OFF is
+  ;; really necessary to suppress everything (!)
+  (let [orig-out         (System/out)
+        orig-err         (System/err)
+        null-stream      (PrintStream. (NullOutputStream.))
+        null-writer      (NullWriter.)
+        loggers          (cons
+                          (Logger/getRootLogger)
+                          (enumeration-seq (LogManager/getCurrentLoggers)))
+        logger+old-level (vec (for [^Logger logger loggers]
+                                [logger (.getLevel logger)]))]
+    (try
+      (System/setOut null-stream)
+      (System/setErr null-stream)
+      (doseq [^Logger logger loggers]
+        (.setLevel logger Level/OFF))
+      (binding [*out* null-writer
+                *err* null-writer]
+        (f))
+      (finally
+        (System/setOut orig-out)
+        (System/setErr orig-err)
+        (.close null-stream)            ; not 100% sure this is necessary
+        (.close null-writer)
+        (doseq [[^Logger logger, ^Level old-level] logger+old-level]
+          (.setLevel logger old-level))))))
+
+(defmacro suppress-output
+  "Execute `body` with all logging/`*out*`/`*err*` messages suppressed. Useful for avoiding cluttering up test output
+  for tests with stacktraces and error messages from tests that are supposed to fail."
+  {:style/indent 0}
+  [& body]
+  `(do-with-suppressed-output (fn [] ~@body)))
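+
+;; Example usage -- keep expected error noise out of test output, e.g. as in
+;; `metabase.task.sync-databases-test` above:
+;;
+;;   (suppress-output
+;;     (db/update! Database (u/get-id database) :metadata_sync_schedule "2 CANS PER DAY"))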
diff --git a/test/metabase/test/util_test.clj b/test/metabase/test/util_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..ae97bcf7f99a5f72dbe01b16ad5ff9579319b2e0
--- /dev/null
+++ b/test/metabase/test/util_test.clj
@@ -0,0 +1,25 @@
+(ns metabase.test.util-test
+  "Tests for the test utils!"
+  (:require [expectations :refer :all]
+            [metabase.models.field :refer [Field]]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
+            [metabase.util :as u]
+            [toucan.db :as db]))
+
+;; let's make sure this actually works right!
+(expect
+  [-1 0]
+  (let [position #(db/select-one-field :position Field :id (data/id :venues :price))]
+    [(tu/with-temp-vals-in-db Field (data/id :venues :price) {:position -1}
+       (position))
+     (position)]))
+
+(expect
+  0
+  (do
+    (u/ignore-exceptions
+      (tu/with-temp-vals-in-db Field (data/id :venues :price) {:position -1}
+        (throw (Exception.))))
+    (db/select-one-field :position Field :id (data/id :venues :price))))
diff --git a/test/metabase/timeseries_query_processor_test.clj b/test/metabase/timeseries_query_processor_test.clj
index f285220be8ff2e36bc7e74ec5299577666225f0d..bca7ee71d65f9631a03957943cecf7a74e399cf2 100644
--- a/test/metabase/timeseries_query_processor_test.clj
+++ b/test/metabase/timeseries_query_processor_test.clj
@@ -19,7 +19,6 @@
 ;;; "bare rows" query, limit
 (expect-with-timeseries-dbs
   {:columns ["id"
-             "timestamp"
              "count"
              "user_last_login"
              "user_name"
@@ -27,16 +26,16 @@
              "venue_latitude"
              "venue_longitude"
              "venue_name"
-             "venue_price"]
-   :rows [["931", "2013-01-03T08:00:00.000Z", 1, "2014-01-01T08:30:00.000Z", "Simcha Yan", "Thai", "34.094",  "-118.344", "Kinaree Thai Bistro",       "1"]
-          ["285", "2013-01-10T08:00:00.000Z", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",   "Thai", "34.1021", "-118.306", "Ruen Pair Thai Restaurant", "2"]]}
+             "venue_price"
+             "timestamp"]
+   :rows [["931", 1, "2014-01-01T08:30:00.000Z", "Simcha Yan", "Thai", "34.094",  "-118.344", "Kinaree Thai Bistro",       "1", "2013-01-03T08:00:00.000Z"]
+          ["285", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",   "Thai", "34.1021", "-118.306", "Ruen Pair Thai Restaurant", "2", "2013-01-10T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
           {:limit 2})))
 
 ;;; "bare rows" query, limit, order-by timestamp desc
 (expect-with-timeseries-dbs
   {:columns ["id"
-             "timestamp"
              "count"
              "user_last_login"
              "user_name"
@@ -44,9 +43,10 @@
              "venue_latitude"
              "venue_longitude"
              "venue_name"
-             "venue_price"]
-   :rows    [["693", "2015-12-29T08:00:00.000Z", 1, "2014-07-03T19:30:00.000Z", "Frans Hevel", "Mexican", "34.0489", "-118.238", "Señor Fish",       "2"]
-             ["570", "2015-12-26T08:00:00.000Z", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",    "Chinese", "37.7949", "-122.406", "Empress of China", "3"]]}
+             "venue_price"
+             "timestamp"]
+   :rows    [["693", 1, "2014-07-03T19:30:00.000Z", "Frans Hevel", "Mexican", "34.0489", "-118.238", "Señor Fish",       "2", "2015-12-29T08:00:00.000Z"]
+             ["570", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",    "Chinese", "37.7949", "-122.406", "Empress of China", "3", "2015-12-26T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
           {:order-by [[:desc $timestamp]]
            :limit    2})))
@@ -54,7 +54,6 @@
 ;;; "bare rows" query, limit, order-by timestamp asc
 (expect-with-timeseries-dbs
   {:columns ["id"
-             "timestamp"
              "count"
              "user_last_login"
              "user_name"
@@ -62,9 +61,10 @@
              "venue_latitude"
              "venue_longitude"
              "venue_name"
-             "venue_price"]
-   :rows    [["931", "2013-01-03T08:00:00.000Z", 1, "2014-01-01T08:30:00.000Z", "Simcha Yan", "Thai", "34.094",  "-118.344", "Kinaree Thai Bistro",       "1"]
-             ["285", "2013-01-10T08:00:00.000Z", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",   "Thai", "34.1021", "-118.306", "Ruen Pair Thai Restaurant", "2"]]}
+             "venue_price"
+             "timestamp"]
+   :rows    [["931", 1, "2014-01-01T08:30:00.000Z", "Simcha Yan", "Thai", "34.094",  "-118.344", "Kinaree Thai Bistro",       "1", "2013-01-03T08:00:00.000Z"]
+             ["285", 1, "2014-07-03T01:30:00.000Z", "Kfir Caj",   "Thai", "34.1021", "-118.306", "Ruen Pair Thai Restaurant", "2", "2013-01-10T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
           {:order-by [[:asc $timestamp]]
            :limit    2})))
@@ -77,7 +77,7 @@
    :rows    [["Kinaree Thai Bistro"       "Thai" "2013-01-03T08:00:00.000Z"]
              ["Ruen Pair Thai Restaurant" "Thai" "2013-01-10T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
-          {:fields [$venue_name $venue_category_name]
+          {:fields [$venue_name $venue_category_name $timestamp]
            :limit  2})))
 
 ;;; fields clause, order by timestamp asc
@@ -86,7 +86,7 @@
    :rows    [["Kinaree Thai Bistro"       "Thai" "2013-01-03T08:00:00.000Z"]
              ["Ruen Pair Thai Restaurant" "Thai" "2013-01-10T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
-          {:fields   [$venue_name $venue_category_name]
+          {:fields   [$venue_name $venue_category_name $timestamp]
            :order-by [[:asc $timestamp]]
            :limit    2})))
 
@@ -96,7 +96,7 @@
    :rows    [["Señor Fish"       "Mexican" "2015-12-29T08:00:00.000Z"]
              ["Empress of China" "Chinese" "2015-12-26T08:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
-          {:fields   [$venue_name $venue_category_name]
+          {:fields   [$venue_name $venue_category_name $timestamp]
            :order-by [[:desc $timestamp]]
            :limit    2})))
 
@@ -301,7 +301,7 @@
              ["Plato Yeshua" "The Daily Pint" "Bar"      "2013-07-25T07:00:00.000Z"]
              ["Plato Yeshua" "Marlowe"        "American" "2013-09-10T07:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
-          {:fields [$user_name $venue_name $venue_category_name]
+          {:fields [$user_name $venue_name $venue_category_name $timestamp]
            :filter [:= $user_name "Plato Yeshua"]
            :limit  5})))
 
@@ -318,7 +318,7 @@
   {:columns ["user_name" "venue_name" "timestamp"]
    :rows    [["Plato Yeshua" "The Daily Pint" "2013-07-25T07:00:00.000Z"]]}
   (data (data/run-mbql-query checkins
-          {:fields [$user_name $venue_name]
+          {:fields [$user_name $venue_name $timestamp]
            :filter [:and
                     [:= $venue_category_name "Bar"]
                     [:= $user_name "Plato Yeshua"]]})))
@@ -632,11 +632,11 @@
 ;; https://github.com/metabase/metabase/issues/5969.
 (expect-with-timeseries-dbs
   {:columns ["timestamp" "count"]
-   :rows    [["2013-01-01T00:00:00.000Z" 8]
-             ["2013-02-01T00:00:00.000Z" 11]
-             ["2013-03-01T00:00:00.000Z" 21]
-             ["2013-04-01T00:00:00.000Z" 26]
-             ["2013-05-01T00:00:00.000Z" 23]]}
+   :rows    [["2013-01-01" 8]
+             ["2013-02-01" 11]
+             ["2013-03-01" 21]
+             ["2013-04-01" 26]
+             ["2013-05-01" 23]]}
   (-> (data/run-mbql-query checkins
         {:aggregation [[:count]]
          :breakout    [[:datetime-field $timestamp :month]]})
diff --git a/test/metabase/util_test.clj b/test/metabase/util_test.clj
index b0d527863f26b94e41250b3785ac80f5d4c21599..5a278d0aea3d19e0e3bef6fc28646f23408ab94c 100644
--- a/test/metabase/util_test.clj
+++ b/test/metabase/util_test.clj
@@ -201,3 +201,7 @@
 ;; make sure we can parse wacky version strings like `9-internal`: See #8282
 (expect
   (is-java-9-or-higher? "9-internal"))
+
+(expect
+  {:num_cans 2, :lisp_case? {:nested_maps? true}}
+  (snake-keys {:num-cans 2, :lisp-case? {:nested-maps? true}}))
diff --git a/test_resources/eastwood-config.clj b/test_resources/eastwood-config.clj
new file mode 100644
index 0000000000000000000000000000000000000000..c0cb617ab3f25cea6eacbdc296775a57d3189fd5
--- /dev/null
+++ b/test_resources/eastwood-config.clj
@@ -0,0 +1,7 @@
+(disable-warning
+ {:linter :suspicious-expression
+  :for-macro 'clojure.core/and
+  :if-inside-macroexpansion-of #{'clojure.core.match/match}
+  :within-depth 50
+  :reason (str "By default, eastwood only allows a depth of up to 13 when ignoring single-arg `and` "
+               "in `core/match` macro expansions; some of our `mbql.u/match` macros exceed that depth.")})
diff --git a/test_resources/slack_channels_response.json b/test_resources/slack_channels_response.json
new file mode 100644
index 0000000000000000000000000000000000000000..b38aa245adecb5410be5fc0b30c708401411825b
--- /dev/null
+++ b/test_resources/slack_channels_response.json
@@ -0,0 +1,70 @@
+{
+    "ok": true,
+    "channels": [
+        {
+            "id": "C0G9QF9GW",
+            "name": "random",
+            "is_channel": true,
+            "created": 1449709280,
+            "creator": "U0G9QF9C6",
+            "is_archived": false,
+            "is_general": false,
+            "name_normalized": "random",
+            "is_shared": false,
+            "is_org_shared": false,
+            "is_member": true,
+            "is_private": false,
+            "is_mpim": false,
+            "members": [
+                "U0G9QF9C6",
+                "U0G9WFXNZ"
+            ],
+            "topic": {
+                "value": "Other stuff",
+                "creator": "U0G9QF9C6",
+                "last_set": 1449709352
+            },
+            "purpose": {
+                "value": "A place for non-work-related flimflam, faffing, hodge-podge or jibber-jabber you'd prefer to keep out of more focused work-related channels.",
+                "creator": "",
+                "last_set": 0
+            },
+            "previous_names": [],
+            "num_members": 2
+        },
+        {
+            "id": "C0G9QKBBL",
+            "name": "general",
+            "is_channel": true,
+            "created": 1449709280,
+            "creator": "U0G9QF9C6",
+            "is_archived": false,
+            "is_general": true,
+            "name_normalized": "general",
+            "is_shared": false,
+            "is_org_shared": false,
+            "is_member": true,
+            "is_private": false,
+            "is_mpim": false,
+            "members": [
+                "U0G9QF9C6",
+                "U0G9WFXNZ"
+            ],
+            "topic": {
+                "value": "Talk about anything!",
+                "creator": "U0G9QF9C6",
+                "last_set": 1449709364
+            },
+            "purpose": {
+                "value": "To talk about anything!",
+                "creator": "U0G9QF9C6",
+                "last_set": 1449709334
+            },
+            "previous_names": [],
+            "num_members": 2
+        }
+    ],
+    "response_metadata": {
+        "next_cursor": "dGVhbTpDMUg5UkVTR0w="
+    }
+}
diff --git a/test_resources/slack_users_response.json b/test_resources/slack_users_response.json
new file mode 100644
index 0000000000000000000000000000000000000000..b91a795278dd46cad98294c79248eed426056724
--- /dev/null
+++ b/test_resources/slack_users_response.json
@@ -0,0 +1,86 @@
+{
+    "ok": true,
+    "members": [
+        {
+            "id": "W012A3CDE",
+            "team_id": "T012AB3C4",
+            "name": "spengler",
+            "deleted": false,
+            "color": "9f69e7",
+            "real_name": "spengler",
+            "tz": "America/Los_Angeles",
+            "tz_label": "Pacific Daylight Time",
+            "tz_offset": -25200,
+            "profile": {
+                "avatar_hash": "ge3b51ca72de",
+                "status_text": "Print is dead",
+                "status_emoji": ":books:",
+                "real_name": "Egon Spengler",
+                "display_name": "spengler",
+                "real_name_normalized": "Egon Spengler",
+                "display_name_normalized": "spengler",
+                "email": "spengler@ghostbusters.example.com",
+                "image_24": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "image_32": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "image_48": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "image_72": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "image_192": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "image_512": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
+                "team": "T012AB3C4"
+            },
+            "is_admin": true,
+            "is_owner": false,
+            "is_primary_owner": false,
+            "is_restricted": false,
+            "is_ultra_restricted": false,
+            "is_bot": false,
+            "updated": 1502138686,
+            "is_app_user": false,
+            "has_2fa": false
+        },
+        {
+            "id": "W07QCRPA4",
+            "team_id": "T0G9PQBBK",
+            "name": "glinda",
+            "deleted": false,
+            "color": "9f69e7",
+            "real_name": "Glinda Southgood",
+            "tz": "America/Los_Angeles",
+            "tz_label": "Pacific Daylight Time",
+            "tz_offset": -25200,
+            "profile": {
+                "avatar_hash": "8fbdd10b41c6",
+                "image_24": "https://a.slack-edge.com...png",
+                "image_32": "https://a.slack-edge.com...png",
+                "image_48": "https://a.slack-edge.com...png",
+                "image_72": "https://a.slack-edge.com...png",
+                "image_192": "https://a.slack-edge.com...png",
+                "image_512": "https://a.slack-edge.com...png",
+                "image_1024": "https://a.slack-edge.com...png",
+                "image_original": "https://a.slack-edge.com...png",
+                "first_name": "Glinda",
+                "last_name": "Southgood",
+                "title": "Glinda the Good",
+                "phone": "",
+                "skype": "",
+                "real_name": "Glinda Southgood",
+                "real_name_normalized": "Glinda Southgood",
+                "display_name": "Glinda the Fairly Good",
+                "display_name_normalized": "Glinda the Fairly Good",
+                "email": "glenda@south.oz.coven"
+            },
+            "is_admin": true,
+            "is_owner": false,
+            "is_primary_owner": false,
+            "is_restricted": false,
+            "is_ultra_restricted": false,
+            "is_bot": false,
+            "updated": 1480527098,
+            "has_2fa": false
+        }
+    ],
+    "cache_ts": 1498777272,
+    "response_metadata": {
+        "next_cursor": "dXNlcjpVMEc5V0ZYTlo="
+    }
+}
diff --git a/yarn.lock b/yarn.lock
index 6132985205478ea3100e74a9293fa2e4dd5a5e97..f9ead6746ad6e7820ab1cec06894c2081e6c723f 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3713,6 +3713,11 @@ dom-serializer@0, dom-serializer@~0.1.0:
     domelementtype "~1.1.1"
     entities "~1.1.1"
 
+dom-walk@^0.1.0:
+  version "0.1.1"
+  resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.1.tgz#672226dc74c8f799ad35307df936aba11acd6018"
+  integrity sha1-ZyIm3HTI95mtNTB9+TaroRrNYBg=
+
 domain-browser@^1.1.1:
   version "1.1.7"
   resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.1.7.tgz#867aa4b093faa05f1de08c06f4d7b21fdf8698bc"
@@ -5192,6 +5197,14 @@ global-dirs@^0.1.0:
   dependencies:
     ini "^1.3.4"
 
+global@^4.3.0:
+  version "4.3.2"
+  resolved "https://registry.yarnpkg.com/global/-/global-4.3.2.tgz#e76989268a6c74c38908b1305b10fc0e394e9d0f"
+  integrity sha1-52mJJopsdMOJCLEwWxD8DjlOnQ8=
+  dependencies:
+    min-document "^2.19.0"
+    process "~0.5.1"
+
 globals-docs@^2.3.0:
   version "2.4.0"
   resolved "https://registry.yarnpkg.com/globals-docs/-/globals-docs-2.4.0.tgz#f2c647544eb6161c7c38452808e16e693c2dafbb"
@@ -8075,6 +8088,13 @@ mimic-fn@^1.0.0:
   resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18"
   integrity sha1-5md4PZLonb00KBi1IwudYqZyrRg=
 
+min-document@^2.19.0:
+  version "2.19.0"
+  resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685"
+  integrity sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=
+  dependencies:
+    dom-walk "^0.1.0"
+
 minimalistic-assert@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.0.tgz#702be2dda6b37f4836bcb3f5db56641b64a1d3d3"
@@ -10141,6 +10161,11 @@ process@^0.11.10:
   resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
   integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
 
+process@~0.5.1:
+  version "0.5.2"
+  resolved "https://registry.yarnpkg.com/process/-/process-0.5.2.tgz#1638d8a8e34c2f440a91db95ab9aeb677fc185cf"
+  integrity sha1-FjjYqONML0QKkduVq5rrZ3/Bhc8=
+
 progress@^1.1.8:
   version "1.1.8"
   resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be"
@@ -13773,6 +13798,14 @@ xdg-basedir@^3.0.0:
   resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4"
   integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=
 
+xhr-mock@^2.4.1:
+  version "2.4.1"
+  resolved "https://registry.yarnpkg.com/xhr-mock/-/xhr-mock-2.4.1.tgz#cb502e3d50b8b2ec31bd61766ce516bfc1dd072f"
+  integrity sha1-y1AuPVC4suwxvWF2bOUWv8HdBy8=
+  dependencies:
+    global "^4.3.0"
+    url "^0.11.0"
+
 xml-char-classes@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/xml-char-classes/-/xml-char-classes-1.0.0.tgz#64657848a20ffc5df583a42ad8a277b4512bbc4d"