diff --git a/.dir-locals.el b/.dir-locals.el
index f92aa1fe1d171dfdccb49dc855ae4d47bfb7f0c7..1f13d94e1d46a27a42febfe2c66d50620d5ddb61 100644
--- a/.dir-locals.el
+++ b/.dir-locals.el
@@ -36,7 +36,6 @@
                               (expect-with-engine 1)
                               (expect-with-engines 1)
                               (format-color 2)
-                              (if-questionable-timezone-support 0)
                               (ins 1)
                               (let-400 1)
                               (let-404 1)
diff --git a/OSX/Metabase/Metabase-Info.plist b/OSX/Metabase/Metabase-Info.plist
index 7b3bdaf6f174864b38b4d3f696899b61f48fcf0e..b8bc07a19759a73b4dc1db4949feb30ccad7ffc2 100644
--- a/OSX/Metabase/Metabase-Info.plist
+++ b/OSX/Metabase/Metabase-Info.plist
@@ -17,11 +17,11 @@
 	<key>CFBundlePackageType</key>
 	<string>APPL</string>
 	<key>CFBundleShortVersionString</key>
-	<string>0.12.1.991</string>
+	<string>0.13.0.0</string>
 	<key>CFBundleSignature</key>
 	<string>????</string>
 	<key>CFBundleVersion</key>
-	<string>0.12.1.991</string>
+	<string>0.13.0.0</string>
 	<key>LSApplicationCategoryType</key>
 	<string>public.app-category.utilities</string>
 	<key>LSMinimumSystemVersion</key>
diff --git a/bin/start b/bin/start
index da0a16451a534a140a7403d017a53598679dcf4b..d3cb9795c8c6cf9460baca533d7fcee8c6c27f88 100755
--- a/bin/start
+++ b/bin/start
@@ -2,6 +2,9 @@
 
 set -e
 
+# NOTE: The canonical source for this file is in the metabase/metabase repository.
+# Please update it there then copy to the metabase/metabase-deploy repository.
+
 # Translate various Heroku environment variables to Metabase equivalents
 
 if [ "$PORT" ]; then
@@ -73,5 +76,4 @@ if [ ! -z "$RDS_HOSTNAME" ]; then
     export MB_DB_PORT=$RDS_PORT
 fi
 
-
-exec java $JAVA_OPTS -jar ./target/uberjar/metabase.jar
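+# -Dfile.encoding=UTF-8 sets the JVM's default charset so non-ASCII data is handled consistently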
+exec java -Dfile.encoding=UTF-8 $JAVA_OPTS -jar ./target/uberjar/metabase.jar
diff --git a/docs/administration-guide/02-setting-up-email.md b/docs/administration-guide/02-setting-up-email.md
index 0e33cecb78ab5fc99cf10ac4d0fca9096ce546c2..3b64d6758d09e19c9ed2e7f1fd6e3e284c4fa7ea 100644
--- a/docs/administration-guide/02-setting-up-email.md
+++ b/docs/administration-guide/02-setting-up-email.md
@@ -1,7 +1,7 @@
 
 ## Setting up Email
 
-Once you connect your database to Metabase, you'll want to configure an email account to send system notifications to your organization's users.  Metabase uses email to reset passwords, onboard new users, and notify you when something happens.  
+Once you connect your database to Metabase, you'll want to configure an email account to send system notifications to your organization's users.  Metabase uses email to reset passwords, onboard new users, and notify you when something happens.
 
 ### Configuring your Email Account
 
@@ -14,6 +14,7 @@ You should see this form:
 ![Email Credentials](images/EmailCredentials.png)
 
 **If you use Google Apps:**
+
 * In the **SMTP host** field, enter smtp.gmail.com
 * Fill in 465 for the **SMTP port** field
 * For the **SMTP Security** field, enter **SSL**
@@ -22,20 +23,23 @@ You should see this form:
 * Enter the email address you would like to use as the sender of system notifications in the **From Address** field.
 
 **If you use Amazon SES:**
+
 * Log on to [https://console.aws.amazon.com/ses](https://console.aws.amazon.com/ses).
-* Click **SMTP Settings** from the navigation pane.  
+* Click **SMTP Settings** from the navigation pane.
 * Select **Create My SMTP Credentials** in the content pane.
-* Create a user in the **Create User for SMTP** dialog box and then click **Create**.  
+* Create a user in the **Create User for SMTP** dialog box and then click **Create**.
 * Next, select **Show User SMTP Credentials** to view the user's SMTP credentials.
 * Go back to the Metabase Admin Panel form and enter the info there.
 
 **If you use Mandrill:**
+
 * Log in to your Mandrill account and locate your credentials from the **SMTP & API Info** page there.
 * Your SMTP password is any active API key for your account — *not* your Mandrill password.
-* Although Mandrill lists **port 587**, [any port supported by Mandrill](https://mandrill.zendesk.com/hc/en-us/articles/205582167-What-SMTP-ports-can-I-use-) will work for SMTP email.  
+* Although Mandrill lists **port 587**, [any port supported by Mandrill](https://mandrill.zendesk.com/hc/en-us/articles/205582167-What-SMTP-ports-can-I-use-) will work for SMTP email.
 * Now you can go back to the Metabase Admin Panel form and enter the info there.
 
 **No matter which email provider you use,**
+
 * SSL is strongly recommended because it’s more secure and gives your account extra protection from threats.
 * If your email service has a whitelist of email addresses that are allowed to send email, be sure to whitelist the email address that you put in the **From Address** field to ensure you and your teammates receive all emails from Metabase.
 
diff --git a/docs/administration-guide/start.md b/docs/administration-guide/start.md
index 9927492685d9dc52d4b568e2ed4c41bdca5aa186..1b02872bae9be59554a5d6e51035f14357dae20b 100644
--- a/docs/administration-guide/start.md
+++ b/docs/administration-guide/start.md
@@ -11,6 +11,6 @@ Are you in charge of managing Metabase for your organization? Then you're in the
 * [Configure settings](06-configuration-settings.md)
 * [Setting up Slack Integration](07-setting-up-slack.md)
 
-First things first, you'll need to install Metabase. If you haven’t done that yet, our [Installation Guide](../operations-guide/start.md#installing-metabase) will help you through the process.
+First things first, you'll need to install Metabase. If you haven’t done that yet, our [Installation Guide](../operations-guide/start.md#installing-and-running-metabase) will help you through the process.
 
 Already done with that? Then let’s start with going over [connecting Metabase to your database](01-managing-databases.md).
diff --git a/docs/developers-guide-osx.md b/docs/developers-guide-osx.md
index 97e75f04d5d80d41c89737e92daf7233db52cdb2..59fec7a2d58e96a03287726b45d336b8acce785e 100644
--- a/docs/developers-guide-osx.md
+++ b/docs/developers-guide-osx.md
@@ -8,6 +8,9 @@ You'll need to run the following commands before building the app:
 # Fetch and initialize git submodule
 git submodule update --init
 
+# Install libcurl (needed by WWW::Curl::Simple)
+brew install curl && brew link curl --force
+
 # Install Perl modules used by ./setup and ./release
 sudo cpan install File::Copy::Recursive JSON Readonly String::Util Text::Caml WWW::Curl::Simple
 
diff --git a/docs/operations-guide/running-metabase-on-elastic-beanstalk.md b/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
index 9464f5cc4befa6fcc524efc34ded2bd9d15cfa78..5729d7042d801d47c5fd873ca842d5bad9e40f28 100644
--- a/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
+++ b/docs/operations-guide/running-metabase-on-elastic-beanstalk.md
@@ -186,7 +186,7 @@ Here's each step:
 1. Go to Elastic Beanstalk and select your `Metabase` application
 * Click on `Application Versions` on the left nav (you can also choose `Application Versions` from the dropdown at the top of the page)
 * Download the latest Metabase Elastic Beanstalk deployment file
-  * https://s3.amazonaws.com/downloads.metabase.com/v0.13.1/aws-elastic-beanstalk.zip
+  * https://s3.amazonaws.com/downloads.metabase.com/v0.13.1/metabase-aws-eb.zip
 * Upload a new Application Version
 	* Click the `Upload` button on the upper right side of the listing
 		* Give the new version a name, ideally including the Metabase version number (e.g. v0.13.0)
diff --git a/docs/users-guide/start.md b/docs/users-guide/start.md
index 6201afd049902107179afd8f81d4007e02b51ff0..c441a231039f6eec0cb69717f0ce77abd689fe15 100644
--- a/docs/users-guide/start.md
+++ b/docs/users-guide/start.md
@@ -14,6 +14,8 @@
 
 > [Tips on creating useful dashboards](06-dashboard-tips.md)
 
-> [Some helpful tips on building your data model](07-data-model-reference.md)
+> [Using Pulses for daily emails](07-pulses.md)
 
-Let's get started with an overview of [What Metabase does](01-what-is-metabase.md).
\ No newline at end of file
+> [Some helpful tips on building your data model](08-data-model-reference.md)
+
+Let's get started with an overview of [What Metabase does](01-what-is-metabase.md).
diff --git a/src/metabase/db/metadata_queries.clj b/src/metabase/db/metadata_queries.clj
index 86fca4ded08685e7d33fa98c4edce502efb4093c..67fe649ee025a9919e7f43e98d490cdf0111336f 100644
--- a/src/metabase/db/metadata_queries.clj
+++ b/src/metabase/db/metadata_queries.clj
@@ -23,7 +23,7 @@
    :post [(integer? %)]}
   (-> (qp-query (:db_id table) {:source_table (:id table)
                                 :aggregation  ["count"]})
-      first first))
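+      ;; coerce to int -- presumably some drivers return the count as a
+      ;; non-integer number, which would fail the integer? :post condition above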
+      first first int))
 
 (defn field-distinct-values
   "Return the distinct values of FIELD.
@@ -38,10 +38,10 @@
   (-> (field-query field (merge {:aggregation ["distinct" field-id]}
                                 (when limit
                                   {:limit limit})))
-      first first))
+      first first int))
 
 (defn field-count
   "Return the count of FIELD."
   [{field-id :id :as field}]
   (-> (field-query field {:aggregation ["count" field-id]})
-      first first))
+      first first int))
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index c001c5a91d2aebea0f18b2a2cf7a80769f5a4526..045240e0924da9914d76a89b9c4e410b7f4dfccb 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -144,7 +144,7 @@
      operation itself is encapsulated as the lambda F, which must be called with no arguments.
 
        (defn sync-in-context [driver database f]
-         (with-jdbc-metadata [_ database]
+         (with-connection [_ database]
            (f)))")
 
   (table-fks ^java.util.Set [this, ^TableInstance table]
diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj
index 383fc975f395e9152db4a0225c8cfa89ace3d786..cdeaa33c4787e48aed34399650ff1c6ce54ede3c 100644
--- a/src/metabase/driver/generic_sql.clj
+++ b/src/metabase/driver/generic_sql.clj
@@ -1,15 +1,20 @@
 (ns metabase.driver.generic-sql
-  (:require [clojure.java.jdbc :as jdbc]
+  (:require [clojure.core.memoize :as memoize]
+            [clojure.java.jdbc :as jdbc]
+            [clojure.set :as set]
             [clojure.tools.logging :as log]
-            [korma.core :as k]
+            (korma [core :as k]
+                   [db :as kdb])
             [korma.sql.utils :as utils]
             [metabase.driver :as driver]
-            [metabase.driver.generic-sql.util :refer :all]
             [metabase.models.field :as field]
             [metabase.util :as u])
   (:import java.util.Map
            clojure.lang.Keyword))
 
+(declare db->korma-db
+         korma-entity)
+
 (defprotocol ISQLDriver
   "Methods SQL-based drivers should implement in order to use `IDriverSQLDefaultsMixin`.
    Methods marked *OPTIONAL* have default implementations in `ISQLDriverDefaultsMixin`."
@@ -40,6 +45,9 @@
   (excluded-schemas ^java.util.Set [this]
     "*OPTIONAL*. Set of string names of schemas to skip syncing tables from.")
 
+  (get-connection-for-sync ^java.sql.Connection [this details]
+    "*OPTIONAL*. Get a connection used for a Sync step. By default, this returns a pooled connection.")
+
   (set-timezone-sql ^String [this]
     "*OPTIONAL*. This should be a prepared JDBC SQL statement string to be used to set the timezone for the current transaction.
 
@@ -55,23 +63,53 @@
     "Return a korma form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`.
      SECONDS-OR-MILLISECONDS refers to the resolution of the int in question and with be either `:seconds` or `:milliseconds`."))
 
+(def ^{:arglists '([connection-spec])}
+  connection-spec->pooled-connection-spec
+  "Return a JDBC connection spec that includes a cp30 `ComboPooledDataSource`.
+   Theses connection pools are cached so we don't create multiple ones to the same DB.
+   Pools are destroyed after they aren't used for more than 3 hours."
+  (memoize/ttl
+   (fn [spec]
+     (log/debug (u/format-color 'magenta "Creating new connection pool..."))
+     (kdb/connection-pool (assoc spec :minimum-pool-size 1)))
+   :ttl/threshold (* 3 60 60 1000)))
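+;; A sketch of the expected caching behavior (hypothetical spec values): equal specs
+;; should return the same cached pool within the TTL, e.g.
+;;   (identical? (connection-spec->pooled-connection-spec {:subprotocol "h2", :subname "mem:test"})
+;;               (connection-spec->pooled-connection-spec {:subprotocol "h2", :subname "mem:test"}))
+;;   ;; -> true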
+
+(defn db->jdbc-connection-spec
+  "Return a JDBC connection spec for DATABASE. Normally this will have a C3P0 pool as its datasource, unless the database is `short-lived`."
+  {:arglists '([database] [driver details])}
+  ;; TODO - I don't think the short-lived? key is really needed anymore. It's only used by unit tests, and its original purpose was for creating temporary DBs;
+  ;; since we don't destroy databases at the end of each test anymore, it's probably time to remove this
+  ([{:keys [engine details]}]
+   (db->jdbc-connection-spec (driver/engine->driver engine) details))
+  ([driver {:keys [short-lived?], :as details}]
+   (let [connection-spec (connection-details->spec driver details)]
+     (if short-lived?
+       connection-spec
+       (connection-spec->pooled-connection-spec connection-spec)))))
+
+(def ^{:arglists '([database] [driver details])}
+  db->connection
+  "Return a [possibly pooled] connection to DATABASE. Make sure to close this when you're done! (e.g. by using `with-open`)"
+  (comp jdbc/get-connection db->jdbc-connection-spec))
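+;; Sketch of intended usage -- close the connection when finished, e.g.:
+;;   (with-open [conn (db->connection database)]
+;;     (.getMetaData conn))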
+
 
 (defn ISQLDriverDefaultsMixin
   "Default implementations for methods in `ISQLDriver`."
   []
   (require 'metabase.driver.generic-sql.query-processor)
-  {:apply-aggregation   (resolve 'metabase.driver.generic-sql.query-processor/apply-aggregation) ; don't resolve the vars yet so during interactive dev if the
-   :apply-breakout      (resolve 'metabase.driver.generic-sql.query-processor/apply-breakout)    ; underlying impl changes we won't have to reload all the drivers
-   :apply-fields        (resolve 'metabase.driver.generic-sql.query-processor/apply-fields)
-   :apply-filter        (resolve 'metabase.driver.generic-sql.query-processor/apply-filter)
-   :apply-join-tables   (resolve 'metabase.driver.generic-sql.query-processor/apply-join-tables)
-   :apply-limit         (resolve 'metabase.driver.generic-sql.query-processor/apply-limit)
-   :apply-order-by      (resolve 'metabase.driver.generic-sql.query-processor/apply-order-by)
-   :apply-page          (resolve 'metabase.driver.generic-sql.query-processor/apply-page)
-   :current-datetime-fn (constantly (k/sqlfn* :NOW))
-   :excluded-schemas    (constantly nil)
-   :set-timezone-sql    (constantly nil)
-   :stddev-fn           (constantly :STDDEV)})
+  {:apply-aggregation       (resolve 'metabase.driver.generic-sql.query-processor/apply-aggregation) ; don't resolve the vars yet so during interactive dev if the
+   :apply-breakout          (resolve 'metabase.driver.generic-sql.query-processor/apply-breakout) ; underlying impl changes we won't have to reload all the drivers
+   :apply-fields            (resolve 'metabase.driver.generic-sql.query-processor/apply-fields)
+   :apply-filter            (resolve 'metabase.driver.generic-sql.query-processor/apply-filter)
+   :apply-join-tables       (resolve 'metabase.driver.generic-sql.query-processor/apply-join-tables)
+   :apply-limit             (resolve 'metabase.driver.generic-sql.query-processor/apply-limit)
+   :apply-order-by          (resolve 'metabase.driver.generic-sql.query-processor/apply-order-by)
+   :apply-page              (resolve 'metabase.driver.generic-sql.query-processor/apply-page)
+   :current-datetime-fn     (constantly (k/sqlfn* :NOW))
+   :get-connection-for-sync db->connection
+   :excluded-schemas        (constantly nil)
+   :set-timezone-sql        (constantly nil)
+   :stddev-fn               (constantly :STDDEV)})
 
 
 (defn- can-connect? [driver details]
@@ -81,26 +119,58 @@
              vals
              first))))
 
-(defn- sync-in-context [_ database do-sync-fn]
-  (with-jdbc-metadata [_ database]
-    (do-sync-fn)))
+(defmacro with-metadata
+  "Execute BODY with `java.sql.DatabaseMetaData` for DATABASE."
+  [[binding driver database] & body]
+  `(with-open [^java.sql.Connection conn# (get-connection-for-sync ~driver (:details ~database))]
+     (let [~binding (.getMetaData conn#)]
+       ~@body)))
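+;; A minimal usage sketch (adapted from the old with-jdbc-metadata docstring):
+;;   (with-metadata [md driver database]
+;;     (->> (.getPrimaryKeys md nil nil nil)
+;;          jdbc/result-set-seq
+;;          doall)) ; realize the lazy seq before the connection is closed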
 
-(defn- active-tables [driver database]
-  (with-jdbc-metadata [^java.sql.DatabaseMetaData md database]
+
+(defn fast-active-tables
+  "Default, fast implementation of `IDriver/active-tables` best suited for DBs with lots of system tables (like Oracle).
+   Fetch list of schemas, then for each one not in `excluded-schemas`, fetch its Tables, and combine the results.
+
+   This is as much as 15x faster for Databases with lots of system tables than `post-filtered-active-tables` (4 seconds vs 60)."
+  [driver database]
+  (with-metadata [md driver database]
+    (let [all-schemas (set (map :table_schem (jdbc/result-set-seq (.getSchemas md))))
+          schemas     (set/difference all-schemas (excluded-schemas driver))]
+      (set (for [schema     schemas
+                 table-name (mapv :table_name (jdbc/result-set-seq (.getTables md nil schema nil (into-array String ["TABLE", "VIEW"]))))]
+             {:name   table-name
+              :schema schema})))))
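+;; Both this and post-filtered-active-tables below return a set of maps,
+;; e.g. #{{:name "users", :schema "public"}} (illustrative values).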
+
+(defn post-filtered-active-tables
+  "Alternative implementation of `IDriver/active-tables` best suited for DBs with little or no support for schemas.
+   Fetch *all* Tables, then filter out ones whose schema is in `excluded-schemas` Clojure-side."
+  [driver database]
+  (with-metadata [md driver database]
     (set (for [table (filter #(not (contains? (excluded-schemas driver) (:table_schem %)))
                              (jdbc/result-set-seq (.getTables md nil nil nil (into-array String ["TABLE", "VIEW"]))))]
            {:name   (:table_name table)
             :schema (:table_schem table)}))))
 
+
 (defn- active-column-names->type [driver table]
-  (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)]
+  (with-metadata [md driver @(:db table)]
     (into {} (for [{:keys [column_name type_name]} (jdbc/result-set-seq (.getColumns md nil (:schema table) (:name table) nil))]
                {column_name (or (column->base-type driver (keyword type_name))
                                 (do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :UnknownField." type_name))
                                     :UnknownField))}))))
 
-(defn- table-pks [_ table]
-  (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)]
+(defn pattern-based-column->base-type
+  "Return a `column->base-type` function that matches types based on a sequence of pattern / base-type pairs."
+  [pattern->type]
+  (fn [_ column-type]
+    (let [column-type (name column-type)]
+      (loop [[[pattern base-type] & more] pattern->type]
+        (cond
+          (re-find pattern column-type) base-type
+          (seq more)                    (recur more))))))
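+;; Illustrative usage, borrowing the pattern / base-type pairs from the SQLite driver:
+;;   ((pattern-based-column->base-type [[#"DATETIME" :DateTimeField]
+;;                                      [#"DATE"     :DateField]])
+;;    nil :DATETIME) ; -> :DateTimeField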
+
+(defn- table-pks [driver table]
+  (with-metadata [md driver @(:db table)]
     (->> (.getPrimaryKeys md nil nil (:name table))
          jdbc/result-set-seq
          (map :column_name)
@@ -150,8 +220,8 @@
                 (k/database (db->korma-db database)))))
 
 
-(defn- table-fks [_ table]
-  (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db table)]
+(defn- table-fks [driver table]
+  (with-metadata [md driver @(:db table)]
     (->> (.getImportedKeys md nil nil (:name table))
          jdbc/result-set-seq
          (map (fn [result]
@@ -195,7 +265,7 @@
            'metabase.driver.generic-sql.query-processor)
   (merge driver/IDriverDefaultsMixin
          {:active-column-names->type active-column-names->type
-          :active-tables             active-tables
+          :active-tables             fast-active-tables
           :can-connect?              can-connect?
           :features                  features
           :field-avg-length          field-avg-length
@@ -203,7 +273,44 @@
           :field-values-lazy-seq     field-values-lazy-seq
           :process-native            (resolve 'metabase.driver.generic-sql.native/process-and-run)
           :process-structured        (resolve 'metabase.driver.generic-sql.query-processor/process-structured)
-          :sync-in-context           sync-in-context
           :table-fks                 table-fks
           :table-pks                 table-pks
           :table-rows-seq            table-rows-seq}))
+
+
+
+;;; ### Util Fns
+
+(defn db->korma-db
+  "Return a Korma DB spec for Metabase DATABASE."
+  ([database]
+   (db->korma-db (driver/engine->driver (:engine database)) (:details database)))
+  ([driver details]
+   {:pool    (db->jdbc-connection-spec driver details)
+    :options (korma.config/extract-options (connection-details->spec driver details))}))
+
+(defn- table->qualified-name [{schema :schema, table-name :name}]
+  (if (seq schema)
+    (str schema \. table-name)
+    table-name))
+
+(defn korma-entity
+  "Return a Korma entity for [DB and] TABLE.
+
+    (-> (sel :one Table :id 100)
+        korma-entity
+        (select (aggregate (count :*) :count)))"
+  ([table]
+   {:pre [(delay? (:db table))]}
+   (korma-entity @(:db table) table))
+
+  ([db table]
+   {:pre [(map? db)]}
+   {:table (table->qualified-name table)
+    :pk    :id
+    :db    (db->korma-db db)})
+
+  ([driver details table]
+   {:table (table->qualified-name table)
+    :pk    :id
+    :db    (db->korma-db driver details)}))
diff --git a/src/metabase/driver/generic_sql/native.clj b/src/metabase/driver/generic_sql/native.clj
index b4c33fb641e87dcd2424bc1842195e479b9f087c..46c60449c70d425a24e0b3bf02d7feb03c4ccc51 100644
--- a/src/metabase/driver/generic_sql/native.clj
+++ b/src/metabase/driver/generic_sql/native.clj
@@ -7,7 +7,6 @@
             [metabase.db :refer [sel]]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer :all]
             [metabase.models.database :refer [Database]]
             [metabase.util :as u]))
 
@@ -20,9 +19,7 @@
   "Process and run a native (raw SQL) QUERY."
   [driver {{sql :query} :native, database-id :database, :as query}]
   (try (let [database (sel :one :fields [Database :engine :details] :id database-id)
-             db-conn  (-> database
-                          db->korma-db
-                          korma.db/get-connection)]
+             db-conn  (sql/db->jdbc-connection-spec database)]
 
          (jdbc/with-db-transaction [t-conn db-conn]
            (let [^java.sql.Connection jdbc-connection (:connection t-conn)]
diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj
index b1e869daf0ab5e58dd8591e8af83006efdefc30c..e89c3043697edd51fdbf385648077b0817a3a496 100644
--- a/src/metabase/driver/generic_sql/query_processor.clj
+++ b/src/metabase/driver/generic_sql/query_processor.clj
@@ -11,7 +11,6 @@
             [metabase.config :as config]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer :all]
             [metabase.driver.query-processor :as qp]
             [metabase.util :as u])
   (:import java.sql.Timestamp
@@ -195,8 +194,8 @@
   [korma-form]
   (when (config/config-bool :mb-db-logging)
     (when-not qp/*disable-qp-logging*
-      (log/debug
-       (u/format-color 'green "\nKORMA FORM: 😋\n%s" (u/pprint-to-str (dissoc korma-form :db :ent :from :options :aliases :results :type :alias))))
+      ;; (log/debug
+      ;;  (u/format-color 'green "\nKORMA FORM: 😋\n%s" (u/pprint-to-str (dissoc korma-form :db :ent :from :options :aliases :results :type :alias))))
       (try
         (log/debug
          (u/format-color 'blue "\nSQL: 😈\n%s\n" (-> (k/as-sql korma-form)
@@ -206,8 +205,8 @@
                                                       (s/replace #"\sGROUP BY" "\nGROUP BY")
                                                       (s/replace #"\sORDER BY" "\nORDER BY")
                                                       (s/replace #"\sLIMIT" "\nLIMIT")
-                                                      (s/replace #"\sAND" "\n   AND")
-                                                      (s/replace #"\sOR" "\n    OR"))))
+                                                      (s/replace #"\sAND\s" "\n   AND ")
+                                                      (s/replace #"\sOR\s" "\n    OR "))))
         ;; (k/as-sql korma-form) will barf if the korma form is invalid
         (catch Throwable e
           (log/error (u/format-color 'red "Invalid korma form: %s" (.getMessage e))))))))
@@ -234,6 +233,7 @@
         korma-query))))
 
 (defn- do-with-timezone [driver f]
+  (log/debug (u/format-color 'blue (sql/set-timezone-sql driver)))
   (try (kdb/transaction (k/exec-raw [(sql/set-timezone-sql driver) [(driver/report-timezone)]])
                         (f))
        (catch Throwable e
@@ -257,7 +257,7 @@
   [driver {{:keys [source-table] :as query} :query, database :database, :as outer-query}]
   (let [set-timezone? (and (seq (driver/report-timezone))
                            (contains? (driver/features driver) :set-timezone))
-        entity        (korma-entity database source-table)
+        entity        ((resolve 'metabase.driver.generic-sql/korma-entity) database source-table)
         korma-query   (binding [*query* outer-query]
                         (apply-clauses driver (k/select* entity) query))
         f             (fn []
diff --git a/src/metabase/driver/generic_sql/util.clj b/src/metabase/driver/generic_sql/util.clj
deleted file mode 100644
index 040a9c63d6b0f13afe485026aef6841986099118..0000000000000000000000000000000000000000
--- a/src/metabase/driver/generic_sql/util.clj
+++ /dev/null
@@ -1,99 +0,0 @@
-(ns metabase.driver.generic-sql.util
-  "Shared functions for our generic-sql query processor."
-  (:require [clojure.java.jdbc :as jdbc]
-            [clojure.tools.logging :as log]
-            [colorize.core :as color]
-            (korma [core :as korma]
-                   [db :as kdb])
-            [korma.sql.utils :as utils]
-            [metabase.driver :as driver]
-            [metabase.driver.query-processor :as qp]))
-
-(defn db->connection-spec
-  "Return a JDBC connection spec for a Metabase `Database`."
-  [{{:keys [short-lived?]} :details, :as database}]
-  (let [driver (driver/engine->driver (:engine database))]
-    (assoc (@(resolve 'metabase.driver.generic-sql/connection-details->spec) driver (:details database))
-           ;; unless this is a temp DB, we need to make a pool or the connection will be closed before we get a chance to unCLOB-er the results during JSON serialization
-           ;; TODO - what will we do once we have CLOBS in temp DBs?
-           :make-pool? (not short-lived?))))
-
-(def ^{:arglists '([database])}
-  db->korma-db
-  "Return a Korma database definition for DATABASE.
-   Since Korma/C3PO seems to be bad about cleaning up its connection pools, this function is
-   memoized and will return an existing connection pool on subsequent calls."
-  (let [db->korma-db          (fn [database]
-                                (log/debug (color/red "Creating a new DB connection..."))
-                                (kdb/create-db (db->connection-spec database)))
-        memoized-db->korma-db (memoize db->korma-db)]
-    (fn [{{:keys [short-lived?]} :details, :as database}]
-      ;; Use un-memoized version of function for so-called "short-lived" databases (i.e. temporary ones that we won't create a connection pool for)
-      ((if short-lived?
-         db->korma-db
-         memoized-db->korma-db) (select-keys database [:engine :details]))))) ; only :engine and :details are needed for driver/connection so just pass those so memoization works as expected
-
-(def ^:dynamic ^java.sql.DatabaseMetaData *jdbc-metadata*
-  "JDBC metadata object for a database. This is set by `with-jdbc-metadata`."
-  nil)
-
-(defn -with-jdbc-metadata
-  "Internal implementation. Don't use this directly; use `with-jdbc-metadata`."
-  [database f]
-  (if *jdbc-metadata* (f *jdbc-metadata*)
-                      (jdbc/with-db-metadata [md (db->connection-spec database)]
-                        (binding [*jdbc-metadata* md]
-                          (f *jdbc-metadata*)))))
-
-(defmacro with-jdbc-metadata
-  "Execute BODY with the jdbc metadata for DATABASE bound to BINDING.
-   This will reuse `*jdbc-metadata*` if it's already set (to avoid opening extra connections).
-   Otherwise it will open a new metadata connection and bind `*jdbc-metadata*` so it can be reused by subsequent calls to `with-jdbc-metadata` within BODY.
-
-    (with-jdbc-metadata [^java.sql.DatabaseMetaData md (sel :one Database :id 1)] ; (1)
-      (-> (.getPrimaryKeys md nil nil nil)
-          jdbc/result-set-seq                                                     ; (2)
-          doall))                                                                 ; (3)
-
-   NOTES
-
-   1.  You should tag BINDING to avoid reflection.
-   2.  Use `jdbc/result-set-seq` to convert JDBC `ResultSet` into something we can use in Clojure
-   3.  Make sure to realize the lazy sequence within the BODY before connection is closed."
-  [[binding database] & body]
-  {:pre [(symbol? binding)]}
-  `(-with-jdbc-metadata ~database
-     (fn [~binding]
-       ~@body)))
-
-(defn korma-entity
-  "Return a Korma entity for [DB and] TABLE .
-
-    (-> (sel :one Table :id 100)
-        korma-entity
-        (select (aggregate (count :*) :count)))"
-  {:arglists '([table] [db table])}
-  ([{db-delay :db, :as table}]
-   {:pre [(delay? db-delay)]}
-   (korma-entity @db-delay table))
-  ([db {schema :schema, table-name :name}]
-   {:pre [(map? db)]}
-   {:table (if (seq schema)
-             (str schema \. table-name)
-             table-name)
-    :pk    :id
-    :db    (db->korma-db db)}))
-
-(defn funcs
-  "Convenience for writing nested `utils/func` forms.
-   The first argument is treated the same as with `utils/func`;
-   But when any arg is a vector we'll treat it as a recursive call to `funcs`.
-
-     (funcs \"CONCAT(%s)\" [\"YEAR(%s)\" x] y [\"MONTH(%s)\" z])
-       -> (utils/func \"CONCAT(%s)\" [(utils/func \"YEAR(%s)\" [x])
-                                      y
-                                      (utils/func \"MONTH(%s)\" [z])])"
-  [fn-format-str & args]
-  (utils/func fn-format-str (vec (for [arg args]
-                                   (if (vector? arg) (apply funcs arg)
-                                       arg)))))
diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj
index 4e84b8c2637ea9b009f6e6c2b1fe2518df1569a5..7cc1bdb384323dff79d6d229ed5acf96e24d8e8d 100644
--- a/src/metabase/driver/h2.clj
+++ b/src/metabase/driver/h2.clj
@@ -1,12 +1,13 @@
 (ns metabase.driver.h2
   (:require [clojure.string :as s]
-            [korma.db :as kdb]
-            [korma.sql.utils :as utils]
+            (korma [core :as k]
+                   [db :as kdb])
+            [korma.sql.utils :as kutils]
             [metabase.db :as db]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer [funcs]]
-            [metabase.models.database :refer [Database]]))
+            [metabase.models.database :refer [Database]]
+            [metabase.util.korma-extensions :as kx]))
 
 (defn- column->base-type [_ column-type]
   ({:ARRAY                       :UnknownField
@@ -106,11 +107,11 @@
               (update details :db connection-string-set-safe-options))))
 
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (utils/func (format "TIMESTAMPADD('%s', %%s, TIMESTAMP '1970-01-01T00:00:00Z')" (case seconds-or-milliseconds
-                                                                                    :seconds      "SECOND"
-                                                                                    :milliseconds "MILLISECOND"))
-              [field-or-value]))
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (kutils/func (format "TIMESTAMPADD('%s', %%s, TIMESTAMP '1970-01-01T00:00:00Z')" (case seconds-or-milliseconds
+                                                                                     :seconds      "SECOND"
+                                                                                     :milliseconds "MILLISECOND"))
+               [expr]))
 
 
 (defn- process-query-in-context [_ qp]
@@ -132,51 +133,48 @@
 ;; H2 doesn't have date_trunc(), so we fake it by formatting a date to an appropriate string
 ;; and then converting back to a date.
 ;; Format strings are the same as those of SimpleDateFormat.
-(defn- trunc-with-format [format-str]
-  (format "PARSEDATETIME(FORMATDATETIME(%%s, '%s'), '%s')" format-str format-str))
-
-;; Rounding dates to quarters is a bit involved but still doable. Here's the plan:
-;; *  extract the year and quarter from the date;
-;; *  convert the quarter (1 - 4) to the corresponding starting month (1, 4, 7, or 10).
-;;    (do this by multiplying by 3, giving us [3 6 9 12]. Then subtract 2 to get [1 4 7 10])
-;; *  Concatenate the year and quarter start month together to create a yyyyMM date string;
-;; *  Parse the string as a date. :sunglasses:
-;;
-;; Postgres DATE_TRUNC('quarter', x)
-;; becomes  PARSEDATETIME(CONCAT(YEAR(x), ((QUARTER(x) * 3) - 2)), 'yyyyMM')
-(defn- trunc-to-quarter [field-or-value]
-  (funcs "PARSEDATETIME(%s, 'yyyyMM')"
-         ["CONCAT(%s)"
-          ["YEAR(%s)" field-or-value]
-          ["((QUARTER(%s) * 3) - 2)" field-or-value]]))
-
-(defn- date [_ unit field-or-value]
-  (if (= unit :quarter)
-    (trunc-to-quarter field-or-value)
-    (utils/func (case unit
-                  :default         "CAST(%s AS TIMESTAMP)"
-                  :minute          (trunc-with-format "yyyyMMddHHmm")
-                  :minute-of-hour  "MINUTE(%s)"
-                  :hour            (trunc-with-format "yyyyMMddHH")
-                  :hour-of-day     "HOUR(%s)"
-                  :day             "CAST(%s AS DATE)"
-                  :day-of-week     "DAY_OF_WEEK(%s)"
-                  :day-of-month    "DAY_OF_MONTH(%s)"
-                  :day-of-year     "DAY_OF_YEAR(%s)"
-                  :week            (trunc-with-format "yyyyww") ; ww = week of year
-                  :week-of-year    "WEEK(%s)"
-                  :month           (trunc-with-format "yyyyMM")
-                  :month-of-year   "MONTH(%s)"
-                  :quarter-of-year "QUARTER(%s)"
-                  :year            "YEAR(%s)")
-                [field-or-value])))
-
+(defn- format-datetime   [format-str expr] (k/sqlfn :FORMATDATETIME expr (kx/literal format-str)))
+(defn- parse-datetime    [format-str expr] (k/sqlfn :PARSEDATETIME expr  (kx/literal format-str)))
+(defn- trunc-with-format [format-str expr] (parse-datetime format-str (format-datetime format-str expr)))
+
+(defn- date [_ unit expr]
+  (case unit
+    :default         (kx/->timestamp expr)
+    :minute          (trunc-with-format "yyyyMMddHHmm" expr)
+    :minute-of-hour  (kx/minute expr)
+    :hour            (trunc-with-format "yyyyMMddHH" expr)
+    :hour-of-day     (kx/hour expr)
+    :day             (kx/->date expr)
+    :day-of-week     (k/sqlfn :DAY_OF_WEEK expr)
+    :day-of-month    (k/sqlfn :DAY_OF_MONTH expr)
+    :day-of-year     (k/sqlfn :DAY_OF_YEAR expr)
+    :week            (trunc-with-format "yyyyww" expr) ; ww = week of year
+    :week-of-year    (kx/week expr)
+    :month           (trunc-with-format "yyyyMM" expr)
+    :month-of-year   (kx/month expr)
+    ;; Rounding dates to quarters is a bit involved but still doable. Here's the plan:
+    ;; *  extract the year and quarter from the date;
+    ;; *  convert the quarter (1 - 4) to the corresponding starting month (1, 4, 7, or 10).
+    ;;    (do this by multiplying by 3, giving us [3 6 9 12]. Then subtract 2 to get [1 4 7 10])
+    ;; *  Concatenate the year and quarter start month together to create a yyyyMM date string;
+    ;; *  Parse the string as a date. :sunglasses:
+    ;;
+    ;; Postgres DATE_TRUNC('quarter', x)
+    ;; becomes  PARSEDATETIME(CONCAT(YEAR(x), ((QUARTER(x) * 3) - 2)), 'yyyyMM')
+    :quarter         (parse-datetime "yyyyMM"
+                                     (kx/concat (kx/year expr) (kx/- (kx/* (kx/quarter expr)
+                                                                           3)
+                                                                     2)))
+    :quarter-of-year (kx/quarter expr)
+    :year            (kx/year expr)))
+
+(def ^:private now (k/sqlfn :NOW))
 
 ;; TODO - maybe rename this relative-date ?
 (defn- date-interval [_ unit amount]
-  (utils/generated (if (= unit :quarter)
-                     (format "DATEADD('MONTH', (%d * 3), NOW())" amount)
-                     (format "DATEADD('%s', %d, NOW())" (s/upper-case (name unit)) amount))))
+  (if (= unit :quarter)
+    (recur nil :month (kx/* amount 3))
+    (k/sqlfn :DATEADD (kx/literal (s/upper-case (name unit))) amount now)))
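+;; e.g. (date-interval nil :month 3) should render as DATEADD('MONTH', 3, NOW());
+;; :quarter recurs as three times the amount in months, matching the old string-based version.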
 
 
 (defn- humanize-connection-error-message [_ message]
@@ -200,7 +198,8 @@
 (extend H2Driver
   driver/IDriver
   (merge (sql/IDriverSQLDefaultsMixin)
-         {:date-interval                     date-interval
+         {:active-tables                     sql/post-filtered-active-tables
+          :date-interval                     date-interval
           :details-fields                    (constantly [{:name         "db"
                                                            :display-name "Connection String"
                                                            :placeholder  "file:/Users/camsaul/bird_sightings/toucans;AUTO_SERVER=TRUE"
diff --git a/src/metabase/driver/mysql.clj b/src/metabase/driver/mysql.clj
index 10c9ae44e8b994e6feb0288803fa6549804153ee..af878beb05d7a2dff5645d79d36985c86d97afb8 100644
--- a/src/metabase/driver/mysql.clj
+++ b/src/metabase/driver/mysql.clj
@@ -5,11 +5,11 @@
                    [db :as kdb]
                    mysql)
             (korma.sql [engine :refer [sql-func]]
-                       [utils :as utils])
+                       [utils :as kutils])
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer [funcs]]
-            [metabase.util :as u]))
+            [metabase.util :as u]
+            [metabase.util.korma-extensions :as kx]))
 
 ;;; # Korma 0.4.2 Bug Workaround
 ;; (Buggy code @ https://github.com/korma/Korma/blob/684178c386df529558bbf82097635df6e75fb339/src/korma/mysql.clj)
@@ -19,7 +19,7 @@
   (sql-func "COUNT" (if (and (or (instance? clojure.lang.Named v) ; the issue was that name was being called on things that like maps when we tried to get COUNT(DISTINCT(...))
                                  (string? v))                     ; which would barf since maps don't implement clojure.lang.Named
                              (= (name v) "*"))
-                      (utils/generated "*")
+                      (kutils/generated "*")
                       v)))
 
 (intern 'korma.mysql 'count mysql-count)
@@ -67,54 +67,55 @@
       ;; Add a param to the end of the connection string that tells MySQL to convert 0000-00-00 dates to NULL when returning them.
       (update :subname (u/rpartial str "?zeroDateTimeBehavior=convertToNull"))))
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (utils/func (case seconds-or-milliseconds
-                :seconds      "FROM_UNIXTIME(%s)"
-                :milliseconds "FROM_UNIXTIME(%s / 1000)")
-                [field-or-value]))
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (k/sqlfn :FROM_UNIXTIME (case seconds-or-milliseconds
+                            :seconds      expr
+                            :milliseconds (kx// expr (k/raw 1000)))))
+
+(defn- date-format [format-str expr] (k/sqlfn :DATE_FORMAT expr (kx/literal format-str)))
+(defn- str-to-date [format-str expr] (k/sqlfn :STR_TO_DATE expr (kx/literal format-str)))
 
 ;; Since MySQL doesn't have date_trunc() we fake it by formatting a date to an appropriate string and then converting back to a date.
 ;; See http://dev.mysql.com/doc/refman/5.6/en/date-and-time-functions.html#function_date-format for an explanation of format specifiers
-(defn- trunc-with-format [format-str]
-  (let [format-str (s/escape format-str {\% "%%"})] ; replace the format specifiers like %y with ones like %%y so they don't get treated as SQL arg placeholders in result str
-    (format "STR_TO_DATE(DATE_FORMAT(%%s, '%s'), '%s')" format-str format-str)))
-
-;; Truncating to a quarter is trickier since there aren't any format strings.
-;; See the explanation in the H2 driver, which does the same thing but with slightly different syntax.
-(defn- trunc-to-quarter [field-or-value]
-  (funcs "STR_TO_DATE(%s, '%%Y-%%m-%%d')"
-         ["CONCAT(%s)"
-          ["YEAR(%s)" field-or-value]
-          (k/raw "'-'")
-          ["((QUARTER(%s) * 3) - 2)" field-or-value]
-          (k/raw "'-01'")]))
-
-(defn- date [_ unit field-or-value]
-  (if (= unit :quarter)
-    (trunc-to-quarter field-or-value)
-    (utils/func (case unit
-                  :default         "TIMESTAMP(%s)"
-                  :minute          (trunc-with-format "%Y-%m-%d %H:%i")
-                  :minute-of-hour  "MINUTE(%s)"
-                  :hour            (trunc-with-format "%Y-%m-%d %H")
-                  :hour-of-day     "HOUR(%s)"
-                  :day             "DATE(%s)"
-                  :day-of-week     "DAYOFWEEK(%s)"
-                  :day-of-month    "DAYOFMONTH(%s)"
-                  :day-of-year     "DAYOFYEAR(%s)"
-                  ;; To convert a YEARWEEK (e.g. 201530) back to a date you need tell MySQL which day of the week to use,
-                  ;; because otherwise as far as MySQL is concerned you could be talking about any of the days in that week
-                  :week            "STR_TO_DATE(CONCAT(YEARWEEK(%s), ' Sunday'), '%%X%%V %%W')"
-                  ;; mode 6: Sunday is first day of week, first week of year is the first one with 4+ days
-                  :week-of-year    "(WEEK(%s, 6) + 1)"
-                  :month           "STR_TO_DATE(CONCAT(DATE_FORMAT(%s, '%%Y-%%m'), '-01'), '%%Y-%%m-%%d')"
-                  :month-of-year   "MONTH(%s)"
-                  :quarter-of-year "QUARTER(%s)"
-                  :year            "YEAR(%s)")
-                [field-or-value])))
+(defn- trunc-with-format [format-str expr]
+  (str-to-date format-str (date-format format-str expr)))
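+;; e.g. (trunc-with-format "%Y-%m" expr) should render as
+;; STR_TO_DATE(DATE_FORMAT(expr, '%Y-%m'), '%Y-%m'), as in the old string-based version.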
+
+(defn- date [_ unit expr]
+  (case unit
+    :default         (k/sqlfn :TIMESTAMP expr)
+    :minute          (trunc-with-format "%Y-%m-%d %H:%i" expr)
+    :minute-of-hour  (kx/minute expr)
+    :hour            (trunc-with-format "%Y-%m-%d %H" expr)
+    :hour-of-day     (kx/hour expr)
+    :day             (k/sqlfn :DATE expr)
+    :day-of-week     (k/sqlfn :DAYOFWEEK expr)
+    :day-of-month    (k/sqlfn :DAYOFMONTH expr)
+    :day-of-year     (k/sqlfn :DAYOFYEAR expr)
+    ;; To convert a YEARWEEK (e.g. 201530) back to a date you need to tell MySQL which day of the week to use,
+    ;; because otherwise as far as MySQL is concerned you could be talking about any of the days in that week
+    :week            (str-to-date "%X%V %W"
+                                  (kx/concat (k/sqlfn :YEARWEEK expr)
+                                             (kx/literal " Sunday")))
+    ;; mode 6: Sunday is first day of week, first week of year is the first one with 4+ days
+    :week-of-year    (kx/inc (kx/week expr 6))
+    :month           (str-to-date "%Y-%m-%d"
+                                  (kx/concat (date-format "%Y-%m" expr)
+                                             (kx/literal "-01")))
+    :month-of-year   (kx/month expr)
+    ;; Truncating to a quarter is trickier since there aren't any format strings.
+    ;; See the explanation in the H2 driver, which does the same thing but with slightly different syntax.
+    :quarter         (str-to-date "%Y-%m-%d"
+                                  (kx/concat (kx/year expr)
+                                             (kx/literal "-")
+                                             (kx/- (kx/* (kx/quarter expr)
+                                                         3)
+                                                   2)
+                                             (kx/literal "-01")))
+    :quarter-of-year (kx/quarter expr)
+    :year            (kx/year expr)))
 
 (defn- date-interval [_ unit amount]
-  (utils/generated (format "DATE_ADD(NOW(), INTERVAL %d %s)" amount (s/upper-case (name unit)))))
+  (kutils/generated (format "DATE_ADD(NOW(), INTERVAL %d %s)" (int amount) (s/upper-case (name unit)))))
 
 (defn- humanize-connection-error-message [_ message]
   (condp re-matches message
@@ -140,7 +141,8 @@
 (extend MySQLDriver
   driver/IDriver
   (merge (sql/IDriverSQLDefaultsMixin)
-         {:date-interval                     date-interval
+         {:active-tables                     sql/post-filtered-active-tables
+          :date-interval                     date-interval
           :details-fields                    (constantly [{:name         "host"
                                                            :display-name "Host"
                                                            :default      "localhost"}
diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj
index 53f216c4aed5b03af5704e0d15b7b3d2f1b69c7b..ba77ff50db47481e6e540f3c227609ea49dc33ac 100644
--- a/src/metabase/driver/postgres.clj
+++ b/src/metabase/driver/postgres.clj
@@ -5,13 +5,13 @@
                      [string :as s])
             (korma [core :as k]
                    [db :as kdb])
-            [korma.sql.utils :as utils]
+            [korma.sql.utils :as kutils]
             [swiss.arrows :refer :all]
             [metabase.db :refer [upd]]
             [metabase.models.field :refer [Field]]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer [with-jdbc-metadata]])
+            [metabase.util.korma-extensions :as kx])
   ;; This is necessary for when NonValidatingFactory is passed as the sslfactory connection-string argument,
   ;; e.g. when connecting to a Heroku Postgres database from outside of Heroku.
   (:import org.postgresql.ssl.NonValidatingFactory))
@@ -95,44 +95,56 @@
       (rename-keys {:dbname :db})
       kdb/postgres))
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (utils/func (case seconds-or-milliseconds
-                :seconds      "TO_TIMESTAMP(%s)"
-                :milliseconds "TO_TIMESTAMP(%s / 1000)")
-              [field-or-value]))
 
-(defn- driver-specific-sync-field! [_ {:keys [table], :as field}]
-  (with-jdbc-metadata [^java.sql.DatabaseMetaData md @(:db @table)]
+(defn- driver-specific-sync-field! [driver {:keys [table], :as field}]
+  ;; TODO - this is throwing a `NullPointerException` (!)
+  (assert (delay? (:db @table))
+    (format "Didn't find DB delay: %s" field))
+  (sql/with-metadata [md driver @(:db @table)]
     (let [[{:keys [type_name]}] (->> (.getColumns md nil nil (:name @table) (:name field))
                                      jdbc/result-set-seq)]
       (when (= type_name "json")
         (upd Field (:id field) :special_type :json)
         (assoc field :special_type :json)))))
 
-(defn- date [_ unit field-or-value]
-  (utils/func (case unit
-                :default         "CAST(%s AS TIMESTAMP)"
-                :minute          "DATE_TRUNC('minute', %s)"
-                :minute-of-hour  "CAST(EXTRACT(MINUTE FROM %s) AS INTEGER)"
-                :hour            "DATE_TRUNC('hour', %s)"
-                :hour-of-day     "CAST(EXTRACT(HOUR FROM %s) AS INTEGER)"
-                :day             "CAST(%s AS DATE)"
-                ;; Postgres DOW is 0 (Sun) - 6 (Sat); increment this to be consistent with Java, H2, MySQL, and Mongo (1-7)
-                :day-of-week     "(CAST(EXTRACT(DOW FROM %s) AS INTEGER) + 1)"
-                :day-of-month    "CAST(EXTRACT(DAY FROM %s) AS INTEGER)"
-                :day-of-year     "CAST(EXTRACT(DOY FROM %s) AS INTEGER)"
-                ;; Postgres weeks start on Monday, so shift this date into the proper bucket and then decrement the resulting day
-                :week            "(DATE_TRUNC('week', (%s + INTERVAL '1 day')) - INTERVAL '1 day')"
-                :week-of-year    "CAST(EXTRACT(WEEK FROM (%s + INTERVAL '1 day')) AS INTEGER)"
-                :month           "DATE_TRUNC('month', %s)"
-                :month-of-year   "CAST(EXTRACT(MONTH FROM %s) AS INTEGER)"
-                :quarter         "DATE_TRUNC('quarter', %s)"
-                :quarter-of-year "CAST(EXTRACT(QUARTER FROM %s) AS INTEGER)"
-                :year            "CAST(EXTRACT(YEAR FROM %s) AS INTEGER)")
-              [field-or-value]))
+
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    :seconds      (k/sqlfn :TO_TIMESTAMP expr)
+    :milliseconds (recur nil (kx// expr 1000) :seconds)))
+
+(defn- date-trunc [unit expr] (k/sqlfn :DATE_TRUNC (kx/literal unit) expr))
+(defn- extract    [unit expr] (kutils/func (format "EXTRACT(%s FROM %%s)" (name unit))
+                                           [expr]))
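+;; e.g. (extract :month expr) should render as EXTRACT(month FROM expr)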
+
+(def ^:private extract-integer (comp kx/->integer extract))
+
+(def ^:private ^:const one-day (k/raw "INTERVAL '1 day'"))
+
+(defn- date [_ unit expr]
+  (case unit
+    :default         (kx/->timestamp expr)
+    :minute          (date-trunc :minute expr)
+    :minute-of-hour  (extract-integer :minute expr)
+    :hour            (date-trunc :hour expr)
+    :hour-of-day     (extract-integer :hour expr)
+    :day             (kx/->date expr)
+    ;; Postgres DOW is 0 (Sun) - 6 (Sat); increment this to be consistent with Java, H2, MySQL, and Mongo (1-7)
+    :day-of-week     (kx/inc (extract-integer :dow expr))
+    :day-of-month    (extract-integer :day expr)
+    :day-of-year     (extract-integer :doy expr)
+    ;; Postgres weeks start on Monday, so shift this date into the proper bucket and then decrement the resulting day
+    :week            (kx/- (date-trunc :week (kx/+ expr one-day))
+                           one-day)
+    :week-of-year    (extract-integer :week (kx/+ expr one-day))
+    :month           (date-trunc :month expr)
+    :month-of-year   (extract-integer :month expr)
+    :quarter         (date-trunc :quarter expr)
+    :quarter-of-year (extract-integer :quarter expr)
+    :year            (extract-integer :year expr)))
 
 (defn- date-interval [_ unit amount]
-  (utils/generated (format "(NOW() + INTERVAL '%d %s')" amount (name unit))))
+  (k/raw (format "(NOW() + INTERVAL '%d %s')" (int amount) (name unit))))
 
 (defn- humanize-connection-error-message [_ message]
   (condp re-matches message
diff --git a/src/metabase/driver/redshift.clj b/src/metabase/driver/redshift.clj
index 7f983b84f5cecf86adec3a3f9694776ec09cc87d..1a6ca5a12be304a1a7984407a91cb87e80ea876b 100644
--- a/src/metabase/driver/redshift.clj
+++ b/src/metabase/driver/redshift.clj
@@ -7,27 +7,28 @@
             (metabase [config :as config]
                       [driver :as driver])
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :as sqlutil]
-            [metabase.driver.postgres :as postgres]))
+            [metabase.driver.postgres :as postgres]
+            [metabase.util.korma-extensions :as kx]))
 
 (defn- connection-details->spec [_ details]
   (kdb/postgres (merge details postgres/ssl-params))) ; always connect to redshift over SSL
 
 (defn- date-interval [_ unit amount]
-  (kutils/generated (format "(GETDATE() + INTERVAL '%d %s')" amount (name unit))))
+  (k/raw (format "(GETDATE() + INTERVAL '%d %s')" (int amount) (name unit))))
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (kutils/func (case seconds-or-milliseconds
-                 :seconds      "(TIMESTAMP '1970-01-01T00:00:00Z' + (%s * INTERVAL '1 second'))"
-                 :milliseconds "(TIMESTAMP '1970-01-01T00:00:00Z' + ((%s / 1000) * INTERVAL '1 second'))")
-               [field-or-value]))
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    :seconds      (kx/+ (k/raw "TIMESTAMP '1970-01-01T00:00:00Z'")
+                        (kx/* expr
+                              (k/raw "INTERVAL '1 second'")))
+    :milliseconds (recur nil (kx// expr 1000) :seconds)))
 
 ;; The Postgres JDBC .getImportedKeys method doesn't work for Redshift, and we're not allowed to access information_schema.constraint_column_usage,
 ;; so we'll have to use this custom query instead
 ;; See also: [Related Postgres JDBC driver issue on GitHub](https://github.com/pgjdbc/pgjdbc/issues/79)
 ;;           [How to access the equivalent of information_schema.constraint_column_usage in Redshift](https://forums.aws.amazon.com/thread.jspa?threadID=133514)
 (defn- table-fks [_ table]
-  (set (jdbc/query (sqlutil/db->connection-spec @(:db table))
+  (set (jdbc/query (sql/db->jdbc-connection-spec @(:db table))
                    ["SELECT source_column.attname AS \"fk-column-name\",
                        dest_table.relname  AS \"dest-table-name\",
                        dest_column.attname AS \"dest-column-name\"
diff --git a/src/metabase/driver/sqlite.clj b/src/metabase/driver/sqlite.clj
index 8fc3cf02c45fdaedb8d57d0dfddbe0fb46d5ed8e..e4827d56dcc301ac407fb81e11f0e954fa6fee52 100644
--- a/src/metabase/driver/sqlite.clj
+++ b/src/metabase/driver/sqlite.clj
@@ -7,7 +7,8 @@
             [metabase.config :as config]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.util :as u]))
+            [metabase.util :as u]
+            [metabase.util.korma-extensions :as kx]))
 
 ;; We'll do regex pattern matching here for determining Field types
 ;; because SQLite types can have optional lengths, e.g. NVARCHAR(100) or NUMERIC(10,5)
@@ -29,50 +30,37 @@
    [#"DATETIME" :DateTimeField]
    [#"DATE"     :DateField]])
 
-(defn- column->base-type [_ column-type]
-  (let [column-type (name column-type)]
-    (loop [[[pattern base-type] & more] pattern->type]
-      (cond
-        (re-find pattern column-type) base-type
-        (seq more)                    (recur more)))))
+(def ^:private ->date     (partial k/sqlfn* :DATE))
+(def ^:private ->datetime (partial k/sqlfn* :DATETIME))
 
-(def ^:private ->date     (comp (partial kutils/func "DATE(%s)") vector))
-(def ^:private ->datetime (comp (partial kutils/func "DATETIME(%s)") vector))
-(def ^:private ->integer  (comp (partial kutils/func "CAST(%s AS INTEGER)") vector))
-(def ^:private add-1      (comp (partial kutils/func "(%s + 1)") vector))
-
-(defn- literal [s]
-  (k/raw (str \' s \')))
-
-(defn- strftime [format-str field-or-value]
-  (kutils/func (format "STRFTIME('%s', %%s)" (s/replace format-str "%" "%%"))
-               [field-or-value]))
+(defn- strftime [format-str expr]
+  (k/sqlfn :STRFTIME (kx/literal format-str) expr))
 
 (defn- date
   "Apply truncation / extraction to a date field or value for SQLite.
    See also the [SQLite Date and Time Functions Reference](http://www.sqlite.org/lang_datefunc.html)."
-  [_ unit field-or-value]
+  [_ unit expr]
   ;; Convert Timestamps to ISO 8601 strings before passing to SQLite, otherwise they don't seem to work correctly
-  (let [v (if (instance? java.sql.Timestamp field-or-value)
-            (literal (u/date->iso-8601 field-or-value))
-            field-or-value)]
+  (let [v (if (instance? java.sql.Timestamp expr)
+            (kx/literal (u/date->iso-8601 expr))
+            expr)]
     (case unit
       :default         (->datetime v)
       :minute          (->datetime (strftime "%Y-%m-%d %H:%M" v))
-      :minute-of-hour  (->integer (strftime "%M" v))
+      :minute-of-hour  (kx/->integer (strftime "%M" v))
       :hour            (->datetime (strftime "%Y-%m-%d %H:00" v))
-      :hour-of-day     (->integer (strftime "%H" v))
+      :hour-of-day     (kx/->integer (strftime "%H" v))
       :day             (->date v)
       ;; SQLite day of week (%w) is Sunday = 0 <-> Saturday = 6. We want 1 - 7 so add 1
-      :day-of-week     (->integer (add-1 (strftime "%w" v)))
-      :day-of-month    (->integer (strftime "%d" v))
-      :day-of-year     (->integer (strftime "%j" v))
+      :day-of-week     (kx/->integer (kx/inc (strftime "%w" v)))
+      :day-of-month    (kx/->integer (strftime "%d" v))
+      :day-of-year     (kx/->integer (strftime "%j" v))
       ;; Move back 6 days, then forward to the next Sunday
-      :week            (->date v, (literal "-6 days"), (literal "weekday 0"))
+      :week            (->date v, (kx/literal "-6 days"), (kx/literal "weekday 0"))
       ;; SQLite first week of year is 0, so add 1
-      :week-of-year    (->integer (add-1 (strftime "%W" v)))
-      :month           (->date v, (literal "start of month"))
-      :month-of-year   (->integer (strftime "%m" v))
+      :week-of-year    (kx/->integer (kx/inc (strftime "%W" v)))
+      :month           (->date v, (kx/literal "start of month"))
+      :month-of-year   (kx/->integer (strftime "%m" v))
       ;;    DATE(DATE(%s, 'start of month'), '-' || ((STRFTIME('%m', %s) - 1) % 3) || ' months')
       ;; -> DATE(DATE('2015-11-16', 'start of month'), '-' || ((STRFTIME('%m', '2015-11-16') - 1) % 3) || ' months')
       ;; -> DATE('2015-11-01', '-' || ((11 - 1) % 3) || ' months')
@@ -80,13 +68,19 @@
       ;; -> DATE('2015-11-01', '-1 months')
       ;; -> '2015-10-01'
       :quarter         (->date
-                        (->date v, (literal "start of month"))
-                        (kutils/func "'-' || ((%s - 1) %% 3) || ' months'"
-                                     [(strftime "%m" v)]))
+                        (->date v, (kx/literal "start of month"))
+                        (kx/infix "||"
+                                  (kx/literal "-")
+                                  (kx/mod (kx/dec (strftime "%m" v))
+                                          3)
+                                  (kx/literal " months")))
       ;; q = (m + 2) / 3
-      :quarter-of-year (kutils/func "((%s + 2) / 3)"
-                                    [(strftime "%m" v)])
-      :year            (->integer (strftime "%Y" v)))))
+      :quarter-of-year (kx// (kx/+ (strftime "%m" v)
+                                   2)
+                             3)
+      :year            (kx/->integer (strftime "%Y" v)))))
+
+(def ^:private datetime (partial k/sqlfn* :DATETIME))
 
 (defn- date-interval [_ unit amount]
   (let [[multiplier unit] (case unit
@@ -99,13 +93,13 @@
                             :quarter [3 "months"]
                             :year    [1 "years"])]
     ;; Generate SQL like DATETIME('now', '+7 days')
-    (k/raw (format "DATETIME('now', '%+d %s')" (* amount multiplier) unit))))
+    (datetime (kx/literal "now")
+              (kx/literal (format "%+d %s" (* amount multiplier) unit)))))
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (kutils/func (case seconds-or-milliseconds
-                 :seconds      "DATETIME(%s, 'unixepoch')"
-                 :milliseconds "DATETIME(%s / 1000, 'unixepoch')")
-               [field-or-value]))
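+;; SQLite's DATETIME(..., 'unixepoch') expects seconds, so for milliseconds we divide by
+;; 1000 and recur as :seconds, e.g. compiling to DATETIME((ts / 1000), 'unixepoch')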
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    :seconds      (datetime expr (kx/literal "unixepoch"))
+    :milliseconds (recur nil (kx// expr 1000) :seconds)))
 
 (defrecord SQLiteDriver []
   clojure.lang.Named
@@ -114,7 +108,8 @@
 (extend SQLiteDriver
   driver/IDriver
   (merge (sql/IDriverSQLDefaultsMixin)
-         {:date-interval  date-interval
+         {:active-tables  sql/post-filtered-active-tables
+          :date-interval  date-interval
           :details-fields (constantly [{:name         "db"
                                         :display-name "Filename"
                                         :placeholder  "/home/camsaul/toucan_sightings.sqlite 😋"
@@ -129,7 +124,7 @@
                                               #{:foreign-keys})))})
   sql/ISQLDriver
   (merge (sql/ISQLDriverDefaultsMixin)
-         {:column->base-type         column->base-type
+         {:column->base-type         (sql/pattern-based-column->base-type pattern->type)
           :connection-details->spec  (fn [_ details]
                                        (kdb/sqlite3 details))
           :current-datetime-fn       (constantly (k/raw "DATETIME('now')"))
diff --git a/src/metabase/driver/sqlserver.clj b/src/metabase/driver/sqlserver.clj
index 0264494a0a233d052eb25af0a66019df5162ec74..0142a7acd9fb2c1150fddd52ef587fdfdd2ffcd7 100644
--- a/src/metabase/driver/sqlserver.clj
+++ b/src/metabase/driver/sqlserver.clj
@@ -2,10 +2,10 @@
   (:require [clojure.string :as s]
             (korma [core :as k]
                    [db :as kdb])
-            [korma.sql.utils :as utils]
+            [korma.sql.utils :as kutils]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.driver.generic-sql.util :refer [funcs]])
+            [metabase.util.korma-extensions :as kx])
   (:import net.sourceforge.jtds.jdbc.Driver)) ; need to import this in order to load JDBC driver
 
 (defn- column->base-type
@@ -58,53 +58,56 @@
       (update :subname #(cond-> (s/replace % #";database=" "/")
                           (seq instance) (str ";instance=" instance)))))
 
+(defn- date-part [unit expr]
+  (k/sqlfn :DATEPART (k/raw (name unit)) expr))
+
+(defn- date-add [unit & exprs]
+  (apply k/sqlfn* :DATEADD (k/raw (name unit)) exprs))
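+;; e.g. (date-part :weekday expr) should compile to DATEPART(weekday, expr),
+;; and (date-add :day 1 expr) to DATEADD(day, 1, expr)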
+
 (defn- date
   "See also the [jTDS SQL <-> Java types table](http://jtds.sourceforge.net/typemap.html)"
-  [_ unit field-or-value]
+  [_ unit expr]
   (case unit
-    :default         (utils/func "CAST(%s AS DATETIME)" [field-or-value])
-    :minute          (utils/func "CAST(%s AS SMALLDATETIME)" [field-or-value])
-    :minute-of-hour  (utils/func "DATEPART(minute, %s)" [field-or-value])
-    :hour            (utils/func "CAST(FORMAT(%s, 'yyyy-MM-dd HH:00:00') AS DATETIME)" [field-or-value])
-    :hour-of-day     (utils/func "DATEPART(hour, %s)" [field-or-value])
+    :default         (kx/->datetime expr)
+    :minute          (kx/cast :SMALLDATETIME expr)
+    :minute-of-hour  (date-part :minute expr)
+    :hour            (kx/->datetime (kx/format "yyyy-MM-dd HH:00:00" expr))
+    :hour-of-day     (date-part :hour expr)
     ;; jTDS is exasperating; I sense an ongoing theme here. It returns DATEs as strings instead of as java.sql.Dates
     ;; like every other SQL DB we support. Work around that by casting to DATE for truncation then back to DATETIME so we get the type we want
-    :day             (utils/func "CAST(CAST(%s AS DATE) AS DATETIME)" [field-or-value])
-    :day-of-week     (utils/func "DATEPART(weekday, %s)" [field-or-value])
-    :day-of-month    (utils/func "DATEPART(day, %s)" [field-or-value])
-    :day-of-year     (utils/func "DATEPART(dayofyear, %s)" [field-or-value])
+    :day             (kx/->datetime (kx/->date expr))
+    :day-of-week     (date-part :weekday expr)
+    :day-of-month    (date-part :day expr)
+    :day-of-year     (date-part :dayofyear expr)
     ;; Subtract the number of days needed to bring us to the first day of the week, then convert to date
     ;; The equivalent SQL looks like:
     ;;     CAST(DATEADD(day, 1 - DATEPART(weekday, %s), CAST(%s AS DATE)) AS DATETIME)
-    ;; But we have to use this ridiculous 'funcs' function in order to generate the korma form we want (AFAIK)
-    ;; utils/func only handles multiple arguments if they are comma separated and injected into a single `%s` format placeholder
-    :week            (funcs "CAST(%s AS DATETIME)"
-                            ["DATEADD(day, %s)"
-                             ["1 - DATEPART(weekday, %s)" field-or-value]
-                             ["CAST(%s AS DATE)" field-or-value]])
-    :week-of-year    (utils/func "DATEPART(iso_week, %s)" [field-or-value])
-    :month           (utils/func "CAST(FORMAT(%s, 'yyyy-MM-01') AS DATETIME)" [field-or-value])
-    :month-of-year   (utils/func "DATEPART(month, %s)" [field-or-value])
+    :week            (kx/->datetime
+                      (date-add :day
+                                (kx/- 1 (date-part :weekday expr))
+                                (kx/->date expr)))
+    :week-of-year    (date-part :iso_week expr)
+    :month           (kx/->datetime (kx/format "yyyy-MM-01" expr))
+    :month-of-year   (date-part :month expr)
     ;; Format date as yyyy-01-01 then add the appropriate number of quarters
     ;; Equivalent SQL:
     ;;     DATEADD(quarter, DATEPART(quarter, %s) - 1, FORMAT(%s, 'yyyy-01-01'))
-    :quarter         (funcs "DATEADD(quarter, %s)"
-                            ["DATEPART(quarter, %s) - 1" field-or-value]
-                            ["FORMAT(%s, 'yyyy-01-01')" field-or-value])
-    :quarter-of-year (utils/func "DATEPART(quarter, %s)" [field-or-value])
-    :year            (utils/func "DATEPART(year, %s)" [field-or-value])))
+    :quarter         (date-add :quarter
+                               (kx/dec (date-part :quarter expr))
+                               (kx/format "yyyy-01-01" expr))
+    :quarter-of-year (date-part :quarter expr)
+    :year            (date-part :year expr)))
 
 (defn- date-interval [_ unit amount]
-  (utils/generated (format "DATEADD(%s, %d, GETUTCDATE())" (name unit) amount)))
+  (date-add unit amount (k/sqlfn :GETUTCDATE)))
 
-(defn- unix-timestamp->timestamp [_ field-or-value seconds-or-milliseconds]
-  (utils/func (case seconds-or-milliseconds
-                ;; The second argument to DATEADD() gets casted to a 32-bit integer. BIGINT is 64 bites, so we tend to run into
-                ;; integer overflow errors (especially for millisecond timestamps).
-                ;; Work around this by converting the timestamps to minutes instead before calling DATEADD().
-                :seconds      "DATEADD(minute, (%s / 60), '1970-01-01')"
-                :milliseconds "DATEADD(minute, (%s / 60000), '1970-01-01')")
-              [field-or-value]))
+(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    ;; The second argument to DATEADD() gets cast to a 32-bit integer. BIGINT is 64 bits, so we tend to run into
+    ;; integer overflow errors (especially for millisecond timestamps).
+    ;; Work around this by converting the timestamps to minutes instead before calling DATEADD().
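+    ;; (e.g. a millisecond timestamp like 1447632000000 overflows a 32-bit int, whose max is
+    ;; 2147483647, while 1447632000000 / 60000 = 24127200 minutes fits comfortably.)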
+    :seconds      (date-add :minute (kx// expr 60) (kx/literal "1970-01-01"))
+    :milliseconds (recur nil (kx// expr 1000) :seconds)))
 
 (defn- apply-limit [_ korma-query {value :limit}]
   (k/modifier korma-query (format "TOP %d" value)))
diff --git a/src/metabase/driver/sync.clj b/src/metabase/driver/sync.clj
index e19616db5f8635c5ecf56d031ac19436dca2f7dc..eb719b97390cbd237da8036b059b5cde0895fc76 100644
--- a/src/metabase/driver/sync.clj
+++ b/src/metabase/driver/sync.clj
@@ -84,12 +84,12 @@
     (validate-active-tables active-tables)
 
     (mark-inactive-tables! database active-tables existing-table->id)
-    (create-new-tables!    database active-tables existing-table->id))
+    (create-new-tables!    database active-tables existing-table->id)
 
-  (fetch-and-sync-database-active-tables! driver database)
+    (fetch-and-sync-database-active-tables! driver database)
 
-  ;; Ok, now if we had a _metabase_metadata table from earlier we can go ahead and sync from it
-  (sync-metabase-metadata-table! driver database))
+    ;; Ok, now if we had a _metabase_metadata table from earlier we can go ahead and sync from it
+    (sync-metabase-metadata-table! driver database active-tables)))
 
 (defn- -sync-database-with-tracking! [driver database]
   (let [start-time    (System/currentTimeMillis)
@@ -125,8 +125,8 @@
    `keypath` is of the form `table-name.key` or `table-name.field-name.key`, where `key` is the name of some property of `Table` or `Field`.
 
    This functionality is currently only used by the Sample Dataset. In order to use this functionality, drivers must implement optional fn `:table-rows-seq`."
-  [driver database]
-  (doseq [{table-name :name} (driver/active-tables driver database)]
+  [driver database active-tables]
+  (doseq [{table-name :name} active-tables]
     (when (= (s/lower-case table-name) "_metabase_metadata")
       (doseq [{:keys [keypath value]} (driver/table-rows-seq driver database table-name)]
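+        ;; e.g. keypath "venues.description" -> table-name "venues", field-name nil, k "description"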
         (let [[_ table-name field-name k] (re-matches #"^([^.]+)\.(?:([^.]+)\.)?([^.]+)$" keypath)]
diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj
index 97192d8c5ca357e28e8fecec27dbce204937439c..a07e1d307d616e5a090f40177295ec4e0ea15942 100644
--- a/src/metabase/models/setting.clj
+++ b/src/metabase/models/setting.clj
@@ -9,7 +9,8 @@
                              [interface :refer :all]]
             [metabase.setup :as setup]
             [metabase.util :as u]
-            [metabase.util.password :as password]))
+            [metabase.util.password :as password])
+  (:import java.util.TimeZone))
 
 ;; Settings are a fast + simple way to create a setting that can be set
 ;; from the SuperAdmin page. They are saved to the Database, but intelligently
@@ -146,18 +147,25 @@
    metadata of the `Setting` under the key `::options`:
 
      *  `:internal` - This `Setting` is for internal use and shouldn't be exposed in the UI (i.e., not
-        returned by the corresponding endpoints). Default: `false`"
+                      returned by the corresponding endpoints). Default: `false`
+     *  `:getter` - A custom getter fn, which takes no arguments. Overrides the default implementation.
+     *  `:setter` - A custom setter fn, which takes a single argument. Overrides the default implementation."
   [nm description & [default-value & {:as options}]]
   {:pre [(symbol? nm)
          (string? description)]}
-  (let [setting-key (keyword nm)]
+  (let [setting-key (keyword nm)
+        value       (gensym "value")]
     `(defn ~nm ~description
        {::is-setting?   true
         ::default-value ~default-value
         ::options       ~options}
-       ([]       (or (get* ~setting-key)
-                     ~default-value))
-       ([value#] (set* ~setting-key value#)))))
+       ([]       ~(if (:getter options)
+                    `(~(:getter options))
+                    `(or (get* ~setting-key)
+                         ~default-value)))
+       ([~value] ~(if (:setter options)
+                    `(~(:setter options) ~value)
+                    `(set* ~setting-key ~value))))))
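+
+;; Example usage (the setting shown here is hypothetical, for illustration only):
+;;
+;;   (defsetting favorite-bird
+;;     "This instance's favorite bird."
+;;     "toucan"
+;;     :getter (fn [] (or (get* :favorite-bird) "toucan")))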
 
 
 ;;; ## ALL SETTINGS (ETC)
@@ -181,13 +189,17 @@
          (sort-by :key))))
 
 (defn short-timezone-name
-  [tz]
-  (.getDisplayName tz (.inDaylightTime tz (new java.util.Date)) java.util.TimeZone/SHORT))
+  "Get a short display name for a TIMEZONE, e.g. `PST`."
+  [^TimeZone timezone]
+  (.getDisplayName timezone (.inDaylightTime timezone (new java.util.Date)) TimeZone/SHORT))
 
 (defn get-instance-timezone
+  "Get the `report-timezone`, or fall back to the System default if it's not set."
   []
-  (let [tz (metabase.models.setting/get :report-timezone)]
-    (if (s/blank? tz) (java.util.TimeZone/getDefault) (java.util.TimeZone/getTimeZone tz))))
+  (let [^String timezone-name (get :report-timezone)]
+    (or (when (seq timezone-name)
+          (TimeZone/getTimeZone timezone-name))
+        (TimeZone/getDefault))))
 
 (defn public-settings
   "Return a simple map of key/value pairs which represent the public settings for the front-end application."
@@ -220,7 +232,8 @@
   "Map of setting name (keyword) -> string value, as they exist in the DB."
   (atom nil))
 
-(defentity Setting
+(defentity ^{:doc "The model that underlies `defsetting`."}
+  Setting
   [(k/table :setting)])
 
 (defn- settings-list
@@ -241,3 +254,5 @@
                                                              (s/replace "-" "_")
                                                              s/upper-case)))
                                 default)}))))
+
+(u/require-dox-in-this-namespace)
diff --git a/src/metabase/util.clj b/src/metabase/util.clj
index fc1f1dab400ee9e406f3184ea67db85227064d82..b20a5e47948f9f162794720dd736e398cec32098 100644
--- a/src/metabase/util.clj
+++ b/src/metabase/util.clj
@@ -513,5 +513,12 @@
                                   ~nm)]))]
      ~nm))
 
+(defn round-to-decimals
+  "Round (presumabily floating-point) NUMBER to DECIMAL-PLACE. Returns a `Double`.
+
+     (round-to-decimals 2 35.5058998M) -> 35.51"
+  ^Double [^Integer decimal-place, ^Number number]
+  (double (.setScale (bigdec number) decimal-place BigDecimal/ROUND_HALF_UP)))
+
 
 (require-dox-in-this-namespace)
diff --git a/src/metabase/util/korma_extensions.clj b/src/metabase/util/korma_extensions.clj
new file mode 100644
index 0000000000000000000000000000000000000000..8d21aeb256ff6a44c17047ce16d990692b2b1b9a
--- /dev/null
+++ b/src/metabase/util/korma_extensions.clj
@@ -0,0 +1,64 @@
+(ns metabase.util.korma-extensions
+  "Extensions and utility functions for [SQL Korma](http://www.sqlkorma.com/docs)."
+  (:refer-clojure :exclude [+ - / * mod inc dec cast concat format])
+  (:require (korma [core :as k])
+            (korma.sql [engine :as kengine]
+                       [utils :as kutils])
+            [metabase.util :as u]))
+
+
+(defn wrap [x] (kutils/func "(%s)" [x]))
+
+(defn infix
+  "Interpose OPERATOR between ARGS and wrap the result in parentheses.
+
+     (infix \"+\" :x :y :z) -> \"(x + y + z)\";"
+  {:arglists '([operator & args])}
+  [operator x y & more]
+  (let [x+y (kengine/infix x operator y)]
+    (if (seq more)
+      (apply infix operator x+y more)
+      (wrap x+y))))
+
+(defn math-infix
+  "Interpose OPERATOR between ARGS and wrap the result in parentheses.
+   Integer literals in ARGS are automatically wrapped in a `k/raw` form."
+  [operator & args]
+  (apply infix operator (for [arg args]
+                          (cond-> arg
+                            (integer? arg) k/raw))))
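+;; e.g. (math-infix "/" :x 100) -> "(x / 100)", with 100 emitted inline via k/raw
+;; (presumably so it isn't bound as a query parameter)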
+
+(def ^{:arglists '([& exprs])}  +  (partial math-infix "+"))
+(def ^{:arglists '([& exprs])}  -  (partial math-infix "-"))
+(def ^{:arglists '([& exprs])}  /  (partial math-infix "/"))
+(def ^{:arglists '([& exprs])}  *  (partial math-infix "*"))
+(def ^{:arglists '([& exprs])} mod (partial math-infix "%"))
+
+(defn inc [x] (+ x 1))
+(defn dec [x] (- x 1))
+
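+;; NOTE: no quote-escaping is done, so only pass trusted constant strings,
+;; e.g. (literal "start of month") -> 'start of month'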
+(defn literal [s]
+  (k/raw (str \' (name s) \')))
+
+(defn cast [c x]
+  (kutils/func (clojure.core/format "CAST(%%s AS %s)" (name c))
+               [x]))
+
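+;; NOTE: the argument order is swapped in the generated SQL,
+;; e.g. (format "yyyy-MM-01" expr) -> FORMAT(expr, 'yyyy-MM-01')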
+(defn format [format-str expr]
+  (k/sqlfn :FORMAT expr (literal format-str)))
+
+(defn ->date                     [x] (cast :DATE x))
+(defn ->datetime                 [x] (cast :DATETIME x))
+(defn ->timestamp                [x] (cast :TIMESTAMP x))
+(defn ->timestamp-with-time-zone [x] (cast "TIMESTAMP WITH TIME ZONE" x))
+(defn ->integer                  [x] (cast :INTEGER x))
+
+;;; Random SQL fns. Not all DBs support all these!
+(def ^{:arglists '([& exprs])} floor   (partial k/sqlfn* :FLOOR))
+(def ^{:arglists '([& exprs])} hour    (partial k/sqlfn* :HOUR))
+(def ^{:arglists '([& exprs])} minute  (partial k/sqlfn* :MINUTE))
+(def ^{:arglists '([& exprs])} week    (partial k/sqlfn* :WEEK))
+(def ^{:arglists '([& exprs])} month   (partial k/sqlfn* :MONTH))
+(def ^{:arglists '([& exprs])} quarter (partial k/sqlfn* :QUARTER))
+(def ^{:arglists '([& exprs])} year    (partial k/sqlfn* :YEAR))
+(def ^{:arglists '([& exprs])} concat  (partial k/sqlfn* :CONCAT))
diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj
index bf84b84c0205849fa235f913f5ed25eb8c8ef483..9f4d5c2f0cfd099f216583e4099d20b5e097dc1f 100644
--- a/test/metabase/api/database_test.clj
+++ b/test/metabase/api/database_test.clj
@@ -106,7 +106,7 @@
       (set (filter identity
                    (conj (for [engine datasets/all-valid-engines]
                            (datasets/when-testing-engine engine
-                             (match-$ (datasets/db (datasets/engine->loader engine))
+                             (match-$ (get-or-create-test-data-db! (driver/engine->driver engine))
                                {:created_at      $
                                 :engine          (name $engine)
                                 :id              $
@@ -129,7 +129,7 @@
       (cascade-delete Database :id [not-in (set (filter identity
                                                         (for [engine datasets/all-valid-engines]
                                                           (datasets/when-testing-engine engine
-                                                            (:id (datasets/db (datasets/engine->loader engine)))))))])
+                                                            (:id (get-or-create-test-data-db! (driver/engine->driver engine)))))))])
       ;; Add an extra DB so we have something to fetch besides the Test DB
       (create-db db-name)
       ;; Now hit the endpoint
diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj
index 9868628b6683f5bbe246dc6983c04b56ccc22a9e..f4422ba3c2e625ac6556e9e6f1f28dc781294dfd 100644
--- a/test/metabase/driver/generic_sql_test.clj
+++ b/test/metabase/driver/generic_sql_test.clj
@@ -2,7 +2,7 @@
   (:require [expectations :refer :all]
             [metabase.db :refer :all]
             [metabase.driver :as driver]
-            [metabase.driver.generic-sql.util :refer [korma-entity]]
+            [metabase.driver.generic-sql :refer :all]
             (metabase.models [field :refer [Field]]
                              [foreign-key :refer [ForeignKey]]
                              [table :refer [Table]])
diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj
index e859673a1874523da3df7c85ed711212696370b8..9d54d5834e8675792cf3af12d865df00ac5da18a 100644
--- a/test/metabase/driver/mongo_test.clj
+++ b/test/metabase/driver/mongo_test.clj
@@ -6,6 +6,7 @@
             [metabase.driver :as driver]
             (metabase.models [field :refer [Field]]
                              [table :refer [Table]])
+            [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets]
             [metabase.test.util :refer [expect-eval-actual-first resolve-private-fns]])
   (:import metabase.driver.mongo.MongoDriver))
@@ -18,7 +19,7 @@
      ~actual))
 
 (defn- mongo-db []
-  (datasets/db (datasets/engine->loader :mongo)))
+  (data/get-or-create-test-data-db! (driver/engine->driver :mongo)))
 
 ;; ## Constants + Helper Fns/Macros
 ;; TODO - move these to metabase.test-data ?
diff --git a/test/metabase/driver/query_processor_test.clj b/test/metabase/driver/query_processor_test.clj
index f3424ec869b8309f279d2eaf8893dd6800de2154..5be41ad82ce69b57fa5c28b3b0c5889f7bc05df3 100644
--- a/test/metabase/driver/query_processor_test.clj
+++ b/test/metabase/driver/query_processor_test.clj
@@ -11,7 +11,7 @@
                              [table :refer [Table]])
             (metabase.test.data [dataset-definitions :as defs]
                                 [datasets :as datasets :refer [*data-loader* *engine*]]
-                                [interface :refer [create-database-definition]])
+                                [interface :refer [create-database-definition], :as i])
             [metabase.test.data :refer :all]
             [metabase.test.util.q :refer [Q]]
             [metabase.util :as u]))
@@ -29,11 +29,6 @@
 (defn- engines-that-dont-support [feature]
   (set/difference datasets/all-valid-engines (engines-that-support feature)))
 
-(defmacro if-questionable-timezone-support [then else]
-  `(if (contains? #{:mongo :redshift :sqlite :sqlserver} *engine*)
-     ~then
-     ~else))
-
 (defmacro ^:private qp-expect-with-all-engines [data q-form & post-process-fns]
   `(datasets/expect-with-all-engines
     {:status    :completed
@@ -82,7 +77,7 @@
             :name         (format-name "id")
             :display_name "Id"}
      :name {:special_type :name
-            :base_type    :TextField
+            :base_type    (expected-base-type->actual :TextField)
             :name         (format-name "name")
             :display_name "Name"})))
 
@@ -100,11 +95,11 @@
                   :name         (format-name "id")
                   :display_name "Id"}
      :name       {:special_type :category
-                  :base_type    :TextField
+                  :base_type    (expected-base-type->actual :TextField)
                   :name         (format-name "name")
                   :display_name "Name"}
      :last_login {:special_type :category
-                  :base_type    (timestamp-field-type)
+                  :base_type    (expected-base-type->actual :DateTimeField)
                   :name         (format-name "last_login")
                   :display_name "Last Login"
                   :unit         :day})))
@@ -134,23 +129,23 @@
                                      nil)
                    :special_type (if (fks-supported?) :fk
                                      :category)
-                   :base_type    :IntegerField
+                   :base_type    (expected-base-type->actual :IntegerField)
                    :name         (format-name "category_id")
                    :display_name "Category Id"}
      :price       {:special_type :category
-                   :base_type    :IntegerField
+                   :base_type    (expected-base-type->actual :IntegerField)
                    :name         (format-name "price")
                    :display_name "Price"}
      :longitude   {:special_type :longitude,
-                   :base_type    :FloatField,
+                   :base_type    (expected-base-type->actual :FloatField)
                    :name         (format-name "longitude")
                    :display_name "Longitude"}
      :latitude    {:special_type :latitude
-                   :base_type    :FloatField
+                   :base_type    (expected-base-type->actual :FloatField)
                    :name         (format-name "latitude")
                    :display_name "Latitude"}
      :name        {:special_type :name
-                   :base_type    :TextField
+                   :base_type    (expected-base-type->actual :TextField)
                    :name         (format-name "name")
                    :display_name "Name"})))
 
@@ -179,7 +174,7 @@
                                   nil)
                 :special_type (when (fks-supported?)
                                 :fk)
-                :base_type    :IntegerField
+                :base_type    (expected-base-type->actual :IntegerField)
                 :name         (format-name "venue_id")
                 :display_name "Venue Id"}
      :user_id  {:extra_info   (if (fks-supported?) {:target_table_id (id :users)}
@@ -189,7 +184,7 @@
                                   nil)
                 :special_type (if (fks-supported?) :fk
                                   :category)
-                :base_type    :IntegerField
+                :base_type    (expected-base-type->actual :IntegerField)
                 :name         (format-name "user_id")
                 :display_name "User Id"})))
 
@@ -231,28 +226,46 @@
                     :stddev "stddev"
                     :sum    "sum")}))
 
+(defn- format-rows-by
+  "Format the values in result ROWS with the fns at the corresponding indecies in FORMAT-FNS.
+   ROWS can be a sequence or any of the common map formats we expect in QP tests.
+
+     (format-rows-by [int str double] [[1 1 1]]) -> [[1 \"1\" 1.0]]"
+  [format-fns rows]
+  (cond
+    (:data rows) (update-in rows [:data :rows] (partial format-rows-by format-fns))
+    (:rows rows) (update    rows :rows         (partial format-rows-by format-fns))
+    :else        (vec (for [row rows]
+                        (vec (for [[f v] (partition 2 (interleave format-fns row))]
+                               (when v
+                                 (f v))))))))
+
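+;; Rounding lat/lon to 4 decimal places (and coercing the other columns to ints) lets a
+;; single set of expected rows match across engines whose numeric precision differs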
+(def ^:private formatted-venues-rows (partial format-rows-by [int str int (partial u/round-to-decimals 4) (partial u/round-to-decimals 4) int]))
+
 
 ;; # THE TESTS THEMSELVES (!)
 
 ;; structured-query?
 (expect false (structured-query? {}))
 (expect false (structured-query? {:type "native"}))
-(expect true (structured-query? {:type "query"}))
+(expect true  (structured-query? {:type "query"}))
 
 ;; rows-query-without-limits?
 (expect false (rows-query-without-limits? {:query {:aggregation {:aggregation-type :count}}}))
-(expect true (rows-query-without-limits? {:query {:aggregation {:aggregation-type :rows}}}))
+(expect true  (rows-query-without-limits? {:query {:aggregation {:aggregation-type :rows}}}))
 (expect false (rows-query-without-limits? {:query {:aggregation {:aggregation-type :count}
                                                    :limit       10}}))
 (expect false (rows-query-without-limits? {:query {:aggregation {:aggregation-type :count}
                                                    :page        1}}))
 
 ;; ### "COUNT" AGGREGATION
+
 (qp-expect-with-all-engines
     {:rows    [[100]]
      :columns ["count"]
      :cols    [(aggregate-col :count)]}
-  (Q aggregate count of venues))
+  (Q aggregate count of venues
+     return (format-rows-by [int])))
 
 
 ;; ### "SUM" AGGREGATION
@@ -260,21 +273,17 @@
     {:rows    [[203]]
      :columns ["sum"]
      :cols    [(aggregate-col :sum (venues-col :price))]}
-  (Q aggregate sum price of venues)
-  ;; for some annoying reason SUM(`venues`.`price`) in MySQL comes back from JDBC as a BigDecimal.
-  ;; Cast results as int regardless because ain't nobody got time for dat
-  (update-in [:data :rows] vec)
-  (update-in [:data :rows 0 0] int))
+  (Q aggregate sum price of venues
+     return (format-rows-by [int])))
 
 
 ;; ## "AVG" AGGREGATION
 (qp-expect-with-all-engines
-    {:rows    [[(if (= *engine* :redshift)
-                  35.505892
-                  35.50589199999998)]]
+    {:rows    [[35.5059]]
      :columns ["avg"]
      :cols    [(aggregate-col :avg (venues-col :latitude))]}
-  (Q aggregate avg latitude of venues))
+  (Q aggregate avg latitude of venues
+     return (format-rows-by [(partial u/round-to-decimals 4)])))
 
 
 ;; ### "DISTINCT COUNT" AGGREGATION
@@ -282,115 +291,121 @@
     {:rows    [[15]]
      :columns ["count"]
      :cols    [(aggregate-col :count)]}
-  (Q aggregate distinct user_id of checkins))
+  (Q aggregate distinct user_id of checkins
+     return (format-rows-by [int])))
 
 
 ;; ## "ROWS" AGGREGATION
 ;; Test that a rows aggregation just returns rows as-is.
 (qp-expect-with-all-engines
-    {:rows    [[1 "Red Medicine" 4 10.0646 -165.374 3]
-               [2 "Stout Burgers & Beers" 11 34.0996 -118.329 2]
-               [3 "The Apple Pan" 11 34.0406 -118.428 2]
-               [4 "Wurstküche" 29 33.9997 -118.465 2]
-               [5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]
-               [6 "The 101 Coffee Shop" 20 34.1054 -118.324 2]
-               [7 "Don Day Korean Restaurant" 44 34.0689 -118.305 2]
-               [8 "25°" 11 34.1015 -118.342 2]
-               [9 "Krua Siri" 71 34.1018 -118.301 1]
-               [10 "Fred 62" 20 34.1046 -118.292 2]]
+    {:rows    [[ 1 "Red Medicine"                  4 10.0646 -165.374 3]
+               [ 2 "Stout Burgers & Beers"        11 34.0996 -118.329 2]
+               [ 3 "The Apple Pan"                11 34.0406 -118.428 2]
+               [ 4 "Wurstküche"                   29 33.9997 -118.465 2]
+               [ 5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]
+               [ 6 "The 101 Coffee Shop"          20 34.1054 -118.324 2]
+               [ 7 "Don Day Korean Restaurant"    44 34.0689 -118.305 2]
+               [ 8 "25°"                          11 34.1015 -118.342 2]
+               [ 9 "Krua Siri"                    71 34.1018 -118.301 1]
+               [10 "Fred 62"                      20 34.1046 -118.292 2]]
      :columns (venues-columns)
      :cols    (venues-cols)}
   (Q aggregate rows of venues
      limit 10
-     order id+))
+     order id+
+     return formatted-venues-rows))
 
 
 ;; ## "PAGE" CLAUSE
 ;; Test that we can get "pages" of results.
 
 ;; ### PAGE - Get the first page
-(qp-expect-with-all-engines
-    {:rows    [[1 "African"]
-               [2 "American"]
-               [3 "Artisan"]
-               [4 "Asian"]
-               [5 "BBQ"]]
-     :columns (->columns "id" "name")
-     :cols    [(categories-col :id)
-               (categories-col :name)]}
-  (Q aggregate rows of categories
-     page 1 items 5
-     order id+))
+(datasets/expect-with-all-engines
+    [[1 "African"]
+     [2 "American"]
+     [3 "Artisan"]
+     [4 "Asian"]
+     [5 "BBQ"]]
+    (Q aggregate rows of categories
+       return rows (format-rows-by [int str])
+       page 1 items 5
+       order id+))
 
 ;; ### PAGE - Get the second page
-(qp-expect-with-all-engines
-    {:rows    [[6 "Bakery"]
-               [7 "Bar"]
-               [8 "Beer Garden"]
-               [9 "Breakfast / Brunch"]
-               [10 "Brewery"]]
-     :columns (->columns "id" "name")
-     :cols    [(categories-col :id)
-               (categories-col :name)]}
-  (Q aggregate rows of categories
-     page 2 items 5
-     order id+))
+(datasets/expect-with-all-engines
+    [[ 6 "Bakery"]
+     [ 7 "Bar"]
+     [ 8 "Beer Garden"]
+     [ 9 "Breakfast / Brunch"]
+     [10 "Brewery"]]
+    (Q aggregate rows of categories
+       page 2 items 5
+       order id+
+       return rows (format-rows-by [int str])))
 
 
 ;; ## "ORDER_BY" CLAUSE
 ;; Test that we can tell the Query Processor to return results ordered by multiple fields
-(qp-expect-with-all-engines
-    {:rows    [[1 12 375] [1 9 139] [1 1 72] [2 15 129] [2 12 471] [2 11 325] [2 9 590] [2 9 833] [2 8 380] [2 5 719]],
-     :columns (->columns "venue_id" "user_id" "id")
-     :cols    [(checkins-col :venue_id)
-               (checkins-col :user_id)
-               (checkins-col :id)]}
+(datasets/expect-with-all-engines
+  [[1 12 375]
+   [1  9 139]
+   [1  1  72]
+   [2 15 129]
+   [2 12 471]
+   [2 11 325]
+   [2  9 590]
+   [2  9 833]
+   [2  8 380]
+   [2  5 719]]
   (Q aggregate rows of checkins
      fields venue_id user_id id
      order venue_id+ user_id- id+
-     limit 10))
+     limit 10
+     return rows (format-rows-by [int int int])))
 
 
 ;; ## "FILTER" CLAUSE
 
 ;; ### FILTER -- "AND", ">", ">="
-(qp-expect-with-all-engines
-    {:rows    [[55 "Dal Rae Restaurant" 67 33.983 -118.096 4]
-               [61 "Lawry's The Prime Rib" 67 34.0677 -118.376 4]
-               [77 "Sushi Nakazawa" 40 40.7318 -74.0045 4]
-               [79 "Sushi Yasuda" 40 40.7514 -73.9736 4]
-               [81 "Tanoshi Sushi & Sake Bar" 40 40.7677 -73.9533 4]]
-     :columns (venues-columns)
-     :cols    (venues-cols)}
+(datasets/expect-with-all-engines
+  [[55 "Dal Rae Restaurant"       67 33.983  -118.096 4]
+   [61 "Lawry's The Prime Rib"    67 34.0677 -118.376 4]
+   [77 "Sushi Nakazawa"           40 40.7318 -74.0045 4]
+   [79 "Sushi Yasuda"             40 40.7514 -73.9736 4]
+   [81 "Tanoshi Sushi & Sake Bar" 40 40.7677 -73.9533 4]]
   (Q aggregate rows of venues
-     filter and > id 50, >= price 4))
+     filter and > id 50, >= price 4
+     order id+
+     return rows formatted-venues-rows))
+
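+;; Debugging helper: compare A and B with compare-expr, passing along their quoted forms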
+(defmacro compaare [a b]
+  `(compare-expr ~a ~b '~a '~b))
 
 ;; ### FILTER -- "AND", "<", ">", "!="
-(qp-expect-with-all-engines
-    {:rows    [[21 "PizzaHacker" 58 37.7441 -122.421 2]
-               [23 "Taqueria Los Coyotes" 50 37.765 -122.42 2]]
-     :columns (venues-columns)
-     :cols    (venues-cols)}
+(datasets/expect-with-all-engines
+  [[21 "PizzaHacker"          58 37.7441 -122.421 2]
+   [23 "Taqueria Los Coyotes" 50 37.765  -122.42  2]]
   (Q aggregate rows of venues
-     filter and < id 24, > id 20, != id 22))
+     filter and < id 24, > id 20, != id 22
+     order id+
+     return rows formatted-venues-rows))
 
 ;; ### FILTER WITH A FALSE VALUE
 ;; Check that we're checking for non-nil values, not just logically true ones.
 ;; There's only one place (out of 3) that I don't like
 (datasets/expect-with-all-engines
- [1]
+  [[1]]
  (Q dataset places-cam-likes
-    return first-row
+    return rows (format-rows-by [int])
     aggregate count of places
     filter = liked false))
 
 ;; ### FILTER -- "BETWEEN", single subclause (neither "AND" nor "OR")
-(qp-expect-with-all-engines
-    {:rows    [[21 "PizzaHacker" 58 37.7441 -122.421 2]
-               [22 "Gordo Taqueria" 50 37.7822 -122.484 1]]
-     :columns (venues-columns)
-     :cols    (venues-cols)}
+(datasets/expect-with-all-engines
+  [[21 "PizzaHacker"    58 37.7441 -122.421 2]
+   [22 "Gordo Taqueria" 50 37.7822 -122.484 1]]
   (Q aggregate rows of venues
+     return rows formatted-venues-rows
      filter between id 21 22
      order id+))
 
@@ -400,28 +415,27 @@
      :columns ["count"]
      :cols    [(aggregate-col :count)]}
   (Q aggregate count of checkins
-     filter and between date "2015-04-01" "2015-05-01"))
+     filter and between date "2015-04-01" "2015-05-01"
+     return (format-rows-by [int])))
 
 ;; ### FILTER -- "OR", "<=", "="
-(qp-expect-with-all-engines
-    {:rows    [[1 "Red Medicine" 4 10.0646 -165.374 3]
-               [2 "Stout Burgers & Beers" 11 34.0996 -118.329 2]
-               [3 "The Apple Pan" 11 34.0406 -118.428 2]
-               [5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]]
-     :columns (venues-columns)
-     :cols    (venues-cols)}
+(datasets/expect-with-all-engines
+  [[1 "Red Medicine"                  4 10.0646 -165.374 3]
+   [2 "Stout Burgers & Beers"        11 34.0996 -118.329 2]
+   [3 "The Apple Pan"                11 34.0406 -118.428 2]
+   [5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]]
   (Q aggregate rows of venues
      filter or <= id 3 = id 5
-     order id+))
+     order id+
+     return rows formatted-venues-rows))
 
 ;; ### FILTER -- "INSIDE"
-(qp-expect-with-all-engines
-    {:rows    [[1 "Red Medicine" 4 10.0646 -165.374 3]]
-     :columns (venues-columns)
-     :cols    (venues-cols)}
+(datasets/expect-with-all-engines
+  [[1 "Red Medicine" 4 10.0646 -165.374 3]]
   (Q aggregate rows of venues
-     filter inside {:lat {:field latitude, :min 10.0641, :max 10.0649}
-                    :lon {:field longitude, :min -165.379, :max -165.371}}))
+     filter inside {:lat {:field latitude,  :min 10.0641,  :max 10.0649}
+                    :lon {:field longitude, :min -165.379, :max -165.371}}
+     return rows formatted-venues-rows))
 
 
 ;; ## "FIELDS" CLAUSE
@@ -441,6 +455,7 @@
      :cols    [(venues-col :name)
                (venues-col :id)]}
   (Q aggregate rows of venues
+     return (format-rows-by [str int])
      fields name id
      limit 10
      order id+))
@@ -455,6 +470,7 @@
      :cols    [(checkins-col :user_id)
                (aggregate-col :count)]}
   (Q aggregate count of checkins
+     return (format-rows-by [int int])
      breakout user_id
      order user_id+))
 
@@ -465,6 +481,7 @@
      :columns [(format-name "user_id")]
      :rows    [[1] [2] [3] [4] [5] [6] [7] [8] [9] [10]]}
   (Q breakout user_id of checkins
+     return (format-rows-by [int])
      limit 10))
 
 
@@ -479,6 +496,7 @@
                (checkins-col :venue_id)
                (aggregate-col :count)]}
   (Q aggregate count of checkins
+     return (format-rows-by [int int int])
      breakout user_id venue_id
      limit 10))
 
@@ -493,6 +511,7 @@
                (checkins-col :venue_id)
                (aggregate-col :count)]}
   (Q aggregate count of checkins
+     return (format-rows-by [int int int])
      breakout user_id venue_id
      order user_id- venue_id+
      limit 10))
@@ -511,36 +530,30 @@
 
 ;; Apply an arbitrary max-results on the query and ensure our results size is appropriately constrained
 (expect 1234
-  (->> ((@(resolve 'metabase.driver.query-processor/limit) identity) {:constraints {:max-results 1234}
-                                                                                    :query       {:aggregation {:aggregation-type :count}}
-                                                                                    :rows        (repeat [:ok])})
+  (->> (((resolve 'metabase.driver.query-processor/limit) identity) {:constraints {:max-results 1234}
+                                                                     :query       {:aggregation {:aggregation-type :count}}
+                                                                     :rows        (repeat [:ok])})
        :rows
        count))
 
 ;; Apply a max-results-bare-rows limit specifically on :rows type query
 (expect [46 46]
-  (let [res ((@(resolve 'metabase.driver.query-processor/limit) identity) {:constraints {:max-results 46}
-                                                                                         :query       {:aggregation {:aggregation-type :rows}}
-                                                                                         :rows        (repeat [:ok])})]
+  (let [res (((resolve 'metabase.driver.query-processor/limit) identity) {:constraints {:max-results 46}
+                                                                          :query       {:aggregation {:aggregation-type :rows}}
+                                                                          :rows        (repeat [:ok])})]
     [(->> res :rows count)
      (->> res :query :limit)]))
 
 
 ;; ## CUMULATIVE SUM
 
-(defn- ->sum-type
-  "Since summed integer fields come back as different types depending on which DB we're use, cast value V appropriately."
-  [v]
-  ((case (sum-field-type)
-      :IntegerField    int
-      :BigIntegerField bigdec) v))
-
 ;; ### cum_sum w/o breakout should be treated the same as sum
 (qp-expect-with-all-engines
-    {:rows    [[(->sum-type 120)]]
+    {:rows    [[120]]
      :columns ["sum"]
      :cols    [(aggregate-col :sum (users-col :id))]}
-  (Q aggregate cum-sum id of users))
+  (Q aggregate cum-sum id of users
+     return (format-rows-by [int])))
 
 
 ;; ### Simple cumulative sum where breakout field is same as cum_sum field
@@ -549,32 +562,34 @@
      :columns (->columns "id")
      :cols    [(users-col :id)]}
   (Q aggregate cum-sum id of users
-     breakout id))
+     breakout id
+     return (format-rows-by [int])))
 
 
 ;; ### Cumulative sum w/ a different breakout field
 (qp-expect-with-all-engines
-    {:rows    [["Broen Olujimi"       (->sum-type 14)]
-               ["Conchúr Tihomir"     (->sum-type 21)]
-               ["Dwight Gresham"      (->sum-type 34)]
-               ["Felipinho Asklepios" (->sum-type 36)]
-               ["Frans Hevel"         (->sum-type 46)]
-               ["Kaneonuskatew Eiran" (->sum-type 49)]
-               ["Kfir Caj"            (->sum-type 61)]
-               ["Nils Gotam"          (->sum-type 70)]
-               ["Plato Yeshua"        (->sum-type 71)]
-               ["Quentin Sören"       (->sum-type 76)]
-               ["Rüstem Hebel"        (->sum-type 91)]
-               ["Shad Ferdynand"      (->sum-type 97)]
-               ["Simcha Yan"          (->sum-type 101)]
-               ["Spiros Teofil"       (->sum-type 112)]
-               ["Szymon Theutrich"    (->sum-type 120)]]
+    {:rows    [["Broen Olujimi"        14]
+               ["Conchúr Tihomir"      21]
+               ["Dwight Gresham"       34]
+               ["Felipinho Asklepios"  36]
+               ["Frans Hevel"          46]
+               ["Kaneonuskatew Eiran"  49]
+               ["Kfir Caj"             61]
+               ["Nils Gotam"           70]
+               ["Plato Yeshua"         71]
+               ["Quentin Sören"        76]
+               ["Rüstem Hebel"         91]
+               ["Shad Ferdynand"       97]
+               ["Simcha Yan"          101]
+               ["Spiros Teofil"       112]
+               ["Szymon Theutrich"    120]]
      :columns [(format-name "name")
                "sum"]
      :cols    [(users-col :name)
                (aggregate-col :sum (users-col :id))]}
   (Q aggregate cum-sum id of users
-     breakout name))
+     breakout name
+     return (format-rows-by [str int])))
 
 
 ;; ### Cumulative sum w/ a different breakout field that requires grouping
@@ -583,12 +598,13 @@
                "sum"]
      :cols    [(venues-col :price)
                (aggregate-col :sum (venues-col :id))]
-     :rows    [[1 (->sum-type 1211)]
-               [2 (->sum-type 4066)]
-               [3 (->sum-type 4681)]
-               [4 (->sum-type 5050)]]}
+     :rows    [[1 1211]
+               [2 4066]
+               [3 4681]
+               [4 5050]]}
   (Q aggregate cum-sum id of venues
-     breakout price))
+     breakout price
+     return (format-rows-by [int int])))
 
 
 ;;; ## STDDEV AGGREGATION
@@ -598,13 +614,10 @@
 (qp-expect-with-engines (engines-that-support :standard-deviation-aggregations)
   {:columns ["stddev"]
    :cols    [(aggregate-col :stddev (venues-col :latitude))]
-   :rows    [[(datasets/engine-case
-                :h2        3.43467255295115      ; annoying :/
-                :mysql     3.417456040761316
-                :postgres  3.4346725529512736
-                :redshift  3.43467255295115
-                :sqlserver 3.43467255295126)]]}
-  (Q aggregate stddev latitude of venues))
+   :rows    [[3.4]]}
+  (-> (Q aggregate stddev latitude of venues)
+      (update-in [:data :rows] (fn [[[v]]]
+                                 [[(u/round-to-decimals 1 v)]]))))
 
 ;; Make sure standard deviation fails for the Mongo driver since it's not supported
 (datasets/expect-with-engines (engines-that-dont-support :standard-deviation-aggregations)
@@ -619,7 +632,7 @@
 (qp-expect-with-all-engines
   {:columns [(format-name "price")
              "count"]
-   :rows    [[4 6]
+   :rows    [[4  6]
              [3 13]
              [1 22]
              [2 59]]
@@ -627,29 +640,32 @@
              (aggregate-col :count)]}
   (Q aggregate count of venues
      breakout price
-     order ag.0+))
+     order ag.0+
+     return (format-rows-by [int int])))
 
 
 ;;; ### order_by aggregate ["sum" field-id]
 (qp-expect-with-all-engines
   {:columns [(format-name "price")
              "sum"]
-   :rows    [[2 (->sum-type 2855)]
-             [1 (->sum-type 1211)]
-             [3 (->sum-type 615)]
-             [4 (->sum-type 369)]]
+   :rows    [[2 2855]
+             [1 1211]
+             [3  615]
+             [4  369]]
    :cols    [(venues-col :price)
              (aggregate-col :sum (venues-col :id))]}
   (Q aggregate sum id of venues
      breakout price
-     order ag.0-))
+     order ag.0-
+     return (format-rows-by [int int])))
 
 
 ;;; ### order_by aggregate ["distinct" field-id]
 (qp-expect-with-all-engines
   {:columns [(format-name "price")
              "count"]
-   :rows    [[4 6]
+   :rows    [[4  6]
              [3 13]
              [1 22]
              [2 59]]
@@ -657,52 +673,24 @@
              (aggregate-col :count)]}
   (Q aggregate distinct id of venues
      breakout price
-     order ag.0+))
+     order ag.0+
+     return (format-rows-by [int int])))
 
 
 ;;; ### order_by aggregate ["avg" field-id]
 (datasets/expect-with-all-engines
   {:columns [(format-name "price")
              "avg"]
-   :rows    [[3 (datasets/engine-case
-                  :h2        22
-                  :mongo     22.0
-                  :mysql     22.0000M
-                  :postgres  22.0000000000000000M
-                  :redshift  22
-                  :sqlite    22.0
-                  :sqlserver 22)]
-             [2 (datasets/engine-case
-                  :h2        28
-                  :mongo     28.28813559322034
-                  :mysql     28.2881M
-                  :postgres  28.2881355932203390M
-                  :redshift  28
-                  :sqlite    28.28813559322034
-                  :sqlserver 28)]
-             [1 (datasets/engine-case
-                  :h2        32
-                  :mongo     32.81818181818182
-                  :mysql     32.8182M
-                  :postgres  32.8181818181818182M
-                  :redshift  32
-                  :sqlite    32.81818181818182
-                  :sqlserver 32)]
-             [4 (datasets/engine-case
-                  :h2        53
-                  :mongo     53.5
-                  :mysql     53.5000M
-                  :postgres  53.5000000000000000M
-                  :redshift  53
-                  :sqlite    53.5
-                  :sqlserver 53)]]
+   :rows    [[3 22]
+             [2 28]
+             [1 32]
+             [4 53]]
    :cols    [(venues-col :price)
              (aggregate-col :avg (venues-col :category_id))]}
-  (Q return :data
-     of venues
-     aggregate avg category_id
+  (Q aggregate avg category_id of venues
      breakout price
-     order ag.0+))
+     order ag.0+
+     return :data (format-rows-by [int int])))
 
 ;;; ### order_by aggregate ["stddev" field-id]
 ;; MySQL has a nasty tendency to return different results on different systems so just round everything to the nearest int.
@@ -710,19 +698,16 @@
 (datasets/expect-with-engines (engines-that-support :standard-deviation-aggregations)
   {:columns [(format-name "price")
              "stddev"]
-   :rows    [[3 (datasets/engine-case :h2 26, :mysql 25, :postgres 26, :redshift 26, :sqlserver 26)]
+   :rows    [[3 (if (= *engine* :mysql) 25 26)]
              [1 24]
              [2 21]
-             [4 (datasets/engine-case :h2 15, :mysql 14, :postgres 15, :redshift 15, :sqlserver 15)]]
+             [4 (if (= *engine* :mysql) 14 15)]]
    :cols    [(venues-col :price)
              (aggregate-col :stddev (venues-col :category_id))]}
-  (-> (Q return :data
-         of venues
-         aggregate stddev category_id
+  (-> (Q aggregate stddev category_id of venues
          breakout price
-         order ag.0-)
-      (update :rows (partial mapv (fn [[x y]]
-                                    [x (int (math/round y))])))))
+         order ag.0-
+         return :data (format-rows-by [int (comp int math/round)]))))
 
 ;;; ### make sure that rows where preview_display = false are included and properly marked up
 (datasets/expect-with-all-engines
@@ -753,15 +738,15 @@
      :cols    [(users-col :id)
                (users-col :last_login)
                (users-col :name)],
-     :rows    [[1 "Plato Yeshua"]
-               [2 "Felipinho Asklepios"]
-               [3 "Kaneonuskatew Eiran"]
-               [4 "Simcha Yan"]
-               [5 "Quentin Sören"]
-               [6 "Shad Ferdynand"]
-               [7 "Conchúr Tihomir"]
-               [8 "Szymon Theutrich"]
-               [9 "Nils Gotam"]
+     :rows    [[ 1 "Plato Yeshua"]
+               [ 2 "Felipinho Asklepios"]
+               [ 3 "Kaneonuskatew Eiran"]
+               [ 4 "Simcha Yan"]
+               [ 5 "Quentin Sören"]
+               [ 6 "Shad Ferdynand"]
+               [ 7 "Conchúr Tihomir"]
+               [ 8 "Szymon Theutrich"]
+               [ 9 "Nils Gotam"]
                [10 "Frans Hevel"]
                [11 "Spiros Teofil"]
                [12 "Kfir Caj"]
@@ -772,7 +757,7 @@
   (-> (Q aggregate rows of users
          order id+)
       (update-in [:data :rows] (partial mapv (fn [[id last-login name]]
-                                               [id name])))))
+                                               [(int id) name])))))
 
 
 ;; +------------------------------------------------------------------------------------------------------------------------+
@@ -781,7 +766,7 @@
 
 ;; There were 9 "sad toucan incidents" on 2015-06-02
 (datasets/expect-with-all-engines
-  (if-questionable-timezone-support
+  (if (i/has-questionable-timezone-support? *data-loader*)
     10
     9)
   (Q dataset sad-toucan-incidents
@@ -793,48 +778,48 @@
 
 (datasets/expect-with-all-engines
   (cond
-    ;; SQL Server, Mongo, and Redshift don't have a concept of timezone so results are all grouped by UTC
-    (contains? #{:mongo :redshift :sqlserver} *engine*)
-    [[#inst "2015-06-01T07" 6]
-     [#inst "2015-06-02T07" 10]
-     [#inst "2015-06-03T07" 4]
-     [#inst "2015-06-04T07" 9]
-     [#inst "2015-06-05T07" 9]
-     [#inst "2015-06-06T07" 8]
-     [#inst "2015-06-07T07" 8]
-     [#inst "2015-06-08T07" 9]
-     [#inst "2015-06-09T07" 7]
-     [#inst "2015-06-10T07" 9]]
-
     (= *engine* :sqlite)
-    [["2015-06-01" 6]
+    [["2015-06-01"  6]
      ["2015-06-02" 10]
-     ["2015-06-03" 4]
-     ["2015-06-04" 9]
-     ["2015-06-05" 9]
-     ["2015-06-06" 8]
-     ["2015-06-07" 8]
-     ["2015-06-08" 9]
-     ["2015-06-09" 7]
-     ["2015-06-10" 9]]
-
-    ;; Postgres, Redshift, MySQL, and H2 -- grouped by DB timezone, US/Pacific in this case
+     ["2015-06-03"  4]
+     ["2015-06-04"  9]
+     ["2015-06-05"  9]
+     ["2015-06-06"  8]
+     ["2015-06-07"  8]
+     ["2015-06-08"  9]
+     ["2015-06-09"  7]
+     ["2015-06-10"  9]]
+
+    ;; SQL Server, Mongo, and Redshift don't have a concept of timezone so results are all grouped by UTC
+    (i/has-questionable-timezone-support? *data-loader*)
+    [[#inst "2015-06-01T07"  6]
+     [#inst "2015-06-02T07" 10]
+     [#inst "2015-06-03T07"  4]
+     [#inst "2015-06-04T07"  9]
+     [#inst "2015-06-05T07"  9]
+     [#inst "2015-06-06T07"  8]
+     [#inst "2015-06-07T07"  8]
+     [#inst "2015-06-08T07"  9]
+     [#inst "2015-06-09T07"  7]
+     [#inst "2015-06-10T07"  9]]
+
+    ;; Postgres, MySQL, and H2 -- grouped by DB timezone, US/Pacific in this case
     :else
-    [[#inst "2015-06-01T07" 8]
-     [#inst "2015-06-02T07" 9]
-     [#inst "2015-06-03T07" 9]
-     [#inst "2015-06-04T07" 4]
+    [[#inst "2015-06-01T07"  8]
+     [#inst "2015-06-02T07"  9]
+     [#inst "2015-06-03T07"  9]
+     [#inst "2015-06-04T07"  4]
      [#inst "2015-06-05T07" 11]
-     [#inst "2015-06-06T07" 8]
-     [#inst "2015-06-07T07" 6]
+     [#inst "2015-06-06T07"  8]
+     [#inst "2015-06-07T07"  6]
      [#inst "2015-06-08T07" 10]
-     [#inst "2015-06-09T07" 6]
+     [#inst "2015-06-09T07"  6]
      [#inst "2015-06-10T07" 10]])
   (Q dataset sad-toucan-incidents
      aggregate count of incidents
      breakout timestamp
      limit 10
-     return rows))
+     return rows (format-rows-by [identity int])))
 
 
 ;; +------------------------------------------------------------------------------------------------------------------------+
@@ -844,32 +829,31 @@
 ;; The top 10 cities by number of Tupac sightings
 ;; Test that we can breakout on an FK field (Note how the FK Field is returned in the results)
 (datasets/expect-with-engines (engines-that-support :foreign-keys)
-  [["Arlington" 16]
-   ["Albany" 15]
-   ["Portland" 14]
-   ["Louisville" 13]
+  [["Arlington"    16]
+   ["Albany"       15]
+   ["Portland"     14]
+   ["Louisville"   13]
    ["Philadelphia" 13]
-   ["Anchorage" 12]
-   ["Lincoln" 12]
-   ["Houston" 11]
-   ["Irvine" 11]
-   ["Lakeland" 11]]
+   ["Anchorage"    12]
+   ["Lincoln"      12]
+   ["Houston"      11]
+   ["Irvine"       11]
+   ["Lakeland"     11]]
   (Q dataset tupac-sightings
-     return rows
-     of sightings
-     aggregate count
+     aggregate count of sightings
      breakout city_id->cities.name
      order ag.0-
-     limit 10))
+     limit 10
+     return rows (format-rows-by [str int])))
 
 
 ;; Number of Tupac sightings in the Expa office
 ;; (he was spotted here 60 times)
 ;; Test that we can filter on an FK field
 (datasets/expect-with-engines (engines-that-support :foreign-keys)
-  60
+  [[60]]
   (Q dataset tupac-sightings
-     return first-row first
+     return rows (format-rows-by [int])
      aggregate count of sightings
      filter = category_id->categories.id 8))
 
@@ -888,8 +872,8 @@
    [996 "At a Restaurant"]
    [897 "Wearing a Biggie Shirt"]
    [499 "In the Expa Office"]]
-  (Q dataset tupac-sightings
-     return rows of sightings
+  (Q dataset tupac-sightings of sightings
+     return rows (format-rows-by [int str])
      fields id category_id->categories.name
      order timestamp-
      limit 10))
@@ -902,18 +886,18 @@
 (datasets/expect-with-engines (engines-that-support :foreign-keys)
   ;; CITY_ID, CATEGORY_ID, ID
   ;; Cities are already alphabetized in the source data which is why CITY_ID is sorted
-  [[1 12 6]
+  [[1 12   6]
    [1 11 355]
    [1 11 596]
    [1 13 379]
-   [1 5 413]
-   [1 1 426]
-   [2 11 67]
+   [1  5 413]
+   [1  1 426]
+   [2 11  67]
    [2 11 524]
-   [2 13 77]
+   [2 13  77]
    [2 13 202]]
   (Q dataset tupac-sightings
-     return rows (map butlast) (map reverse) ; drop timestamps. reverse ordering to make the results columns order match order_by
+     return rows (map butlast) (map reverse) (format-rows-by [int int int]) ; drop timestamps. reverse ordering to make the results columns order match order_by
      of sightings
      order city_id->cities.name+ category_id->categories.name- id+
      limit 10))
@@ -1009,14 +993,14 @@
 ;;; Nested Field in BREAKOUT
 ;; Let's see how many tips we have by source.service
 (datasets/expect-with-engines (engines-that-support :nested-fields)
-  {:rows    [["facebook" 107]
-             ["flare" 105]
+  {:rows    [["facebook"   107]
+             ["flare"      105]
              ["foursquare" 100]
-             ["twitter" 98]
-             ["yelp" 90]]
+             ["twitter"     98]
+             ["yelp"        90]]
    :columns ["source.service" "count"]}
   (Q dataset geographical-tips
-     return :data (#(dissoc % :cols))
+     return :data (#(dissoc % :cols)) (format-rows-by [str int])
      aggregate count of tips
      breakout source...service))
 
@@ -1045,86 +1029,85 @@
 
 ;;; Nested Field w/ ordering by aggregation
 (datasets/expect-with-engines (engines-that-support :nested-fields)
-    [["jane" 4]
-     ["kyle" 5]
-     ["tupac" 5]
-     ["jessica" 6]
-     ["bob" 7]
-     ["lucky_pigeon" 7]
-     ["joe" 8]
-     ["mandy" 8]
-     ["amy" 9]
-     ["biggie" 9]
-     ["sameer" 9]
-     ["cam_saul" 10]
-     ["rasta_toucan" 13]
-     [nil 400]]
+  [["jane"           4]
+   ["kyle"           5]
+   ["tupac"          5]
+   ["jessica"        6]
+   ["bob"            7]
+   ["lucky_pigeon"   7]
+   ["joe"            8]
+   ["mandy"          8]
+   ["amy"            9]
+   ["biggie"         9]
+   ["sameer"         9]
+   ["cam_saul"      10]
+   ["rasta_toucan"  13]
+   [nil            400]]
   (Q dataset geographical-tips
-     return rows
      aggregate count of tips
      breakout source...mayor
-     order ag.0))
+     order ag.0
+     return rows (format-rows-by [identity int])))
 
 
 ;;; # New Filter Types - CONTAINS, STARTS_WITH, ENDS_WITH
 
 ;;; ## STARTS_WITH
 (datasets/expect-with-all-engines
- [[41 "Cheese Steak Shop" 18 37.7855 -122.44 1]
-  [74 "Chez Jay" 2 34.0104 -118.493 2]]
- (Q return rows
-    aggregate rows of venues
+ [[41 "Cheese Steak Shop" 18 37.7855 -122.44  1]
+  [74 "Chez Jay"           2 34.0104 -118.493 2]]
+ (Q aggregate rows of venues
     filter starts-with name "Che"
-    order id))
+    order id
+    return rows formatted-venues-rows))
 
 
 ;;; ## ENDS_WITH
 (datasets/expect-with-all-engines
- [[5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]
-  [7 "Don Day Korean Restaurant" 44 34.0689 -118.305 2]
-  [17 "Ruen Pair Thai Restaurant" 71 34.1021 -118.306 2]
-  [45 "Tu Lan Restaurant" 4 37.7821 -122.41 1]
-  [55 "Dal Rae Restaurant" 67 33.983 -118.096 4]]
- (Q return rows
-    aggregate rows of venues
+ [[ 5 "Brite Spot Family Restaurant" 20 34.0778 -118.261 2]
+  [ 7 "Don Day Korean Restaurant"    44 34.0689 -118.305 2]
+  [17 "Ruen Pair Thai Restaurant"    71 34.1021 -118.306 2]
+  [45 "Tu Lan Restaurant"             4 37.7821 -122.41  1]
+  [55 "Dal Rae Restaurant"           67 33.983  -118.096 4]]
+ (Q aggregate rows of venues
     filter ends-with name "Restaurant"
-    order id))
-
+    order id
+    return rows formatted-venues-rows))
 
 ;;; ## CONTAINS
 (datasets/expect-with-all-engines
- [[31 "Bludso's BBQ" 5 33.8894 -118.207 2]
-  [34 "Beachwood BBQ & Brewing" 10 33.7701 -118.191 2]
-  [39 "Baby Blues BBQ" 5 34.0003 -118.465 2]]
- (Q return rows
-    aggregate rows of venues
-    filter contains name "BBQ"
-    order id))
+  [[31 "Bludso's BBQ"             5 33.8894 -118.207 2]
+   [34 "Beachwood BBQ & Brewing" 10 33.7701 -118.191 2]
+   [39 "Baby Blues BBQ"           5 34.0003 -118.465 2]]
+  (Q aggregate rows of venues
+     filter contains name "BBQ"
+     order id
+     return rows formatted-venues-rows))
 
 ;;; ## Nested AND / OR
 
 (datasets/expect-with-all-engines
- [81]
- (Q aggregate count of venues
-    filter and != price 3
-               or = price 1
-               or = price 2
-    return first-row))
+  [[81]]
+  (Q aggregate count of venues
+     filter and != price 3
+                or = price 1
+                or = price 2
+     return rows (format-rows-by [int])))
 
 
 ;;; ## = / != with multiple values
 
 (datasets/expect-with-all-engines
- [81]
- (Q return first-row
-    aggregate count of venues
-    filter = price 1 2))
+  [[81]]
+  (Q aggregate count of venues
+     filter = price 1 2
+     return rows (format-rows-by [int])))
 
 (datasets/expect-with-all-engines
- [19]
- (Q return first-row
-    aggregate count of venues
-    filter != price 1 2))
+  [[19]]
+  (Q aggregate count of venues
+     filter != price 1 2
+     return rows (format-rows-by [int])))
 
 
 ;; +-------------------------------------------------------------------------------------------------------------+
@@ -1139,7 +1122,8 @@
           aggregate count of incidents
           breakout ["datetime_field" (id :incidents :timestamp) "as" unit]
           limit 10
-          return rows)))
+          return rows (format-rows-by [(fn [x] (if (number? x) (int x) x))
+                                       int]))))
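+;; (the first column is a timestamp for most breakout units but a plain number for units like
+;;  :hour-of-day or :day-of-week, hence the conditional int coercion above)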
 
 (datasets/expect-with-all-engines
   (cond
@@ -1182,18 +1166,6 @@
 
 (datasets/expect-with-all-engines
   (cond
-    (contains? #{:mongo :redshift :sqlserver} *engine*)
-    [[#inst "2015-06-01T17:31" 1]
-     [#inst "2015-06-01T23:06" 1]
-     [#inst "2015-06-02T00:23" 1]
-     [#inst "2015-06-02T01:55" 1]
-     [#inst "2015-06-02T04:04" 1]
-     [#inst "2015-06-02T04:19" 1]
-     [#inst "2015-06-02T09:13" 1]
-     [#inst "2015-06-02T12:37" 1]
-     [#inst "2015-06-02T15:20" 1]
-     [#inst "2015-06-02T18:11" 1]]
-
     (= *engine* :sqlite)
     [["2015-06-01 10:31:00" 1]
      ["2015-06-01 16:06:00" 1]
@@ -1206,6 +1178,18 @@
      ["2015-06-02 08:20:00" 1]
      ["2015-06-02 11:11:00" 1]]
 
+    (i/has-questionable-timezone-support? *data-loader*)
+    [[#inst "2015-06-01T17:31" 1]
+     [#inst "2015-06-01T23:06" 1]
+     [#inst "2015-06-02T00:23" 1]
+     [#inst "2015-06-02T01:55" 1]
+     [#inst "2015-06-02T04:04" 1]
+     [#inst "2015-06-02T04:19" 1]
+     [#inst "2015-06-02T09:13" 1]
+     [#inst "2015-06-02T12:37" 1]
+     [#inst "2015-06-02T15:20" 1]
+     [#inst "2015-06-02T18:11" 1]]
+
     :else
     [[#inst "2015-06-01T10:31" 1]
      [#inst "2015-06-01T16:06" 1]
@@ -1234,18 +1218,6 @@
 
 (datasets/expect-with-all-engines
   (cond
-    (contains? #{:mongo :redshift :sqlserver} *engine*)
-    [[#inst "2015-06-01T17" 1]
-     [#inst "2015-06-01T23" 1]
-     [#inst "2015-06-02T00" 1]
-     [#inst "2015-06-02T01" 1]
-     [#inst "2015-06-02T04" 2]
-     [#inst "2015-06-02T09" 1]
-     [#inst "2015-06-02T12" 1]
-     [#inst "2015-06-02T15" 1]
-     [#inst "2015-06-02T18" 1]
-     [#inst "2015-06-02T20" 1]]
-
     (= *engine* :sqlite)
     [["2015-06-01 10:00:00" 1]
      ["2015-06-01 16:00:00" 1]
@@ -1258,6 +1230,18 @@
      ["2015-06-02 11:00:00" 1]
      ["2015-06-02 13:00:00" 1]]
 
+    (i/has-questionable-timezone-support? *data-loader*)
+    [[#inst "2015-06-01T17" 1]
+     [#inst "2015-06-01T23" 1]
+     [#inst "2015-06-02T00" 1]
+     [#inst "2015-06-02T01" 1]
+     [#inst "2015-06-02T04" 2]
+     [#inst "2015-06-02T09" 1]
+     [#inst "2015-06-02T12" 1]
+     [#inst "2015-06-02T15" 1]
+     [#inst "2015-06-02T18" 1]
+     [#inst "2015-06-02T20" 1]]
+
     :else
     [[#inst "2015-06-01T10" 1]
      [#inst "2015-06-01T16" 1]
@@ -1272,77 +1256,70 @@
   (sad-toucan-incidents-with-bucketing :hour))
 
 (datasets/expect-with-all-engines
-  (if-questionable-timezone-support
-   [[0 13] [1  8] [2  4] [3  7] [4  5] [5 13] [6 10] [7  8] [8  9] [9  7]]
-   [[0  8] [1  9] [2  7] [3 10] [4 10] [5  9] [6  6] [7  5] [8  7] [9  7]])
+  (if (i/has-questionable-timezone-support? *data-loader*)
+   [[0 13] [1 8] [2 4] [3  7] [4  5] [5 13] [6 10] [7 8] [8 9] [9 7]]
+   [[0  8] [1 9] [2 7] [3 10] [4 10] [5  9] [6  6] [7 5] [8 7] [9 7]])
   (sad-toucan-incidents-with-bucketing :hour-of-day))
 
 (datasets/expect-with-all-engines
   (cond
-    (contains? #{:mongo :redshift :sqlserver} *engine*)
-    [[#inst "2015-06-01T07" 6]
-     [#inst "2015-06-02T07" 10]
-     [#inst "2015-06-03T07" 4]
-     [#inst "2015-06-04T07" 9]
-     [#inst "2015-06-05T07" 9]
-     [#inst "2015-06-06T07" 8]
-     [#inst "2015-06-07T07" 8]
-     [#inst "2015-06-08T07" 9]
-     [#inst "2015-06-09T07" 7]
-     [#inst "2015-06-10T07" 9]]
-
     (= *engine* :sqlite)
-    [["2015-06-01" 6]
+    [["2015-06-01"  6]
      ["2015-06-02" 10]
-     ["2015-06-03" 4]
-     ["2015-06-04" 9]
-     ["2015-06-05" 9]
-     ["2015-06-06" 8]
-     ["2015-06-07" 8]
-     ["2015-06-08" 9]
-     ["2015-06-09" 7]
-     ["2015-06-10" 9]]
+     ["2015-06-03"  4]
+     ["2015-06-04"  9]
+     ["2015-06-05"  9]
+     ["2015-06-06"  8]
+     ["2015-06-07"  8]
+     ["2015-06-08"  9]
+     ["2015-06-09"  7]
+     ["2015-06-10"  9]]
+
+    (i/has-questionable-timezone-support? *data-loader*)
+    [[#inst "2015-06-01T07"  6]
+     [#inst "2015-06-02T07" 10]
+     [#inst "2015-06-03T07"  4]
+     [#inst "2015-06-04T07"  9]
+     [#inst "2015-06-05T07"  9]
+     [#inst "2015-06-06T07"  8]
+     [#inst "2015-06-07T07"  8]
+     [#inst "2015-06-08T07"  9]
+     [#inst "2015-06-09T07"  7]
+     [#inst "2015-06-10T07"  9]]
 
     :else
-    [[#inst "2015-06-01T07" 8]
-     [#inst "2015-06-02T07" 9]
-     [#inst "2015-06-03T07" 9]
-     [#inst "2015-06-04T07" 4]
+    [[#inst "2015-06-01T07"  8]
+     [#inst "2015-06-02T07"  9]
+     [#inst "2015-06-03T07"  9]
+     [#inst "2015-06-04T07"  4]
      [#inst "2015-06-05T07" 11]
-     [#inst "2015-06-06T07" 8]
-     [#inst "2015-06-07T07" 6]
+     [#inst "2015-06-06T07"  8]
+     [#inst "2015-06-07T07"  6]
      [#inst "2015-06-08T07" 10]
-     [#inst "2015-06-09T07" 6]
+     [#inst "2015-06-09T07"  6]
      [#inst "2015-06-10T07" 10]])
   (sad-toucan-incidents-with-bucketing :day))
 
 (datasets/expect-with-all-engines
-  (if-questionable-timezone-support
+  (if (i/has-questionable-timezone-support? *data-loader*)
    [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]]
    [[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]])
   (sad-toucan-incidents-with-bucketing :day-of-week))
 
 (datasets/expect-with-all-engines
-  (if-questionable-timezone-support
+  (if (i/has-questionable-timezone-support? *data-loader*)
    [[1  6] [2 10] [3  4] [4  9] [5  9] [6  8] [7  8] [8  9] [9  7] [10  9]]
    [[1  8] [2  9] [3  9] [4  4] [5 11] [6  8] [7  6] [8 10] [9  6] [10 10]])
   (sad-toucan-incidents-with-bucketing :day-of-month))
 
 (datasets/expect-with-all-engines
-  (if-questionable-timezone-support
+  (if (i/has-questionable-timezone-support? *data-loader*)
    [[152  6] [153 10] [154  4] [155  9] [156  9] [157  8] [158  8] [159  9] [160  7] [161  9]]
    [[152  8] [153  9] [154  9] [155  4] [156 11] [157  8] [158  6] [159 10] [160  6] [161 10]])
   (sad-toucan-incidents-with-bucketing :day-of-year))
 
 (datasets/expect-with-all-engines
   (cond
-    (contains? #{:mongo :redshift :sqlserver} *engine*)
-    [[#inst "2015-05-31T07" 46]
-     [#inst "2015-06-07T07" 47]
-     [#inst "2015-06-14T07" 40]
-     [#inst "2015-06-21T07" 60]
-     [#inst "2015-06-28T07" 7]]
-
     (= *engine* :sqlite)
     [["2015-05-31" 46]
      ["2015-06-07" 47]
@@ -1350,6 +1327,13 @@
      ["2015-06-21" 60]
      ["2015-06-28" 7]]
 
+    (i/has-questionable-timezone-support? *data-loader*)
+    [[#inst "2015-05-31T07" 46]
+     [#inst "2015-06-07T07" 47]
+     [#inst "2015-06-14T07" 40]
+     [#inst "2015-06-21T07" 60]
+     [#inst "2015-06-28T07" 7]]
+
     :else
     [[#inst "2015-05-31T07" 49]
      [#inst "2015-06-07T07" 47]
@@ -1383,7 +1367,7 @@
   (sad-toucan-incidents-with-bucketing :quarter))
 
 (datasets/expect-with-all-engines
-  [[(if (= *engine* :mongo) 2.0 2) 200]]
+  [[2 200]]
   (sad-toucan-incidents-with-bucketing :quarter-of-year))
 
 (datasets/expect-with-all-engines
@@ -1409,7 +1393,7 @@
   (with-temp-db [_ db]
     (Q aggregate count of checkins
        filter = ["datetime_field" (id :checkins :timestamp) "as" (name field-grouping)] (apply vector "relative_datetime" relative-datetime-args)
-       return first-row first)))
+       return first-row first int)))
 
 (datasets/expect-with-all-engines 4 (count-of-grouping (checkins:4-per-minute) :minute "current"))
 (datasets/expect-with-all-engines 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute"))
@@ -1435,7 +1419,7 @@
           :query    {:source_table (id :checkins)
                      :aggregation  ["count"]
                      :filter       ["TIME_INTERVAL" (id :checkins :timestamp) "current" "day"]}})
-        :data :rows first first)))
+        :data :rows first first int)))
 
 (datasets/expect-with-all-engines
   7
@@ -1446,7 +1430,7 @@
           :query    {:source_table (id :checkins)
                      :aggregation  ["count"]
                      :filter       ["TIME_INTERVAL" (id :checkins :timestamp) "last" "week"]}})
-        :data :rows first first)))
+        :data :rows first first int)))
 
 ;; Make sure that when referencing the same field multiple times with different units we return the one
 ;; that actually reflects the units the results are in.
diff --git a/test/metabase/driver/sync_test.clj b/test/metabase/driver/sync_test.clj
index 3c5b09d27c46d29b74efc6766f544236f64a11fa..1c6b3e78258d74444e474800f126e5f5c89f5ea5 100644
--- a/test/metabase/driver/sync_test.clj
+++ b/test/metabase/driver/sync_test.clj
@@ -5,7 +5,7 @@
             [metabase.driver :as driver]
             (metabase.driver [h2 :as h2]
                              [sync :as sync])
-            [metabase.driver.generic-sql.util :refer [korma-entity]]
+            [metabase.driver.generic-sql :refer [korma-entity]]
             (metabase.models [field :refer [Field]]
                              [field-values :refer [FieldValues]]
                              [foreign-key :refer [ForeignKey]]
diff --git a/test/metabase/test/data.clj b/test/metabase/test/data.clj
index 036ab95d6279bb2c7ce5ec8024a5d36f23f2b6d2..3ee1a332083e2f8a40e20139e24c6e44ffd72377 100644
--- a/test/metabase/test/data.clj
+++ b/test/metabase/test/data.clj
@@ -6,20 +6,27 @@
             (metabase.models [database :refer [Database]]
                              [field :refer [Field] :as field]
                              [table :refer [Table]])
-            (metabase.test.data [datasets :as datasets :refer [*data-loader*]]
+            (metabase.test.data [datasets :refer [*data-loader*]]
+                                [dataset-definitions :as defs]
                                 [h2 :as h2]
-                                [interface :refer :all])
+                                [interface :as i])
             [metabase.util :as u])
   (:import clojure.lang.Keyword
            (metabase.test.data.interface DatabaseDefinition
                                          FieldDefinition
                                          TableDefinition)))
 
+(declare get-or-create-database!)
+
 ;;; ## ---------------------------------------- Dataset-Independent Data Fns ----------------------------------------
 ;; These functions offer a generic way to get bits of info like Table + Field IDs from any of our many driver/dataset combos.
 
-(def ^:dynamic *get-db*
-  (fn [] (datasets/db *data-loader*)))
+(defn get-or-create-test-data-db!
+  "Get or create the Test Data database for DATA-LOADER, which defaults to `*data-loader*`."
+  ([]            (get-or-create-test-data-db! *data-loader*))
+  ([data-loader] (get-or-create-database! data-loader defs/test-data)))
+
+(def ^:dynamic *get-db* get-or-create-test-data-db!)
 
 (defn db
   "Return the current database.
@@ -36,7 +43,7 @@
        ~@body)))
 
 (defn format-name [nm]
-  (datasets/format-name *data-loader* (name nm)))
+  (i/format-name *data-loader* (name nm)))
 
 (defn- get-table-id-or-explode [db-id table-name]
   (let [table-name (format-name table-name)]
@@ -75,10 +82,11 @@
   []
   (contains? (driver/features *data-loader*) :foreign-keys))
 
-(defn default-schema []       (datasets/default-schema *data-loader*))
-(defn id-field-type []        (datasets/id-field-type *data-loader*))
-(defn sum-field-type []       (datasets/sum-field-type *data-loader*))
-(defn timestamp-field-type [] (datasets/timestamp-field-type *data-loader*))
+(defn default-schema [] (i/default-schema *data-loader*))
+(defn id-field-type  [] (i/id-field-type *data-loader*))
+
+(defn expected-base-type->actual [base-type]
+  (i/expected-base-type->actual *data-loader* base-type))
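+;; e.g. when testing against Mongo, (expected-base-type->actual :DateTimeField) is :DateField,
+;; since the Mongo test data stores timestamps as dates (see metabase.test.data.mongo below).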
 
 
 ;; ## Loading / Deleting Test Datasets
@@ -90,17 +98,17 @@
   ([^DatabaseDefinition database-definition]
    (get-or-create-database! *data-loader* database-definition))
   ([dataset-loader {:keys [database-name], :as ^DatabaseDefinition database-definition}]
-   (let [engine (engine dataset-loader)]
-     (or (metabase-instance database-definition engine)
+   (let [engine (i/engine dataset-loader)]
+     (or (i/metabase-instance database-definition engine)
          (do
            ;; Create the database
-           (create-db! dataset-loader database-definition)
+           (i/create-db! dataset-loader database-definition)
 
            ;; Add DB object to Metabase DB
            (let [db (ins Database
                       :name    database-name
                       :engine  (name engine)
-                      :details (database->connection-details dataset-loader :db database-definition))]
+                      :details (i/database->connection-details dataset-loader :db database-definition))]
 
              ;; Sync the database
              (driver/sync-database! db)
@@ -108,13 +116,13 @@
              ;; Add extra metadata like Field field-type, base-type, etc.
              (doseq [^TableDefinition table-definition (:table-definitions database-definition)]
                (let [table-name (:table-name table-definition)
-                     table      (delay (or  (metabase-instance table-definition db)
+                     table      (delay (or  (i/metabase-instance table-definition db)
                                             (throw (Exception. (format "Table '%s' not loaded from definiton:\n%s\nFound:\n%s"
                                                                        table-name
                                                                        (u/pprint-to-str (dissoc table-definition :rows))
                                                                        (u/pprint-to-str (sel :many :fields [Table :schema :name], :db_id (:id db))))))))]
                  (doseq [{:keys [field-name field-type special-type], :as field-definition} (:field-definitions table-definition)]
-                   (let [field (delay (or (metabase-instance field-definition @table)
+                   (let [field (delay (or (i/metabase-instance field-definition @table)
                                           (throw (Exception. (format "Field '%s' not loaded from definition:\n"
                                                                      field-name
                                                                      (u/pprint-to-str field-definition))))))]
@@ -134,10 +142,10 @@
    (remove-database! *data-loader* database-definition))
   ([dataset-loader ^DatabaseDefinition database-definition]
    ;; Delete the Metabase Database and associated objects
-   (cascade-delete Database :id (:id (metabase-instance database-definition (engine dataset-loader))))
+   (cascade-delete Database :id (:id (i/metabase-instance database-definition (i/engine dataset-loader))))
 
    ;; now delete the DBMS database
-   (destroy-db! dataset-loader database-definition)))
+   (i/destroy-db! dataset-loader database-definition)))
 
 
 (def ^:private loader->loaded-db-def
@@ -158,7 +166,7 @@
 
 (defn -with-temp-db [^DatabaseDefinition dbdef f]
   (let [loader *data-loader*
-        dbdef  (map->DatabaseDefinition (assoc dbdef :short-lived? true))]
+        dbdef  (i/map->DatabaseDefinition (assoc dbdef :short-lived? true))]
     (swap! loader->loaded-db-def conj [loader dbdef])
     (with-db (binding [*sel-disable-logging* true]
                (let [db (get-or-create-database! loader dbdef)]
diff --git a/test/metabase/test/data/datasets.clj b/test/metabase/test/data/datasets.clj
index 70d70435dc498d9e50ddd9d0183007842b8784de..5c469a3e96a42fde89ffde49ae095598815401dc 100644
--- a/test/metabase/test/data/datasets.clj
+++ b/test/metabase/test/data/datasets.clj
@@ -4,149 +4,11 @@
             [clojure.tools.logging :as log]
             [colorize.core :as color]
             [environ.core :refer [env]]
-            [expectations :refer :all]
-            [metabase.db :refer :all]
-            [metabase.driver :as driver]
-            (metabase.driver [h2 :refer [map->H2Driver]]
-                             [mongo :refer [map->MongoDriver]]
-                             [mysql :refer [map->MySQLDriver]]
-                             [postgres :refer [map->PostgresDriver]]
-                             [redshift :refer [map->RedshiftDriver]]
-                             [sqlite :refer [map->SQLiteDriver]]
-                             [sqlserver :refer [map->SQLServerDriver]])
-            (metabase.models [field :refer [Field]]
-                             [table :refer [Table]])
-            (metabase.test.data [dataset-definitions :as defs]
-                                [h2 :as h2]
-                                [mongo :as mongo]
-                                [mysql :as mysql]
-                                [postgres :as postgres]
-                                [redshift :as redshift]
-                                [sqlite :as sqlite]
-                                [sqlserver :as sqlserver])
-            [metabase.util :as u])
-  (:import metabase.driver.h2.H2Driver
-           metabase.driver.mongo.MongoDriver
-           metabase.driver.mysql.MySQLDriver
-           metabase.driver.postgres.PostgresDriver
-           metabase.driver.redshift.RedshiftDriver
-           metabase.driver.sqlite.SQLiteDriver
-           metabase.driver.sqlserver.SQLServerDriver))
+            [expectations :refer [expect]]
+            [metabase.driver :as driver]))
 
-;; # IDataset
-
-(defprotocol IDataset
-  "Functions needed to fetch test data for various drivers."
-  (load-data! [this]
-    "Load the test data for this dataset.")
-  (db [this]
-    "Return `Database` containing test data for this driver.")
-  (default-schema [this]
-    "Return the default schema name that tables for this DB should be expected to have.")
-  (format-name [this table-or-field-name]
-    "Transform a lowercase string `Table` or `Field` name in a way appropriate for this dataset
-     (e.g., `h2` would want to upcase these names; `mongo` would want to use `\"_id\"` in place of `\"id\"`.")
-  (id-field-type [this]
-    "Return the `base_type` of the `id` `Field` (e.g. `:IntegerField` or `:BigIntegerField`).")
-  (sum-field-type [this]
-    "Return the `base_type` of a aggregate summed field.")
-  (timestamp-field-type [this]
-    "Return the `base_type` of a `TIMESTAMP` `Field` like `users.last_login`."))
-
-
-;; # Implementations
-
-(defn- generic-load-data! [{:keys [dbpromise], :as this}]
-  (when-not (realized? dbpromise)
-    (deliver dbpromise (@(resolve 'metabase.test.data/get-or-create-database!) this defs/test-data)))
-  @dbpromise)
-
-(def ^:private IDatasetDefaultsMixin
-  {:load-data!     generic-load-data!
-   :db             generic-load-data!
-   :id-field-type  (constantly :IntegerField)
-   :sum-field-type (constantly :IntegerField)
-   :default-schema (constantly nil)})
-
-;; ## Mongo
-
-(extend MongoDriver
-  IDataset
-  (merge IDatasetDefaultsMixin
-         {:format-name          (fn [_ table-or-field-name]
-                                  (if (= table-or-field-name "id") "_id"
-                                      table-or-field-name))
-          :timestamp-field-type (constantly :DateField)}))
-
-
-;; ## SQL Drivers
-
-(def ^:private GenericSQLIDatasetMixin
-  (merge IDatasetDefaultsMixin
-         {:format-name          (fn [_ table-or-field-name]
-                                  table-or-field-name)
-          :timestamp-field-type (constantly :DateTimeField)}))
-
-
-(extend H2Driver
-  IDataset
-  (merge GenericSQLIDatasetMixin
-         {:default-schema (constantly "PUBLIC")
-          :format-name    (fn [_ table-or-field-name]
-                            (s/upper-case table-or-field-name))
-          :id-field-type  (constantly :BigIntegerField)
-          :sum-field-type (constantly :BigIntegerField)}))
-
-
-(extend MySQLDriver
-  IDataset
-  (merge GenericSQLIDatasetMixin
-         {:sum-field-type (constantly :BigIntegerField)}))
-
-
-(extend PostgresDriver
-  IDataset
-  (merge GenericSQLIDatasetMixin
-         {:default-schema (constantly "public")}))
-
-
-(extend RedshiftDriver
-  IDataset
-  (merge GenericSQLIDatasetMixin
-         {:default-schema (constantly redshift/session-schema-name)}))
-
-
-(extend SQLiteDriver
-  IDataset
-  GenericSQLIDatasetMixin)
-
-
-(extend SQLServerDriver
-  IDataset
-  (merge GenericSQLIDatasetMixin
-         {:default-schema (constantly "dbo")
-          :sum-field-type (constantly :IntegerField)}))
-
-
-;; # Concrete Instances
-
-(def ^:private engine->loader*
-  "Map of dataset keyword name -> dataset instance (i.e., an object that implements `IDataset`)."
-  {:h2        (map->H2Driver        {:dbpromise (promise)})
-   :mongo     (map->MongoDriver     {:dbpromise (promise)})
-   :mysql     (map->MySQLDriver     {:dbpromise (promise)})
-   :postgres  (map->PostgresDriver  {:dbpromise (promise)})
-   :redshift  (map->RedshiftDriver  {:dbpromise (promise)})
-   :sqlite    (map->SQLiteDriver    {:dbpromise (promise)})
-   :sqlserver (map->SQLServerDriver {:dbpromise (promise)})})
-
-(def ^:const all-valid-engines
-  "Set of names of all valid datasets."
-  (set (keys engine->loader*)))
-
-(defn engine->loader [engine]
-  (or (engine->loader* engine)
-      (throw (Exception.(format "Invalid engine: %s\nMust be one of: %s" engine all-valid-engines)))))
+(driver/find-and-load-drivers!)
+(def ^:const all-valid-engines (set (keys (driver/available-drivers))))
 
 
 ;; # Logic for determining which datasets to test against
@@ -196,7 +58,7 @@
   "The dataset we're currently testing against, bound by `with-engine`.
    This is just a regular driver, e.g. `MySQLDriver`, with an extra promise keyed by `:dbpromise`
    that is used to store the `test-data` dataset when you call `load-data!`."
-  (engine->loader default-engine))
+  (driver/engine->driver default-engine))
 
 
 
@@ -204,8 +66,8 @@
   "Bind `*data-loader*` to the dataset with ENGINE and execute BODY."
   [engine & body]
   `(let [engine# ~engine]
-     (binding [*engine*  engine#
-               *data-loader* (engine->loader engine#)]
+     (binding [*engine*      engine#
+               *data-loader* (driver/engine->driver engine#)]
        ~@body)))
 
 (defmacro when-testing-engine
@@ -248,7 +110,7 @@
   "Generate unit tests for all valid datasets; each test will only run if we're currently testing the corresponding dataset.
   `*data-loader*` is bound to the current dataset inside each test."
   [expected actual]
-  `(expect-with-engines ~all-valid-engines ~expected ~actual))
+  `(expect-with-engines all-valid-engines ~expected ~actual))
 
 (defmacro engine-case
   "Case statement that switches off of the current dataset.
@@ -262,3 +124,8 @@
                      [`(= *engine* ~engine)
                       then])
                    (partition 2 pairs))))
+
+;;; Load metabase.test.data.* namespaces for all available drivers
+(doseq [[engine _] (driver/available-drivers)]
+  (let [driver-test-namespace (symbol (str "metabase.test.data." (name engine)))]
+    (require driver-test-namespace)))
diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj
index 333de0354460f0b3f219f753148a8be5bf7d22b3..19b1868dc668cdc18e2603448f584a8f5b8ce4c1 100644
--- a/test/metabase/test/data/generic_sql.clj
+++ b/test/metabase/test/data/generic_sql.clj
@@ -5,9 +5,11 @@
             [clojure.tools.logging :as log]
             (korma [core :as k]
                    [db :as kdb])
+            [medley.core :as m]
             [metabase.driver :as driver]
             [metabase.driver.generic-sql :as sql]
-            [metabase.test.data.interface :as i]
+            (metabase.test.data [datasets :as datasets]
+                                [interface :as i])
             [metabase.util :as u])
   (:import clojure.lang.Keyword
            (metabase.test.data.interface DatabaseDefinition
@@ -76,7 +78,7 @@
     "*Optional*. Return a JDBC spec that should be used to connect to DBDEF.
      Uses `sql/connection-details->spec` by default.")
 
-  (load-table-data! [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef]
+  (load-data! [this ^DatabaseDefinition dbdef, ^TableDefinition tabledef]
     "*Optional*. Load the rows for a specific table into a DB.")
 
   (execute-sql! [loader ^Keyword context, ^DatabaseDefinition dbdef, ^String sql]
@@ -117,7 +119,8 @@
         dest-table-name (name dest-table-name)]
     (format "ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s);"
             (qualify+quote-name loader database-name table-name)
-            (quot (format "FK_%s_%s_%s" table-name field-name dest-table-name))
+            ;; limit FK constraint name to 30 chars since Oracle doesn't support names longer than that
+            (quot (apply str (take 30 (format "fk_%s_%s_%s" table-name field-name dest-table-name))))
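+            ;; (e.g. a hypothetical "fk_venues_category_id_categories" is truncated to "fk_venues_category_id_categori")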
             (quot field-name)
             (qualify+quote-name loader database-name dest-table-name)
             (quot (pk-field-name loader)))))
@@ -147,10 +150,9 @@
   ([loader db-name table-name field-name]
    (quote+combine-names loader (qualified-name-components loader db-name table-name field-name))))
 
-(defn- default-database->spec [loader context {:keys [short-lived?], :as dbdef}]
-  (assoc (sql/connection-details->spec loader (i/database->connection-details loader context dbdef))
-         :short-lived? short-lived?
-         :make-pool?   false))
+(defn- default-database->spec [loader context dbdef]
+  (let [spec (sql/connection-details->spec loader (i/database->connection-details loader context dbdef))]
+    (assoc spec :make-pool? (not (:short-lived? spec)))))
 
 (defn default-korma-entity [loader {:keys [database-name], :as dbdef} {:keys [table-name]}]
   (-> (k/create-entity (->> (qualified-name-components loader database-name table-name)
@@ -158,36 +160,103 @@
                             (apply str)))  ; korma will split on the periods and re-qualify the individual parts for us
       (k/database (kdb/create-db (database->spec loader :db dbdef)))))
 
-(defn default-load-table-data! [loader dbdef tabledef]
-  (let [rows              (:rows tabledef)
-        fields-for-insert (mapv :field-name (:field-definitions tabledef))
-        entity            (korma-entity loader dbdef tabledef)]
-    ;; Insert groups of 200 rows at a time
-    ;; otherwise SQL Server will be *very* snippy if we try to run queries with too many parameters in them
-    (doseq [group (partition-all 200 rows)]
-      (k/insert entity (k/values (mapv (partial zipmap fields-for-insert)
-                                       (for [row group]
-                                         (for [v row]
-                                           (if (instance? java.util.Date v) (java.sql.Timestamp. (.getTime ^java.util.Date v))
-                                               v)))))))))
+;;; Loading Table Data
+;; Since different DBs have constraints on how we can do this, the logic is broken out into a few different functions
+;; so you can compose together a loader that works with a given DB.
+;; (e.g. SQL Server has a low limit on how many ? args we can have in a prepared statement, so inserts need to be done in chunks;
+;;  Oracle doesn't understand the normal syntax for inserting multiple rows at a time, so we'll insert them one at a time instead)
+
+(defn load-data-get-rows
+  "Get a sequence of row maps for use in a korma `insert` when loading table data."
+  [loader dbdef tabledef]
+  (let [fields-for-insert (mapv (comp keyword :field-name)
+                                (:field-definitions tabledef))]
+    (for [row (:rows tabledef)]
+      (zipmap fields-for-insert (for [v row]
+                                  (if (instance? java.util.Date v)
+                                    (u/->Timestamp v)
+                                    v))))))
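+;; e.g. a hypothetical row ["Rasta" #inst "2015-06-01"] for fields "name" and "last_login" becomes
+;; {:name "Rasta", :last_login <java.sql.Timestamp 2015-06-01>}.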
+
+(defn load-data-add-ids
+  "Add IDs to each row, presumabily for doing a parallel insert. This arg should go before `load-data-chunked` or `load-data-one-at-a-time`."
+  [insert!]
+  (fn [rows]
+    (insert! (pmap (fn [[i row]]
+                     (assoc row :id (inc i)))
+                   (m/indexed rows)))))
+
+(defn load-data-chunked
+  "Insert rows in chunks, which default to 200 rows each."
+  ([insert!]                   (load-data-chunked map insert!))
+  ([map-fn insert!]            (load-data-chunked map-fn 200 insert!))
+  ([map-fn chunk-size insert!] (fn [rows]
+                                 (dorun (map-fn insert! (partition-all chunk-size rows))))))
+
+(defn load-data-one-at-a-time
+  "Insert rows one at a time."
+  ([insert!]        (load-data-one-at-a-time map insert!))
+  ([map-fn insert!] (fn [rows]
+                      (dorun (map-fn insert! rows)))))
+
+(defn load-data-with-debug-logging
+  "Add debug logging to the data loading fn. This should passed as the first arg to `make-load-data-fn`."
+  [insert!]
+  (fn [rows]
+    (println (u/format-color 'blue "Inserting %d rows like:\n%s" (count rows) (s/replace (k/sql-only (k/insert :some-table (k/values (first rows))))
+                                                                                         #"\"SOME-TABLE\""
+                                                                                         "[table]")))
+    (let [start-time (System/currentTimeMillis)]
+      (insert! rows)
+      (println (u/format-color 'green "Inserting %d rows took %d ms." (count rows) (- (System/currentTimeMillis) start-time))))))
+
+(defn make-load-data-fn
+  "Create a `load-data!` function. This creates a function to actually insert a row or rows, wraps it with any WRAP-INSERT-FNS,
+   then calls the resulting function with the rows to insert."
+  [& wrap-insert-fns]
+  (fn [loader dbdef tabledef]
+    (let [entity  (korma-entity loader dbdef tabledef)
+          insert! ((apply comp wrap-insert-fns) (fn [row-or-rows]
+                                                  (k/insert entity (k/values row-or-rows))))
+          rows    (load-data-get-rows loader dbdef tabledef)]
+      (insert! rows))))
+
+(def load-data-all-at-once!             "Insert all rows at once."                             (make-load-data-fn))
+(def load-data-chunked!                 "Insert rows in chunks of 200 at a time."              (make-load-data-fn load-data-chunked))
+(def load-data-one-at-a-time!           "Insert rows one at a time."                           (make-load-data-fn load-data-one-at-a-time))
+(def load-data-chunked-parallel!        "Insert rows in chunks of 200 at a time, in parallel." (make-load-data-fn load-data-add-ids (partial load-data-chunked pmap)))
+(def load-data-one-at-a-time-parallel!  "Insert rows one at a time, in parallel."              (make-load-data-fn load-data-add-ids (partial load-data-one-at-a-time pmap)))
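+
+;; For illustration only (hypothetical, not one of the canned loaders above): these helpers compose,
+;; so a driver with an unusually small parameter limit could build its own loader, e.g.
+(comment
+  (def load-data-small-chunks-parallel!
+    "Insert rows in chunks of 50 at a time, in parallel, with debug logging."
+    (make-load-data-fn load-data-with-debug-logging load-data-add-ids (partial load-data-chunked pmap 50))))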
+
 
 (defn default-execute-sql! [loader context dbdef sql]
   (let [sql (some-> sql s/trim)]
     (when (and (seq sql)
                ;; make sure SQL isn't just semicolons
                (not (s/blank? (s/replace sql #";" ""))))
-      (try
-        (jdbc/execute! (database->spec loader context dbdef) [sql] :transaction? false, :multi? true)
-        (catch java.sql.SQLException e
-          (println "Error executing SQL:" sql)
-          (println (format "Caught SQLException:\n%s"
-                           (with-out-str (jdbc/print-sql-exception-chain e))))
-          (throw e))
-        (catch Throwable e
-          (println "Error executing SQL:" sql)
-          (println (format "Caught Exception: %s %s\n%s" (class e) (.getMessage e)
-                           (with-out-str (.printStackTrace e))))
-          (throw e))))))
+      ;; Remove excess semicolons, otherwise snippy DBs like Oracle will barf
+      (let [sql (s/replace sql #";+" ";")]
+        (try
+          (jdbc/execute! (database->spec loader context dbdef) [sql] :transaction? false, :multi? true)
+          (catch java.sql.SQLException e
+            (println "Error executing SQL:" sql)
+            (println (format "Caught SQLException:\n%s"
+                             (with-out-str (jdbc/print-sql-exception-chain e))))
+            (throw e))
+          (catch Throwable e
+            (println "Error executing SQL:" sql)
+            (println (format "Caught Exception: %s %s\n%s" (class e) (.getMessage e)
+                             (with-out-str (.printStackTrace e))))
+            (throw e)))))))
 
 
 (def DefaultsMixin
@@ -200,7 +269,7 @@
    :drop-table-if-exists-sql  default-drop-table-if-exists-sql
    :execute-sql!              default-execute-sql!
    :korma-entity              default-korma-entity
-   :load-table-data!          default-load-table-data!
+   :load-data!                load-data-chunked!
    :pk-field-name             (constantly "id")
    :qualified-name-components default-qualified-name-components
    :qualify+quote-name        default-qualify+quote-name
@@ -211,12 +280,15 @@
 
 (defn sequentially-execute-sql!
   "Alternative implementation of `execute-sql!` that executes statements one at a time for drivers
-   that don't support executing multiple statements at once."
+   that don't support executing multiple statements at once.
+
+   Since there are some cases where you might want to execute compound statements without splitting, an upside-down ampersand (`⅋`) is understood as an
+   \"escaped\" semicolon in the resulting SQL statement."
   [loader context dbdef sql]
   (when sql
     (doseq [statement (map s/trim (s/split sql #";+"))]
       (when (seq statement)
-        (default-execute-sql! loader context dbdef statement)))))
+        (default-execute-sql! loader context dbdef (s/replace statement #"⅋" ";"))))))
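+
+;; Illustration (hypothetical SQL): statements are split on ";", so a compound statement such as a
+;; PL/SQL-style block can keep its inner semicolon by writing it as ⅋. For example,
+;; (sequentially-execute-sql! loader :db dbdef "BEGIN NULL⅋ END;") executes the single statement
+;; "BEGIN NULL; END".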
 
 (defn- create-db! [loader {:keys [table-definitions], :as dbdef}]
   ;; Exec SQL for creating the DB
@@ -242,12 +314,25 @@
 
   ;; Now load the data for each Table
   (doseq [tabledef table-definitions]
-    (load-table-data! loader dbdef tabledef)))
+    (load-data! loader dbdef tabledef)))
 
 (defn- destroy-db! [loader dbdef]
   (execute-sql! loader :server dbdef (drop-db-if-exists-sql loader dbdef)))
 
 (def IDatasetLoaderMixin
   "Mixin for `IGenericSQLDatasetLoader` types to implemnt `create-db!` and `destroy-db!` from `IDatasetLoader`."
-  {:create-db!  create-db!
-   :destroy-db! destroy-db!})
+  (merge i/IDatasetLoaderDefaultsMixin
+         {:create-db!  create-db!
+          :destroy-db! destroy-db!}))
+
+
+;;; ## Various Util Fns
+
+(defn execute-when-testing!
+  "Execute a prepared SQL-AND-ARGS against Database with spec returned by GET-CONNECTION-SPEC only when running tests against ENGINE.
+   Useful for doing engine-specific setup or teardown."
+  [engine get-connection-spec & sql-and-args]
+  (datasets/when-testing-engine engine
+    (println (u/format-color 'blue "[%s] %s" (name engine) (first sql-and-args)))
+    (jdbc/execute! (get-connection-spec) sql-and-args)
+    (println (u/format-color 'blue "[OK]"))))
diff --git a/test/metabase/test/data/h2.clj b/test/metabase/test/data/h2.clj
index 9b142eb8d13117bd91084cb2ecf57dbb0db9a46b..5652dead36d28e3ef9a192ca041d66133bf40717 100644
--- a/test/metabase/test/data/h2.clj
+++ b/test/metabase/test/data/h2.clj
@@ -4,6 +4,7 @@
             [clojure.string :as s]
             (korma [core :as k]
                    [db :as kdb])
+            metabase.driver.h2
             (metabase.test.data [generic-sql :as generic]
                                 [interface :as i]))
   (:import metabase.driver.h2.H2Driver))
@@ -71,22 +72,28 @@
     (merge mixin
            {:create-db-sql             create-db-sql
             :create-table-sql          create-table-sql
-            :drop-db-if-exists-sql     (constantly nil)
-            :korma-entity              korma-entity
-            :pk-field-name             (constantly "ID")
-            :pk-sql-type               (constantly "BIGINT AUTO_INCREMENT")
-            :quote-name                quote-name
             :database->spec            (fn [this context dbdef]
                                          ;; Don't use the h2 driver implementation, which makes the connection string read-only & if-exists only
                                          (kdb/h2 (i/database->connection-details this context dbdef)))
+            :drop-db-if-exists-sql     (constantly nil)
             :execute-sql!              (fn [this _ dbdef sql]
                                          ;; we always want to use 'server' context when execute-sql! is called
                                          ;; (never try connect as GUEST, since we're not giving them priviledges to create tables / etc)
                                          (execute-sql! this :server dbdef sql))
             :field-base-type->sql-type (fn [_ base-type]
-                                         (field-base-type->sql-type base-type))}))
+                                         (field-base-type->sql-type base-type))
+            :korma-entity              korma-entity
+            :load-data!                generic/load-data-all-at-once!
+            :pk-field-name             (constantly "ID")
+            :pk-sql-type               (constantly "BIGINT AUTO_INCREMENT")
+            :quote-name                quote-name}))
 
   i/IDatasetLoader
   (merge generic/IDatasetLoaderMixin
-         {:database->connection-details database->connection-details
-          :engine                       (constantly :h2)}))
+         {:database->connection-details       database->connection-details
+          :default-schema                     (constantly "PUBLIC")
+          :engine                             (constantly :h2)
+          :format-name                        (fn [_ table-or-field-name]
+                                                (s/upper-case table-or-field-name))
+          :has-questionable-timezone-support? (constantly false)
+          :id-field-type                      (constantly :BigIntegerField)}))
diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj
index 7d7ac5f97721d08d32854f0a1cc331b25ae9b72f..ff6d296defd859393010d0c8e8d852b59cb26736 100644
--- a/test/metabase/test/data/interface.clj
+++ b/test/metabase/test/data/interface.clj
@@ -5,6 +5,7 @@
   actual physical RDBMS database. This functionality allows us to easily test with multiple datasets."
   (:require [clojure.string :as s]
             [metabase.db :refer :all]
+            [metabase.driver :as driver]
             (metabase.models [database :refer [Database]]
                              [field :refer [Field] :as field]
                              [table :refer [Table]]))
@@ -60,7 +61,8 @@
 ;; ## IDatasetLoader
 
 (defprotocol IDatasetLoader
-  "Methods for creating, deleting, and populating *pyhsical* DBMS databases, tables, and fields."
+  "Methods for creating, deleting, and populating *pyhsical* DBMS databases, tables, and fields.
+   Methods marked *OPTIONAL* have default implementations in `IDatasetLoaderDefaultsMixin`."
   (engine [this]
     "Return the engine keyword associated with this database, e.g. `:h2` or `:mongo`.")
 
@@ -79,7 +81,36 @@
   (destroy-db! [this ^DatabaseDefinition database-definition]
     "Destroy database, if any, associated with DATABASE-DEFINITION.
      This refers to destroying a *DBMS* database -- removing an H2 file, dropping a Postgres database, etc.
-     This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`."))
+     This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`.")
+
+  (default-schema [this]
+    "*OPTIONAL* Return the default schema name that tables for this DB should be expected to have.")
+
+  (expected-base-type->actual [this base-type]
+    "*OPTIONAL*. Return the base type type that is actually used to store `Fields` of BASE-TYPE.
+     The default implementation of this method is an identity fn. This is provided so DBs that don't support a given BASE-TYPE used in the test data
+     can specify what type we should expect in the results instead.
+     For example, Oracle has `INTEGER` data types, so `:IntegerField` test values are instead stored as `NUMBER`, which we map to `:DecimalField`.")
+
+  (format-name [this table-or-field-name]
+    "*OPTIONAL* Transform a lowercase string `Table` or `Field` name in a way appropriate for this dataset
+     (e.g., `h2` would want to upcase these names; `mongo` would want to use `\"_id\"` in place of `\"id\"`.")
+
+  (has-questionable-timezone-support? [this]
+    "*OPTIONAL*. Does this driver have \"questionable\" timezone support? (i.e., does it group things by UTC instead of the `US/Pacific` when we're testing?)
+     Defaults to `(not (contains? (metabase.driver/features this) :set-timezone)`")
+
+  (id-field-type [this]
+    "*OPTIONAL* Return the `base_type` of the `id` `Field` (e.g. `:IntegerField` or `:BigIntegerField`). Defaults to `:IntegerField`."))
+
+(def IDatasetLoaderDefaultsMixin
+  {:expected-base-type->actual         (fn [_ base-type] base-type)
+   :default-schema                     (constantly nil)
+   :format-name                        (fn [_ table-or-field-name]
+                                         table-or-field-name)
+   :has-questionable-timezone-support? (fn [driver]
+                                         (not (contains? (driver/features driver) :set-timezone)))
+   :id-field-type                      (constantly :IntegerField)})
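+
+;; For illustration (hypothetical driver): extenders merge this mixin and override only the methods
+;; they need (required methods like `create-db!` must still be supplied), e.g.
+(comment
+  (extend SomeDriver
+    IDatasetLoader
+    (merge IDatasetLoaderDefaultsMixin
+           {:engine        (constantly :somedb)
+            :id-field-type (constantly :BigIntegerField)})))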
 
 
 ;; ## Helper Functions for Creating New Definitions
diff --git a/test/metabase/test/data/mongo.clj b/test/metabase/test/data/mongo.clj
index 42ba4d447d9029b42ef37edaa42eb6d055e7f336..7a77741d467d99ea244f355825a128fb6b0ffcba 100644
--- a/test/metabase/test/data/mongo.clj
+++ b/test/metabase/test/data/mongo.clj
@@ -1,6 +1,7 @@
 (ns metabase.test.data.mongo
   (:require (monger [collection :as mc]
                     [core :as mg])
+            metabase.driver.mongo
             [metabase.driver.mongo.util :refer [with-mongo-connection]]
             [metabase.test.data.interface :as i])
   (:import metabase.driver.mongo.MongoDriver))
@@ -41,7 +42,15 @@
 
 (extend MongoDriver
   i/IDatasetLoader
-  {:create-db!                   create-db!
-   :destroy-db!                  destroy-db!
-   :database->connection-details database->connection-details
-   :engine                       (constantly :mongo)})
+  (merge i/IDatasetLoaderDefaultsMixin
+         {:create-db!                   create-db!
+          :destroy-db!                  destroy-db!
+          :database->connection-details database->connection-details
+          :engine                       (constantly :mongo)
+          :expected-base-type->actual   (fn [_ base-type]
+                                          (let [expected->actual {:DateTimeField :DateField}]
+                                            (or (expected->actual base-type)
+                                                base-type)))
+          :format-name                  (fn [_ table-or-field-name]
+                                          (if (= table-or-field-name "id") "_id"
+                                              table-or-field-name))}))
diff --git a/test/metabase/test/data/mysql.clj b/test/metabase/test/data/mysql.clj
index ab6edbd71e5993b1dc2f715a6039a366feb01e10..ebd3142dec7d27e266a50e9c9de0e322a8b9a1a0 100644
--- a/test/metabase/test/data/mysql.clj
+++ b/test/metabase/test/data/mysql.clj
@@ -2,6 +2,7 @@
   "Code for creating / destroying a MySQL database from a `DatabaseDefinition`."
   (:require [clojure.string :as s]
             [environ.core :refer [env]]
+            metabase.driver.mysql
             (metabase.test.data [generic-sql :as generic]
                                 [interface :as i]))
   (:import metabase.driver.mysql.MySQLDriver))
@@ -35,10 +36,11 @@
   generic/IGenericSQLDatasetLoader
   (merge generic/DefaultsMixin
          {:execute-sql!              generic/sequentially-execute-sql!
-          :pk-sql-type               (constantly "INTEGER NOT NULL AUTO_INCREMENT")
-          :quote-name                quote-name
           :field-base-type->sql-type (fn [_ base-type]
-                                       (field-base-type->sql-type base-type))})
+                                       (field-base-type->sql-type base-type))
+          :load-data!                generic/load-data-all-at-once!
+          :pk-sql-type               (constantly "INTEGER NOT NULL AUTO_INCREMENT")
+          :quote-name                quote-name})
   i/IDatasetLoader
   (merge generic/IDatasetLoaderMixin
          {:database->connection-details database->connection-details
diff --git a/test/metabase/test/data/postgres.clj b/test/metabase/test/data/postgres.clj
index d7405df4b58a8e7cd02d9543a4793e4fbd51d788..1aca7e07cdbe8145dbed8d169df8b5799ea434fc 100644
--- a/test/metabase/test/data/postgres.clj
+++ b/test/metabase/test/data/postgres.clj
@@ -1,6 +1,7 @@
 (ns metabase.test.data.postgres
   "Code for creating / destroying a Postgres database from a `DatabaseDefinition`."
   (:require [environ.core :refer [env]]
+            metabase.driver.postgres
             (metabase.test.data [generic-sql :as generic]
                                 [interface :as i]))
   (:import metabase.driver.postgres.PostgresDriver))
@@ -32,10 +33,12 @@
   generic/IGenericSQLDatasetLoader
   (merge generic/DefaultsMixin
          {:drop-table-if-exists-sql  generic/drop-table-if-exists-cascade-sql
-          :pk-sql-type               (constantly "SERIAL")
           :field-base-type->sql-type (fn [_ base-type]
-                                       (field-base-type->sql-type base-type))})
+                                       (field-base-type->sql-type base-type))
+          :load-data!                generic/load-data-all-at-once!
+          :pk-sql-type               (constantly "SERIAL")})
   i/IDatasetLoader
   (merge generic/IDatasetLoaderMixin
          {:database->connection-details database->connection-details
+          :default-schema               (constantly "public")
           :engine                       (constantly :postgres)}))
diff --git a/test/metabase/test/data/redshift.clj b/test/metabase/test/data/redshift.clj
index 795a4eb02220cb6e0d6b3d3a3539a554a4f02161..731ff0f52897d48db866daa7575f8cc54fa9234f 100644
--- a/test/metabase/test/data/redshift.clj
+++ b/test/metabase/test/data/redshift.clj
@@ -3,7 +3,8 @@
             [clojure.tools.logging :as log]
             [clojure.string :as s]
             [environ.core :refer [env]]
-            [metabase.driver.generic-sql :as sql]
+            (metabase.driver [generic-sql :as sql]
+                             redshift)
             (metabase.test.data [generic-sql :as generic]
                                 [interface :as i]
                                 [postgres :as postgres])
@@ -72,17 +73,16 @@
   (merge generic/IDatasetLoaderMixin
          {:database->connection-details (fn [& _]
                                           @db-connection-details)
+          :default-schema               (constantly session-schema-name)
           :engine                       (constantly :redshift)}))
 
 
 ;;; Create + destroy the schema used for this test session
 
 (defn- execute-when-testing-redshift! [format-str & args]
-  (when (contains? @(resolve 'metabase.test.data.datasets/test-engines) :redshift)
-    (let [sql (apply format format-str args)]
-      (log/info (u/format-color 'blue "[Redshift] %s" sql))
-      (jdbc/execute! (sql/connection-details->spec (RedshiftDriver.) @db-connection-details)
-                     [sql]))))
+  (generic/execute-when-testing! :redshift
+    (fn [] (sql/connection-details->spec (RedshiftDriver.) @db-connection-details))
+    (apply format format-str args)))
 
 (defn- create-session-schema!
   {:expectations-options :before-run}
diff --git a/test/metabase/test/data/sqlite.clj b/test/metabase/test/data/sqlite.clj
index a80ce35ece0eec9cbb4c3deaec08d5017681c595..d0788c7d944016de7d67b4dc2837f2ea2885feec 100644
--- a/test/metabase/test/data/sqlite.clj
+++ b/test/metabase/test/data/sqlite.clj
@@ -1,6 +1,7 @@
 (ns metabase.test.data.sqlite
   (:require [clojure.string :as s]
             [korma.core :as k]
+            metabase.driver.sqlite
             (metabase.test.data [generic-sql :as generic]
                                 [interface :as i])
             [metabase.util :as u])
@@ -23,14 +24,14 @@
    :TextField       "TEXT"
    :TimeField       "TIME"})
 
-(defn- load-table-data! [loader dbdef tabledef]
-  ;; Our SQLite JDBC driver doesn't seem to handle Dates/Timestamps correctly so just convert them to string before INSERTing them into the Database
-  (generic/default-load-table-data! loader dbdef (update tabledef :rows (fn [rows]
-                                                                          (for [row rows]
-                                                                            (for [v row]
-                                                                              (if (instance? java.util.Date v)
-                                                                                (k/raw (format "DATETIME('%s')" (u/date->iso-8601 v)))
-                                                                                v)))))))
+(defn- load-data-stringify-dates
+  "Our SQLite JDBC driver doesn't seem to handle Dates/Timestamps correctly so just convert them to string before INSERTing them into the Database."
+  [insert!]
+  (fn [rows]
+    (insert! (for [row rows]
+               (into {} (for [[k v] row]
+                          [k (u/cond-as-> v v
+                               (instance? java.util.Date v) (k/raw (format "DATETIME('%s')" (u/date->iso-8601 v))))]))))))
 
 (extend SQLiteDriver
   generic/IGenericSQLDatasetLoader
@@ -39,7 +40,7 @@
           :create-db-sql             (constantly nil)
           :drop-db-if-exists-sql     (constantly nil)
           :execute-sql!              generic/sequentially-execute-sql!
-          :load-table-data!          load-table-data!
+          :load-data!                (generic/make-load-data-fn load-data-stringify-dates generic/load-data-chunked)
           :pk-sql-type               (constantly "INTEGER")
           :field-base-type->sql-type (fn [_ base-type]
                                        (field-base-type->sql-type base-type))})
diff --git a/test/metabase/test/data/sqlserver.clj b/test/metabase/test/data/sqlserver.clj
index caae6ec866b9ac4c9f6a8ab1aa47a3585f350f2d..ff01acc7dc3eb1f1d14a1aeed02004139fdecf29 100644
--- a/test/metabase/test/data/sqlserver.clj
+++ b/test/metabase/test/data/sqlserver.clj
@@ -3,8 +3,10 @@
   (:require [clojure.java.jdbc :as jdbc]
             [clojure.string :as s]
             [environ.core :refer [env]]
-            [metabase.driver.generic-sql :as sql]
-            (metabase.test.data [generic-sql :as generic]
+            (metabase.driver [generic-sql :as sql]
+                             sqlserver)
+            (metabase.test.data [datasets :as datasets]
+                                [generic-sql :as generic]
                                 [interface :as i]))
   (:import metabase.driver.sqlserver.SQLServerDriver))
 
@@ -94,7 +96,9 @@
                                             (swap! db-name-counter inc)
                                             (create-db! this dbdef))
             :database->connection-details database->connection-details
-            :engine                       (constantly :sqlserver)})))
+            :default-schema               (constantly "dbo")
+            :engine                       (constantly :sqlserver)
+            :sum-field-type               (constantly :IntegerField)})))
 
 
 (defn- cleanup-leftover-dbs
@@ -102,7 +106,7 @@
    This is important because we're limited to a quota of 30 DBs on RDS."
   {:expectations-options :before-run}
   []
-  (when (contains? @(resolve 'metabase.test.data.datasets/test-engines) :sqlserver)
+  (datasets/when-testing-engine :sqlserver
     (let [connection-spec (sql/connection-details->spec (SQLServerDriver.) (database->connection-details nil :server nil))
           leftover-dbs    (mapv :name (jdbc/query connection-spec "SELECT name
                                                                    FROM   master.dbo.sysdatabases
diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj
index 7fa69672d7b8fdb9dea854d93fbd00aa57208471..d85e906743c5c4fcac0507beab389f8ff9514298 100644
--- a/test/metabase/test/util.clj
+++ b/test/metabase/test/util.clj
@@ -117,6 +117,7 @@
   {:pre [(symbol? namespc)
          (symbol? fn-name)
          (every? symbol? more)]}
-  `(do (def ~fn-name (ns-resolve '~namespc '~fn-name))
+  `(do (require '~namespc)
+       (def ~(vary-meta fn-name assoc :private true) (ns-resolve '~namespc '~fn-name))
        ~(when (seq more)
           `(resolve-private-fns ~namespc ~(first more) ~@(rest more)))))
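+
+;; e.g. (resolve-private-fns metabase.some.namespace some-private-fn) (hypothetical names) requires
+;; the namespace, then defs a private var in the current namespace bound to the resolved fn.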
diff --git a/test/metabase/test_setup.clj b/test/metabase/test_setup.clj
index f83f7787b7d3eb2a81bc70d5f1cf8d5652ed0ab8..483ad88b58f38bc49e2e16e2a67531f686b280cc 100644
--- a/test/metabase/test_setup.clj
+++ b/test/metabase/test_setup.clj
@@ -19,7 +19,6 @@
 ;; ## GENERAL SETTINGS
 
 ;; Don't run unit tests whenever JVM shuts down
-;; it's pretty annoying to have our DB reset all the time
 (expectations/disable-run-on-shutdown)
 
 
@@ -68,20 +67,7 @@
 
 ;; this is a little odd, but our normal `test-startup` function won't work for loading the drivers because
 ;; they need to be available at evaluation time for some of the unit tests to work properly, so we put this here
-(defonce ^:private loaded-drivers (driver/find-and-load-drivers!))
-
-(defn- load-test-data!
-  "Call `load-data!` on all the datasets we're testing against."
-  []
-  (doseq [engine datasets/test-engines]
-    (log/info (format "Loading test data: %s..." (name engine)))
-    (let [dataset (datasets/engine->loader engine)]
-      (datasets/load-data! dataset)
-
-      ;; Check that dataset is loaded and working
-      (datasets/with-engine engine
-        (assert (Table (data/id :venues))
-          (format "Loading test dataset %s failed: could not find 'venues' Table!" engine))))))
+(driver/find-and-load-drivers!)
 
 (defn test-startup
   {:expectations-options :before-run}
@@ -93,7 +79,6 @@
     (try
       (log/info "Setting up test DB and running migrations...")
       (db/setup-db :auto-migrate true)
-      (load-test-data!)
       (setting/set :site-name "Metabase Test")
       (core/initialization-complete!)
       ;; If test setup fails exit right away