diff --git a/test/metabase/query_processor/middleware/parameters/sql_test.clj b/test/metabase/query_processor/middleware/parameters/sql_test.clj
index ef550dae4c825c78e2be3dae298dcc8c09df149a..8773547852ab2e6f3e9747d056a1838ec8045089 100644
--- a/test/metabase/query_processor/middleware/parameters/sql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/sql_test.clj
@@ -558,7 +558,8 @@
   "Get the identifier used for `checkins` for the current driver by looking at what the driver uses when converting MBQL
   to SQL. Different drivers qualify to different degrees (i.e. `table` vs `schema.table` vs `database.schema.table`)."
   []
-  (second (re-find #"FROM\s([^\s]+)" (:query (qp/query->native (data/mbql-query checkins))))))
+  (let [sql (:query (qp/query->native (data/mbql-query checkins)))]
+    (second (re-find #"FROM\s([^\s()]+)" sql))))
 
 ;; as with the MBQL parameters tests Redshift and Crate fail for unknown reasons; disable their tests for now
 (def ^:private ^:const sql-parameters-engines
@@ -647,7 +648,12 @@
     (first-row
       (process-native
         :native     {:query         (case datasets/*engine*
-                                      :oracle "SELECT cast({{date}} as date) from dual"
+                                      :bigquery
+                                      "SELECT {{date}} as date"
+
+                                      :oracle
+                                      "SELECT cast({{date}} as date) from dual"
+
                                       "SELECT cast({{date}} as date)")
                      :template-tags {"date" {:name "date" :display-name "Date" :type :date}}}
         :parameters [{:type :date/single :target [:variable [:template-tag "date"]] :value "2018-04-18"}]))))
diff --git a/test/metabase/query_processor_test/nested_queries_test.clj b/test/metabase/query_processor_test/nested_queries_test.clj
index 31ce9805f3e11ecf7adb7c14426aeb65dc3f56ec..ad61977648cffa34a823cb29630969913864d0cf 100644
--- a/test/metabase/query_processor_test/nested_queries_test.clj
+++ b/test/metabase/query_processor_test/nested_queries_test.clj
@@ -181,7 +181,7 @@
 ;; make sure we can do a query with breakout and aggregation using a SQL source query
 
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :nested-queries)
-  breakout-results
+  breakout-results
   (rows+cols
     (format-rows-by [int int]
       (qp/process-query
diff --git a/test/metabase/sync/sync_metadata/comments_test.clj b/test/metabase/sync/sync_metadata/comments_test.clj
index 2954c56320e2efa6d6b1aac3b9661eb2651d7564..b28c818afa4c23f7be344e556786922745153d8d 100644
--- a/test/metabase/sync/sync_metadata/comments_test.clj
+++ b/test/metabase/sync/sync_metadata/comments_test.clj
@@ -61,7 +61,7 @@
     {:name (data/format-name "comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db comment-after-sync]
     ;; modify the source DB to add the comment and resync
-    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment") true)
+    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment") {:skip-drop-db? true})
     (sync/sync-table! (Table (data/id "comment_after_sync")))
     (db->fields db)))
 
@@ -98,7 +98,7 @@
 (ds/expect-with-engines #{:h2 :postgres}
   #{{:name (data/format-name "table_with_comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db (basic-table "table_with_comment_after_sync" nil)]
-     ;; modify the source DB to add the comment and resync
-     (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") true)
-     (metabase.sync.sync-metadata.tables/sync-tables! db)
-     (db->tables db)))
+    ;; modify the source DB to add the comment and resync
+    (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") {:skip-drop-db? true})
+    (metabase.sync.sync-metadata.tables/sync-tables! db)
+    (db->tables db)))
diff --git a/test/metabase/test/data/bigquery.clj b/test/metabase/test/data/bigquery.clj
index 65833494aa337e3e7fc07f67962a843943adbb4f..23e0e38d253b1e68b8b1071a8f9c95a4f3385902 100644
--- a/test/metabase/test/data/bigquery.clj
+++ b/test/metabase/test/data/bigquery.clj
@@ -200,35 +200,38 @@
 (def ^:private existing-datasets
   (atom #{}))
 
-(defn- create-db! [{:keys [database-name table-definitions]}]
-  {:pre [(seq database-name) (sequential? table-definitions)]}
-    ;; fetch existing datasets if we haven't done so yet
-  (when-not (seq @existing-datasets)
-    (reset! existing-datasets (set (existing-dataset-names)))
-    (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
-  ;; now check and see if we need to create the requested one
-  (let [database-name (normalize-name database-name)]
-    (when-not (contains? @existing-datasets database-name)
-      (try
-        (u/auto-retry 10
-          ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
-          ;; again from a blank slate
-          (u/ignore-exceptions
-            (destroy-dataset! database-name))
-          (create-dataset! database-name)
-          ;; do this in parallel because otherwise it can literally take an hour to load something like
-          ;; fifty_one_different_tables
-          (u/pdoseq [tabledef table-definitions]
-            (load-tabledef! database-name tabledef))
-          (swap! existing-datasets conj database-name)
-          (println (u/format-color 'green "[OK]")))
-        ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
-        ;; around
-        (catch Throwable e
-          (u/ignore-exceptions
-            (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
-            (destroy-dataset! database-name))
-          (throw e))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [database-name table-definitions]} _]
+   {:pre [(seq database-name) (sequential? table-definitions)]}
+   ;; fetch existing datasets if we haven't done so yet
+   (when-not (seq @existing-datasets)
+     (reset! existing-datasets (set (existing-dataset-names)))
+     (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
+   ;; now check and see if we need to create the requested one
+   (let [database-name (normalize-name database-name)]
+     (when-not (contains? @existing-datasets database-name)
+       (try
+         (u/auto-retry 10
+           ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
+           ;; again from a blank slate
+           (u/ignore-exceptions
+             (destroy-dataset! database-name))
+           (create-dataset! database-name)
+           ;; do this in parallel because otherwise it can literally take an hour to load something like
+           ;; fifty_one_different_tables
+           (u/pdoseq [tabledef table-definitions]
+             (load-tabledef! database-name tabledef))
+           (swap! existing-datasets conj database-name)
+           (println (u/format-color 'green "[OK]")))
+         ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
+         ;; around
+         (catch Throwable e
+           (u/ignore-exceptions
+             (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
+             (destroy-dataset! database-name))
+           (throw e)))))))
 
 
 ;;; --------------------------------------------- IDriverTestExtensions ----------------------------------------------
diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj
index 05d6f8560c4895431d5361ddb2d41700ed45e213..32b33e6b02ae76526e1c3c0e2561cb21bc4e09b2 100644
--- a/test/metabase/test/data/generic_sql.clj
+++ b/test/metabase/test/data/generic_sql.clj
@@ -362,9 +362,11 @@
         (execute! driver context dbdef (s/replace statement #"â…‹" ";"))))))
 
 (defn default-create-db!
-  ([driver database-definition]
-   (default-create-db! driver database-definition false))
-  ([driver {:keys [table-definitions], :as dbdef} skip-drop-db?]
+  "Default implementation of `create-db!` for SQL drivers."
+  ([driver db-def]
+   (default-create-db! driver db-def nil))
+  ([driver {:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?]
+                                                   :or   {skip-drop-db? false}}]
    (when-not skip-drop-db?
      ;; Exec SQL for creating the DB
      (execute-sql! driver :server dbdef (str (drop-db-if-exists-sql driver dbdef) ";\n"
diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj
index 32d6a0960d9a9d1204a04e2c3afb7fbdd0fda01e..7d538d13269fcaeb4ad7324355fc920dbad84453 100644
--- a/test/metabase/test/data/interface.clj
+++ b/test/metabase/test/data/interface.clj
@@ -110,17 +110,21 @@
     "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database`
      details map). CONTEXT is one of:
 
-     *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
-                    creating/destroying databases)
-     *  `:db`     - Return details for connecting specifically to the DB.")
+ *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
+                creating/destroying databases)
+ *  `:db`     - Return details for connecting specifically to the DB.")
 
-  (create-db! [this, ^DatabaseDefinition database-definition]
-              [this, ^DatabaseDefinition database-definition, ^Boolean skip-drop-db?]
+  (create-db!
+    [this, ^DatabaseDefinition database-definition]
+    [this, ^DatabaseDefinition database-definition, options]
     "Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints,
-     and add the appropriate data. This method should drop existing databases with the same name if applicable,
-     unless the skip-drop-db? arg is true. This is to workaround a scenario where the postgres driver terminates
-     the connection before dropping the DB and causes some tests to fail.
-     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)")
+     and add the appropriate data. This method should drop existing databases with the same name if applicable, unless
+     the skip-drop-db? arg is true. This is to workaround a scenario where the postgres driver terminates the
+     connection before dropping the DB and causes some tests to fail.
+     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)
+
+ Optional `options` as third param. Currently supported options include `skip-drop-db?`. If unspecified, `skip-drop-db?`
+ should default to `false`.")
 
   ;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter
   (default-schema ^String [this]
@@ -129,7 +133,7 @@
   (expected-base-type->actual [this base-type]
     "*OPTIONAL*. Return the base type type that is actually used to store `Fields` of BASE-TYPE.
      The default implementation of this method is an identity fn. This is provided so DBs that don't support a given
-     BASE-TYPE used in the test data can specifiy what type we should expect in the results instead.  For example,
+     BASE-TYPE used in the test data can specifiy what type we should expect in the results instead. For example,
      Oracle has no `INTEGER` data types, so `:type/Integer` test values are instead stored as `NUMBER`, which we map
      to `:type/Decimal`.")
 
@@ -166,17 +170,19 @@
 (defn create-table-definition
   "Convenience for creating a `TableDefinition`."
   ^TableDefinition [^String table-name, field-definition-maps rows]
-  (s/validate TableDefinition (map->TableDefinition {:table-name        table-name
-                                                     :rows              rows
-                                                     :field-definitions (mapv create-field-definition field-definition-maps)})))
+  (s/validate TableDefinition (map->TableDefinition
+                               {:table-name        table-name
+                                :rows              rows
+                                :field-definitions (mapv create-field-definition field-definition-maps)})))
 
 (defn create-database-definition
   "Convenience for creating a new `DatabaseDefinition`."
   {:style/indent 1}
   ^DatabaseDefinition [^String database-name & table-name+field-definition-maps+rows]
-  (s/validate DatabaseDefinition (map->DatabaseDefinition {:database-name     database-name
-                                                           :table-definitions (mapv (partial apply create-table-definition)
-                                                                                    table-name+field-definition-maps+rows)})))
+  (s/validate DatabaseDefinition (map->DatabaseDefinition
+                                  {:database-name     database-name
+                                   :table-definitions (mapv (partial apply create-table-definition)
+                                                            table-name+field-definition-maps+rows)})))
 
 (def ^:private ^:const edn-definitions-dir "./test/metabase/test/data/dataset_definitions/")
 
@@ -184,12 +190,9 @@
   (edn/read-string (slurp (str edn-definitions-dir dbname ".edn"))))
 
 (defn update-table-def
-  "Function useful for modifying a table definition before it's
-  applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector of column
-  definitions and `UPDATE-ROWS-FN` with the vector of rows in the
-  database definition. `TABLE-DEF` is the database
-  definition (typically used directly in a `def-database-definition`
-  invocation)."
+  "Function useful for modifying a table definition before it's applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector
+  of column definitions and `UPDATE-ROWS-FN` with the vector of rows in the database definition. `TABLE-DEF` is the
+  database definition (typically used directly in a `def-database-definition` invocation)."
   [table-name-to-update update-table-def-fn update-rows-fn table-def]
   (vec
    (for [[table-name table-def rows :as orig-table-def] table-def]
diff --git a/test/metabase/test/data/mongo.clj b/test/metabase/test/data/mongo.clj
index 1317bd9b629718122dadb775818b76da5a02b482..fba2247223d1ce5cd385322c2bf47d459331d996 100644
--- a/test/metabase/test/data/mongo.clj
+++ b/test/metabase/test/data/mongo.clj
@@ -18,26 +18,30 @@
   (with-open [mongo-connection (mg/connect (database->connection-details dbdef))]
     (mg/drop-db mongo-connection (i/escaped-name dbdef))))
 
-(defn- create-db! [{:keys [table-definitions], :as dbdef}]
-  (destroy-db! dbdef)
-  (with-mongo-connection [mongo-db (database->connection-details dbdef)]
-    (doseq [{:keys [field-definitions table-name rows]} table-definitions]
-      (let [field-names (for [field-definition field-definitions]
-                          (keyword (:field-name field-definition)))]
-        ;; Use map-indexed so we can get an ID for each row (index + 1)
-        (doseq [[i row] (map-indexed (partial vector) rows)]
-          (let [row (for [v row]
-                      ;; Conver all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
-                      ;; using Timestamps in 2.x to Dates in 3.x
-                      (if (instance? java.sql.Timestamp v)
-                        (java.util.Date. (.getTime ^java.sql.Timestamp v))
-                        v))]
-            (try
-              ;; Insert each row
-              (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
-                                                      :_id (inc i)))
-              ;; If row already exists then nothing to do
-              (catch com.mongodb.MongoException _))))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (when-not skip-drop-db?
+     (destroy-db! dbdef))
+   (with-mongo-connection [mongo-db (database->connection-details dbdef)]
+     (doseq [{:keys [field-definitions table-name rows]} table-definitions]
+       (let [field-names (for [field-definition field-definitions]
+                           (keyword (:field-name field-definition)))]
+         ;; Use map-indexed so we can get an ID for each row (index + 1)
+         (doseq [[i row] (map-indexed (partial vector) rows)]
+           (let [row (for [v row]
+                       ;; Conver all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
+                       ;; using Timestamps in 2.x to Dates in 3.x
+                       (if (instance? java.sql.Timestamp v)
+                         (java.util.Date. (.getTime ^java.sql.Timestamp v))
+                         v))]
+             (try
+               ;; Insert each row
+               (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
+                                                       :_id (inc i)))
+               ;; If row already exists then nothing to do
+               (catch com.mongodb.MongoException _)))))))))
 
 
 (u/strict-extend MongoDriver
diff --git a/test/metabase/test/data/presto.clj b/test/metabase/test/data/presto.clj
index 21f4b6550dd5d952f533380ee2b0dc87d3ba8618..2c0f02794614dabdcbd8ab5f368f7b8c223a4289 100644
--- a/test/metabase/test/data/presto.clj
+++ b/test/metabase/test/data/presto.clj
@@ -71,18 +71,22 @@
       query
       (unprepare/unprepare (cons query params) :quote-escape "'", :iso-8601-fn :from_iso8601_timestamp))))
 
-(defn- create-db! [{:keys [table-definitions] :as dbdef}]
-  (let [details (database->connection-details :db dbdef)]
-    (doseq [tabledef table-definitions
-            :let [rows       (:rows tabledef)
-                  ;; generate an ID for each row because we don't have auto increments
-                  keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
-                  ;; make 100 rows batches since we have to inline everything
-                  batches    (partition 100 100 nil keyed-rows)]]
-      (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef))
-      (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
-      (doseq [batch batches]
-        (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions] :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (let [details (database->connection-details :db dbdef)]
+     (doseq [tabledef table-definitions
+             :let     [rows       (:rows tabledef)
+                       ;; generate an ID for each row because we don't have auto increments
+                       keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
+                       ;; make 100 rows batches since we have to inline everything
+                       batches    (partition 100 100 nil keyed-rows)]]
+       (when-not skip-drop-db?
+         (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef)))
+       (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
+       (doseq [batch batches]
+         (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch)))))))
 
 ;;; IDriverTestExtensions implementation