diff --git a/.circleci/config.yml b/.circleci/config.yml
index 8e1fb11a0d403cc5e7854627998b28661e1170f1..0d9a0c00ccfed09526d159cc66662fe4a30e19f1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -256,6 +256,24 @@ jobs:
             lein with-profile +ci test
           no_output_timeout: 5m
 
+  be-tests-snowflake:
+    working_directory: /home/circleci/metabase/metabase/
+    docker:
+      - image: circleci/clojure:lein-2.8.1-node-browsers
+    steps:
+      - attach_workspace:
+          at: /home/circleci/
+      - restore_cache:
+          <<: *restore-be-deps-cache
+      - run:
+          name: Run backend unit tests (Snowflake)
+          environment:
+            ENGINES: h2,snowflake
+          command: >
+            /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh snowflake ||
+            lein with-profile +ci test
+          no_output_timeout: 10m
+
   be-tests-sqlserver:
     <<: *defaults
     steps:
@@ -596,6 +614,9 @@ workflows:
       - be-tests-vertica:
           requires:
             - be-tests
+      - be-tests-snowflake:
+          requires:
+            - be-tests
       - fe-deps:
           requires:
             - checkout
@@ -640,6 +661,7 @@ workflows:
             - be-tests-druid
             - be-tests-redshift
             - be-tests-vertica
+            - be-tests-snowflake
             - fe-linter-eslint
             - fe-linter-prettier
             - fe-linter-flow
diff --git a/project.clj b/project.clj
index 5dc898ec61352800cbacb057e6934bbfb64db03e..9374b0a266f78d47d0437783029287aa8dea36fe 100644
--- a/project.clj
+++ b/project.clj
@@ -87,6 +87,7 @@
                   :exclusions [com.github.wendykierp/JTransforms]]
                  [net.sf.cssbox/cssbox "4.12"                         ; HTML / CSS rendering
                   :exclusions [org.slf4j/slf4j-api]]
+                 [net.snowflake/snowflake-jdbc "3.6.13"]              ; Snowflake JDBC Client Library
                  [org.clojars.pntblnk/clj-ldap "0.0.12"]              ; LDAP client
                  [org.liquibase/liquibase-core "3.6.2"                ; migration management (Java lib)
                   :exclusions [ch.qos.logback/logback-classic]]
diff --git a/src/metabase/driver/snowflake.clj b/src/metabase/driver/snowflake.clj
new file mode 100644
index 0000000000000000000000000000000000000000..001c57a9920dbc9a038472d5fd2434f293982656
--- /dev/null
+++ b/src/metabase/driver/snowflake.clj
@@ -0,0 +1,241 @@
+(ns metabase.driver.snowflake
+  "Snowflake Driver."
+  (:require [clojure.string :as str]
+            [honeysql.core :as hsql]
+            [metabase
+             [driver :as driver]
+             [util :as u]]
+            [metabase.driver.generic-sql :as sql]
+            [metabase.driver.generic-sql.query-processor :as sql.qp]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.query-processor.store :as qp.store]
+            [metabase.util
+             [honeysql-extensions :as hx]
+             [ssh :as ssh]]
+            [toucan.db :as db])
+  (:import java.sql.Time))
+
+(defn- connection-details->spec
+  "Create a database specification for a snowflake database."
+  [{:keys [account regionid] :as opts}]
+  (let [host (if regionid
+               (str account "." regionid)
+               account)]
+    ;; it appears to be the case that their JDBC driver ignores `db` -- see my bug report at
+    ;; https://support.snowflake.net/s/question/0D50Z00008WTOMCSA5/
+    (merge {:subprotocol                                "snowflake"
+            :classname                                  "net.snowflake.client.jdbc.SnowflakeDriver"
+            :subname                                    (str "//" host ".snowflakecomputing.com/")
+            :client_metadata_request_use_connection_ctx true
+            :ssl                                        true
+            ;; other SESSION parameters
+            ;; use the same week start we use for all the other drivers
+            :week_start                                 7
+            ;; not 100% sure why we need to do this but if we don't set the connection to UTC our report timezone
+            ;; stuff doesn't work, even though we ultimately override this when we set the session timezone
+            :timezone                                   "UTC"}
+           (dissoc opts :host :port :timezone))))
+
+(defrecord SnowflakeDriver []
+  :load-ns true
+  clojure.lang.Named
+  (getName [_] "Snowflake"))
+
+(def ^:private snowflake-date-formatters
+  "The default timestamp format for Snowflake.
+  See https://docs.snowflake.net/manuals/sql-reference/data-types-datetime.html#timestamp."
+  (driver/create-db-time-formatters "yyyy-MM-dd HH:mm:ss.SSSSSSSSS Z"))
+
+(def ^:private snowflake-db-time-query
+  "Snowflake current database time, with hour and minute timezone offset."
+  "select to_char(current_timestamp, 'YYYY-MM-DD HH24:MI:SS.FF TZHTZM')")
+
+(def ^:private column->base-type
+  "Map of the default Snowflake column types -> Field base types. Add more
+  mappings here as you come across them."
+  {:NUMBER                     :type/Number
+   :DECIMAL                    :type/Decimal
+   :NUMERIC                    :type/Number
+   :INT                        :type/Integer
+   :INTEGER                    :type/Integer
+   :BIGINT                     :type/BigInteger
+   :SMALLINT                   :type/Integer
+   :TINYINT                    :type/Integer
+   :BYTEINT                    :type/Integer
+   :FLOAT                      :type/Float
+   :FLOAT4                     :type/Float
+   :FLOAT8                     :type/Float
+   :DOUBLE                     :type/Float
+   (keyword "DOUBLE PRECISON") :type/Float
+   :REAL                       :type/Float
+   :VARCHAR                    :type/Text
+   :CHAR                       :type/Text
+   :CHARACTER                  :type/Text
+   :STRING                     :type/Text
+   :TEXT                       :type/Text
+   :BINARY                     :type/*
+   :VARBINARY                  :type/*
+   :BOOLEAN                    :type/Boolean
+   :DATE                       :type/Date
+   :DATETIME                   :type/DateTime
+   :TIME                       :type/Time
+   :TIMESTAMP                  :type/DateTime
+   :TIMESTAMPLTZ               :type/DateTime
+   :TIMESTAMPNTZ               :type/DateTime
+   :TIMESTAMPTZ                :type/DateTime
+   :VARIANT                    :type/*
+   ;; Maybe also type *
+   :OBJECT                     :type/Dictionary
+   :ARRAY                      :type/*})
+
+(defn- unix-timestamp->timestamp [expr seconds-or-milliseconds]
+  (case seconds-or-milliseconds
+    :seconds      (hsql/call :to_timestamp expr)
+    :milliseconds (hsql/call :to_timestamp expr 3)))
+
+(defn- date-interval [unit amount]
+  (hsql/call :dateadd
+    (hsql/raw (name unit))
+    (hsql/raw (int amount))
+    :%current_timestamp))
+
+(defn- extract [unit expr] (hsql/call :date_part unit (hx/->timestamp expr)))
+(defn- date-trunc [unit expr] (hsql/call :date_trunc unit (hx/->timestamp expr)))
+
+(defn- date [unit expr]
+  (case unit
+    :default         expr
+    :minute          (date-trunc :minute expr)
+    :minute-of-hour  (extract :minute expr)
+    :hour            (date-trunc :hour expr)
+    :hour-of-day     (extract :hour expr)
+    :day             (date-trunc :day expr)
+    :day-of-week     (extract :dayofweek expr)
+    :day-of-month    (extract :day expr)
+    :day-of-year     (extract :dayofyear expr)
+    :week            (date-trunc :week expr)
+    :week-of-year    (extract :week expr)
+    :month           (date-trunc :month expr)
+    :month-of-year   (extract :month expr)
+    :quarter         (date-trunc :quarter expr)
+    :quarter-of-year (extract :quarter expr)
+    :year            (extract :year expr)))
+
+(defn- query-db-name []
+  (or (-> (qp.store/database) :details :db)
+      (throw (Exception. "Missing DB name"))))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver (class Field)]
+  [driver field]
+  (let [table            (qp.store/table (:table_id field))
+        db-name          (when-not (:alias? table)
+                           (query-db-name))
+        field-identifier (keyword
+                          (hx/qualify-and-escape-dots db-name (:schema table) (:name table) (:name field)))]
+    (sql.qp/cast-unix-timestamp-field-if-needed driver field field-identifier)))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver (class Table)]
+  [_ table]
+  (let [{table-name :name, schema :schema} table]
+    (hx/qualify-and-escape-dots (query-db-name) schema table-name)))
+
+(defmethod sql.qp/->honeysql [SnowflakeDriver :time]
+  [driver [_ value unit]]
+  (hx/->time (sql.qp/->honeysql driver value)))
+
+(defn- field->identifier
+  "Generate appropriate identifier for a Field for SQL parameters. (NOTE: THIS IS ONLY USED FOR SQL PARAMETERS!)"
+  ;; TODO - Making a DB call for each field to fetch its Table is inefficient and makes me cry, but this method is
+  ;; currently only used for SQL params so it's not a huge deal at this point
+  ;;
+  ;; TODO - we should make sure these are in the QP store somewhere and then could at least batch the calls
+  [driver {table-id :table_id, :as field}]
+  (qp.store/store-table! (db/select-one [Table :id :name :schema], :id (u/get-id table-id)))
+  (sql.qp/->honeysql driver field))
+
+
+(defn- table-rows-seq [driver database table]
+
+  (sql/query driver database {:select [:*]
+                              :from   [(qp.store/with-store
+                                         (qp.store/store-database! database)
+                                         (sql.qp/->honeysql driver table))]}))
+
+(defn- string-length-fn [field-key]
+  (hsql/call :length (hx/cast :VARCHAR field-key)))
+
+(defn- describe-database [driver database]
+  (sql/with-metadata [metadata driver database]
+    {:tables (sql/fast-active-tables driver metadata (:name database))}))
+
+(defn- describe-table [driver database table]
+  (sql/with-metadata [metadata driver database]
+    (->> (assoc (select-keys table [:name :schema])
+           :fields (sql/describe-table-fields metadata driver table (:name database)))
+         ;; find PKs and mark them
+         (sql/add-table-pks metadata))))
+
+(defn- describe-table-fks [driver database table]
+  (sql/describe-table-fks driver database table (:name database)))
+
+(u/strict-extend SnowflakeDriver
+  driver/IDriver
+  (merge (sql/IDriverSQLDefaultsMixin)
+         {:date-interval            (u/drop-first-arg date-interval)
+          :details-fields           (constantly (ssh/with-tunnel-config
+                                                  [{:name         "account"
+                                                    :display-name "Account"
+                                                    :placeholder  "Your snowflake account name."
+                                                    :required     true}
+                                                   {:name         "user"
+                                                    :display-name "Database username"
+                                                    :placeholder  "ken bier"
+                                                    :required     true}
+                                                   {:name         "password"
+                                                    :display-name "Database user password"
+                                                    :type         :password
+                                                    :placeholder  "*******"
+                                                    :required     true}
+                                                   {:name         "warehouse"
+                                                    :display-name "Warehouse"
+                                                    :placeholder  "my_warehouse"}
+                                                   {:name         "dbname"
+                                                    :display-name "Database name"
+                                                    :placeholder  "cockerel"}
+                                                   {:name         "regionid"
+                                                    :display-name "Region Id"
+                                                    :placeholder  "my_region"}
+                                                   {:name         "schema"
+                                                    :display-name "Schema"
+                                                    :placeholder  "my_schema"}
+                                                   {:name         "role"
+                                                    :display-name "Role"
+                                                    :placeholder  "my_role"}]))
+          :format-custom-field-name (u/drop-first-arg str/lower-case)
+          :current-db-time          (driver/make-current-db-time-fn
+                                     snowflake-db-time-query
+                                     snowflake-date-formatters)
+          :table-rows-seq           table-rows-seq
+          :describe-database        describe-database
+          :describe-table           describe-table
+          :describe-table-fks       describe-table-fks})
+
+  sql/ISQLDriver
+  (merge (sql/ISQLDriverDefaultsMixin)
+         {:connection-details->spec  (u/drop-first-arg connection-details->spec)
+          :string-length-fn          (u/drop-first-arg string-length-fn)
+          :excluded-schemas          (constantly #{"INFORMATION_SCHEMA"})
+          :date                      (u/drop-first-arg date)
+          :field->identifier         field->identifier
+          :current-datetime-fn       (constantly :%current_timestamp)
+          :set-timezone-sql          (constantly "ALTER SESSION SET TIMEZONE = %s;")
+          :unix-timestamp->timestamp (u/drop-first-arg unix-timestamp->timestamp)
+          :column->base-type         (u/drop-first-arg column->base-type)}))
+
+
+(defn -init-driver
+  "Register the Snowflake driver"
+  []
+  (driver/register-driver! :snowflake (SnowflakeDriver.)))
diff --git a/test/metabase/driver/snowflake_test.clj b/test/metabase/driver/snowflake_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5178499f46921fef1fd4a485021dca7e4a8d5aac
--- /dev/null
+++ b/test/metabase/driver/snowflake_test.clj
@@ -0,0 +1,7 @@
+(ns metabase.driver.snowflake-test
+  (:require [metabase.test.data.datasets :refer [expect-with-engine]]
+            [metabase.test.util :as tu]))
+
+(expect-with-engine :snowflake
+                    "UTC"
+                    (tu/db-timezone-id))
diff --git a/test/metabase/query_processor/middleware/format_rows_test.clj b/test/metabase/query_processor/middleware/format_rows_test.clj
index ca5c382d32d831a3f5c773a696ea4fec76e51bd1..e7d830e616d390e2123ad00d3204dca526779ace 100644
--- a/test/metabase/query_processor/middleware/format_rows_test.clj
+++ b/test/metabase/query_processor/middleware/format_rows_test.clj
@@ -10,7 +10,11 @@
              [dataset-definitions :as defs]
              [datasets :refer [*engine*]]]))
 
-(qpt/expect-with-non-timeseries-dbs-except #{:oracle :mongo :redshift :presto :sparksql}
+(def ^:private dbs-exempt-from-format-rows-tests
+  "DBs to skip the tests below for. TODO - why are so many databases not running these tests?"
+  #{:oracle :mongo :redshift :presto :sparksql :snowflake})
+
+(qpt/expect-with-non-timeseries-dbs-except dbs-exempt-from-format-rows-tests
   (if (= :sqlite *engine*)
     [[1 "Plato Yeshua" "2014-04-01 00:00:00" "08:30:00"]
      [2 "Felipinho Asklepios" "2014-12-05 00:00:00" "15:15:00"]
@@ -29,7 +33,7 @@
             :limit    5}))
        qpt/rows))
 
-(qpt/expect-with-non-timeseries-dbs-except #{:oracle :mongo :redshift :presto :sparksql}
+(qpt/expect-with-non-timeseries-dbs-except dbs-exempt-from-format-rows-tests
   (cond
     (= :sqlite *engine*)
     [[1 "Plato Yeshua" "2014-04-01 00:00:00" "08:30:00"]
diff --git a/test/metabase/query_processor/middleware/parameters/mbql_test.clj b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
index 04d7d14756533ec59acb85fd71fe58e622fcc55f..75af45f3446485d85e94b8a0fd605a9adcaec154 100644
--- a/test/metabase/query_processor/middleware/parameters/mbql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
@@ -6,7 +6,9 @@
              [query-processor-test :refer [first-row format-rows-by non-timeseries-engines rows]]]
             [metabase.mbql.normalize :as normalize]
             [metabase.query-processor.middleware.parameters.mbql :as mbql-params]
-            [metabase.test.data :as data]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
             [metabase.test.data.datasets :as datasets]
             [metabase.util.date :as du]))
 
@@ -138,17 +140,20 @@
 ;; check that date ranges work correctly
 (datasets/expect-with-engines params-test-engines
   [29]
-  (first-row
-    (format-rows-by [int]
-      (qp/process-query {:database   (data/id)
-                         :type       :query
-                         :query      {:source-table (data/id :checkins)
-                                      :aggregation  [[:count]]}
-                         :parameters [{:hash   "abc123"
-                                       :name   "foo"
-                                       :type   "date"
-                                       :target [:dimension [:field-id (data/id :checkins :date)]]
-                                       :value  "2015-04-01~2015-05-01"}]}))))
+  (do
+    ;; Prevent an issue with Snowflake where a previous connection's report-timezone setting can affect this test's results
+    (when (= :snowflake datasets/*engine*) (tu/clear-connection-pool (data/id)))
+    (first-row
+      (format-rows-by [int]
+        (qp/process-query {:database   (data/id)
+                           :type       :query
+                           :query      {:source-table (data/id :checkins)
+                                        :aggregation  [[:count]]}
+                           :parameters [{:hash   "abc123"
+                                         :name   "foo"
+                                         :type   "date"
+                                         :target [:dimension [:field-id (data/id :checkins :date)]]
+                                         :value  "2015-04-01~2015-05-01"}]})))))
 
 ;; check that IDs work correctly (passed in as numbers)
 (datasets/expect-with-engines params-test-engines
diff --git a/test/metabase/query_processor/middleware/parameters/sql_test.clj b/test/metabase/query_processor/middleware/parameters/sql_test.clj
index 0524a6015b0a50dc8fe4d731c8e0fc7cb26366bf..8773547852ab2e6f3e9747d056a1838ec8045089 100644
--- a/test/metabase/query_processor/middleware/parameters/sql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/sql_test.clj
@@ -632,6 +632,13 @@
      (= :presto datasets/*engine*)
      "2018-04-18"
 
+     ;; Snowflake appears to have a bug in their JDBC driver when including the target timezone along with the SQL
+     ;; date parameter. The below value is not correct, but is what the driver returns right now. This bug is written
+     ;; up as https://github.com/metabase/metabase/issues/8804 and when fixed this should be removed as it should
+     ;; return the same value as the other drivers that support a report timezone
+     (= :snowflake datasets/*engine*)
+     "2018-04-16T17:00:00.000-07:00"
+
      (qpt/supports-report-timezone? datasets/*engine*)
      "2018-04-18T00:00:00.000-07:00"
 
diff --git a/test/metabase/query_processor_test/date_bucketing_test.clj b/test/metabase/query_processor_test/date_bucketing_test.clj
index e9bff4bc20f1976b793791325cfdbb492faa24f0..084c73570ffe259ee3b9f295f0b03b98e4260486 100644
--- a/test/metabase/query_processor_test/date_bucketing_test.clj
+++ b/test/metabase/query_processor_test/date_bucketing_test.clj
@@ -35,13 +35,14 @@
     (long x)
     x))
 
-(defn- oracle-or-redshift?
-  "We currently have a bug in how report-timezone is used in Oracle. The timeone is applied correctly, but the date
-  operations that we use aren't using that timezone. It's written up as
-  https://github.com/metabase/metabase/issues/5789. This function is used to differentiate Oracle from the other
-  report-timezone databases until that bug can get fixed. Redshift also has this issue."
+(defn tz-shifted-engine-bug?
+  "Returns true if `engine` is affected by the bug originally observed in
+  Oracle (https://github.com/metabase/metabase/issues/5789) but later found in Redshift and Snowflake. The timezone is
+  applied correctly, but the date operations that we use aren't using that timezone. This function is used to
+  differentiate these affected engines from the other report-timezone databases until that bug can get
+  fixed."
   [engine]
-  (contains? #{:oracle :redshift} engine))
+  (contains? #{:snowflake :oracle :redshift} engine))
 
 (defn- sad-toucan-incidents-with-bucketing
   "Returns 10 sad toucan incidents grouped by `UNIT`"
@@ -128,7 +129,7 @@
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
     ;; There's a bug here where we are reading in the UTC time as pacific, so we're 7 hours off
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     ;; When the reporting timezone is applied, the same datetime value is returned, but set in the pacific timezone
@@ -148,7 +149,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter eastern-tz) (result-date-formatter eastern-tz))
 
     ;; The time instant is the same as UTC (or pacific) but should be offset by the eastern timezone
@@ -172,7 +173,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter eastern-tz) (result-date-formatter eastern-tz))
 
     ;; The JVM timezone should have no impact on a database that uses a report timezone
@@ -197,7 +198,7 @@
     (contains? #{:sqlite :crate} *engine*)
     (sad-toucan-result (source-date-formatter utc-tz) result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (sad-toucan-result (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     (supports-report-timezone? *engine*)
@@ -261,7 +262,7 @@
     (results-by-hour (source-date-formatter utc-tz)
                      result-date-formatter-without-tz)
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-hour (source-date-formatter pacific-tz) (result-date-formatter pacific-tz))
 
     (supports-report-timezone? *engine*)
@@ -283,7 +284,7 @@
 ;; first three results of the pacific results to the last three of the
 ;; UTC results (i.e. pacific is 7 hours back of UTC at that time)
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]
     [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]])
@@ -387,7 +388,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time pacific-tz)
                     (result-date-formatter pacific-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -420,7 +421,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time eastern-tz)
                     (result-date-formatter eastern-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -453,7 +454,7 @@
                     date-formatter-without-time
                     [6 10 4 9 9 8 8 9 7 9])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-day (tformat/with-zone date-formatter-without-time pacific-tz)
                     (result-date-formatter pacific-tz)
                     [6 10 4 9 9 8 8 9 7 9])
@@ -478,7 +479,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]
     [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]])
@@ -495,7 +496,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]
     [[1 6] [2 10] [3 4] [4 9] [5  9] [6 8] [7 8] [8  9] [9 7] [10  9]])
@@ -512,7 +513,7 @@
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 
 (expect-with-non-timeseries-dbs
-  (if (and (not (oracle-or-redshift? *engine*))
+  (if (and (not (tz-shifted-engine-bug? *engine*))
            (supports-report-timezone? *engine*))
     [[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]
     [[152 6] [153 10] [154 4] [155 9] [156  9] [157  8] [158 8] [159  9] [160 7] [161  9]])
@@ -582,7 +583,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time pacific-tz)
                      (result-date-formatter pacific-tz)
                      [46 47 40 60 7])
@@ -614,7 +615,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time eastern-tz)
                      (result-date-formatter eastern-tz)
                      [46 47 40 60 7])
@@ -643,7 +644,7 @@
                      date-formatter-without-time
                      [46 47 40 60 7])
 
-    (oracle-or-redshift? *engine*)
+    (tz-shifted-engine-bug? *engine*)
     (results-by-week (tformat/with-zone date-formatter-without-time pacific-tz)
                      (result-date-formatter pacific-tz)
                      [46 47 40 60 7])
@@ -669,6 +670,8 @@
 (expect-with-non-timeseries-dbs
   ;; Not really sure why different drivers have different opinions on these </3
   (cond
+    (= :snowflake *engine*)
+    [[22 46] [23 47] [24 40] [25 60] [26 7]]
 
     (contains? #{:sqlserver :sqlite :crate :oracle :sparksql} *engine*)
     [[23 54] [24 46] [25 39] [26 61]]
@@ -807,29 +810,29 @@
                          (cons :relative-datetime relative-datetime-args)]}))
       first-row first int))
 
-;; HACK - Don't run these tests against BigQuery because the databases need to be loaded every time the tests are ran
-;;        and loading data into BigQuery is mind-bogglingly slow. Don't worry, I promise these work though!
+;; HACK - Don't run these tests against BigQuery/etc. because the databases need to be loaded every time the tests are run
+;;        and loading data into BigQuery/etc. is mind-bogglingly slow. Don't worry, I promise these work though!
 
 ;; Don't run the minute tests against Oracle because the Oracle tests are kind of slow and case CI to fail randomly
 ;; when it takes so long to load the data that the times are no longer current (these tests pass locally if your
 ;; machine isn't as slow as the CircleCI ones)
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute "current"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute"))
-(expect-with-non-timeseries-dbs-except #{:bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute  1 "minute"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute -1 "minute"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery :oracle} 4 (count-of-grouping (checkins:4-per-minute) :minute  1 "minute"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour "current"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour -1 "hour"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour  1 "hour"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour -1 "hour"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 4 (count-of-grouping (checkins:4-per-hour) :hour  1 "hour"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day "current"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day -1 "day"))
-(expect-with-non-timeseries-dbs-except #{:bigquery} 1 (count-of-grouping (checkins:1-per-day) :day  1 "day"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day -1 "day"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 1 (count-of-grouping (checkins:1-per-day) :day  1 "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery} 7 (count-of-grouping (checkins:1-per-day) :week "current"))
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery} 7 (count-of-grouping (checkins:1-per-day) :week "current"))
 
 ;; SYNTACTIC SUGAR
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   1
   (-> (data/with-temp-db [_ (checkins:1-per-day)]
         (data/run-mbql-query checkins
@@ -837,7 +840,7 @@
            :filter      [:time-interval $timestamp :current :day]}))
       first-row first int))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   7
   (-> (data/with-temp-db [_ (checkins:1-per-day)]
         (data/run-mbql-query checkins
@@ -859,32 +862,32 @@
                (throw (ex-info "Query failed!" results)))
      :unit (-> results :data :cols first :unit)}))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 7, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "week"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :week}
   (date-bucketing-unit-when-you :breakout-by "week", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :quarter}
   (date-bucketing-unit-when-you :breakout-by "quarter", :filter-by "day"))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 1, :unit :hour}
   (date-bucketing-unit-when-you :breakout-by "hour", :filter-by "day"))
 
 ;; make sure if you use a relative date bucket in the past (e.g. "past 2 months") you get the correct amount of rows
 ;; (#3910)
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 2, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval -2))
 
-(expect-with-non-timeseries-dbs-except #{:bigquery}
+(expect-with-non-timeseries-dbs-except #{:snowflake :bigquery}
   {:rows 2, :unit :day}
   (date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval 2))
diff --git a/test/metabase/query_processor_test/expressions_test.clj b/test/metabase/query_processor_test/expressions_test.clj
index e40b776ca2cd655b0bbf4b8714ae43fe47487fc4..7982c9948ac9bc25b0e7c571634bb32af84f3573 100644
--- a/test/metabase/query_processor_test/expressions_test.clj
+++ b/test/metabase/query_processor_test/expressions_test.clj
@@ -86,9 +86,11 @@
 ;; Custom aggregation expressions should include their type
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :expressions)
   (conj #{{:name "x" :base_type :type/Float}}
-        (if (= datasets/*engine* :oracle)
-          {:name (data/format-name "category_id") :base_type :type/Decimal}
-          {:name (data/format-name "category_id") :base_type :type/Integer}))
+        {:name      (data/format-name "category_id")
+         :base_type (case datasets/*engine*
+                      :oracle    :type/Decimal
+                      :snowflake :type/Number
+                      :type/Integer)})
   (set (map #(select-keys % [:name :base_type])
             (-> (data/run-mbql-query venues
                   {:aggregation [:named [:sum [:* $price -1]] "x"]
diff --git a/test/metabase/query_processor_test/filter_test.clj b/test/metabase/query_processor_test/filter_test.clj
index 913484cd9388f5351d05ee08c929b6c70ed53c02..184ee0aba0e9887c14c228219060ea5603c35ab6 100644
--- a/test/metabase/query_processor_test/filter_test.clj
+++ b/test/metabase/query_processor_test/filter_test.clj
@@ -1,7 +1,9 @@
 (ns metabase.query-processor-test.filter-test
   "Tests for the `:filter` clause."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.test.data :as data]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
             [metabase.test.data.datasets :as datasets]))
 
 ;;; ------------------------------------------------ "FILTER" CLAUSE -------------------------------------------------
@@ -92,11 +94,14 @@
    :columns     ["count"]
    :cols        [(aggregate-col :count)]
    :native_form true}
-  (->> (data/run-mbql-query checkins
-         {:aggregation [[:count]]
-          :filter      [:between [:datetime-field $date :day] "2015-04-01" "2015-05-01"]})
-       booleanize-native-form
-       (format-rows-by [int])))
+  (do
+    ;; Prevent an issue with Snowflake where a previous connection's report-timezone setting can affect this test's results
+    (when (= :snowflake datasets/*engine*) (tu/clear-connection-pool (data/id)))
+    (->> (data/run-mbql-query checkins
+           {:aggregation [[:count]]
+            :filter      [:between [:datetime-field $date :day] "2015-04-01" "2015-05-01"]})
+         booleanize-native-form
+         (format-rows-by [int]))))
 
 ;;; FILTER -- "OR", "<=", "="
 (expect-with-non-timeseries-dbs
diff --git a/test/metabase/query_processor_test/time_field_test.clj b/test/metabase/query_processor_test/time_field_test.clj
index ef2c40ac2d06c82d968a11c8cf0596ee00758246..4d419fc3cc2f3394d1a0ad0e802cb80b744078e2 100644
--- a/test/metabase/query_processor_test/time_field_test.clj
+++ b/test/metabase/query_processor_test/time_field_test.clj
@@ -79,6 +79,12 @@
     (= :mysql *engine*)
     []
 
+    ;; It looks like Snowflake is doing this conversion correctly. Snowflake's time field is stored as wall clock time
+    ;; (vs. PG and others storing it without a timezone). Originally, this time is 16:15 in UTC, which is 8:15 in
+    ;; pacific time. The other report timezone databases are not doing this timezone conversion.
+    (= :snowflake *engine*)
+    [[3 "Kaneonuskatew Eiran" "08:15:00.000-08:00"]]
+
     ;; Databases like PostgreSQL ignore timezone information when
     ;; using a time field, the result below is what happens when the
     ;; 08:00 time is interpreted as UTC, then not adjusted to Pacific
diff --git a/test/metabase/query_processor_test/unix_timestamp_test.clj b/test/metabase/query_processor_test/unix_timestamp_test.clj
index bfc4a9f69ff3102d8047a6c6cb3f01359cc7575a..f09189292c396932cf5bc28fa7ae210a542eb706 100644
--- a/test/metabase/query_processor_test/unix_timestamp_test.clj
+++ b/test/metabase/query_processor_test/unix_timestamp_test.clj
@@ -1,6 +1,7 @@
 (ns metabase.query-processor-test.unix-timestamp-test
   "Tests for UNIX timestamp support."
   (:require [metabase.query-processor-test :refer :all]
+            [metabase.query-processor-test.date-bucketing-test :as dbt]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -35,7 +36,7 @@
      ["2015-06-09"  7]
      ["2015-06-10"  9]]
 
-    (contains? #{:oracle :redshift} *engine*)
+    (dbt/tz-shifted-engine-bug? *engine*)
     [["2015-06-01T00:00:00.000-07:00" 6]
      ["2015-06-02T00:00:00.000-07:00" 10]
      ["2015-06-03T00:00:00.000-07:00" 4]
diff --git a/test/metabase/test/data/snowflake.clj b/test/metabase/test/data/snowflake.clj
new file mode 100644
index 0000000000000000000000000000000000000000..27444b1c0d5dd55cd9e46fc7b4daa8f3fa7cfc6b
--- /dev/null
+++ b/test/metabase/test/data/snowflake.clj
@@ -0,0 +1,117 @@
+(ns metabase.test.data.snowflake
+  (:require [clojure.java.jdbc :as jdbc]
+            [clojure.string :as str]
+            [metabase.driver.generic-sql :as sql]
+            [metabase.test.data
+             [generic-sql :as generic]
+             [interface :as i]]
+            [metabase.util :as u]
+            [honeysql.core :as hsql]
+            [honeysql.helpers :as h]
+            [metabase.util.honeysql-extensions :as hx]
+            [honeysql.format :as hformat])
+  (:import metabase.driver.snowflake.SnowflakeDriver))
+
+(def ^:private ^SnowflakeDriver snowflake-driver (SnowflakeDriver.))
+
+(def ^:private ^:const field-base-type->sql-type
+  {:type/BigInteger "BIGINT"
+   :type/Boolean    "BOOLEAN"
+   :type/Date       "DATE"
+   :type/DateTime   "TIMESTAMPLTZ"
+   :type/Decimal    "DECIMAL"
+   :type/Float      "FLOAT"
+   :type/Integer    "INTEGER"
+   :type/Text       "TEXT"
+   :type/Time       "TIME"})
+
+(defn- database->connection-details [context {:keys [database-name]}]
+  (merge
+   {:account   (i/db-test-env-var-or-throw :snowflake :account)
+    :user      (i/db-test-env-var-or-throw :snowflake :user)
+    :password  (i/db-test-env-var-or-throw :snowflake :password)
+    :warehouse (i/db-test-env-var-or-throw :snowflake :warehouse)
+    ;; SESSION parameters
+    :timezone "UTC"}
+   ;; Snowflake JDBC driver ignores this, but we do use it in the `query-db-name` function in
+   ;; `metabase.driver.snowflake`
+   (when (= context :db)
+     {:db database-name})))
+
+
+;; Snowflake requires you identify an object with db-name.schema-name.table-name
+(defn- qualified-name-components
+  ([_ db-name table-name]            [db-name "PUBLIC" table-name])
+  ([_ db-name]                       [db-name])
+  ([_ db-name table-name field-name] [db-name "PUBLIC" table-name field-name]))
+
+(defn- create-db-sql [driver {:keys [database-name]}]
+  (let [db (generic/qualify+quote-name driver database-name)]
+    (format "DROP DATABASE IF EXISTS %s; CREATE DATABASE %s;" db db)))
+
+(defn- expected-base-type->actual [base-type]
+  (if (isa? base-type :type/Integer)
+    :type/Number
+    base-type))
+
+(defn- drop-database [_]) ; no-op since we shouldn't be trying to drop any databases anyway
+
+(defn- no-db-connection-spec
+  "Connection spec for connecting to our Snowflake instance without specifying a DB."
+  []
+  (sql/connection-details->spec snowflake-driver (database->connection-details nil nil)))
+
+(defn- existing-dataset-names []
+  (let [db-spec (no-db-connection-spec)]
+    (jdbc/with-db-metadata [metadata db-spec]
+      ;; for whatever dumb reason the Snowflake JDBC driver always returns these as uppercase despite us making them
+      ;; all lower-case
+      (set (map str/lower-case (sql/get-catalogs metadata))))))
+
+(let [datasets (atom nil)]
+  (defn- existing-datasets []
+    (when-not (seq @datasets)
+      (reset! datasets (existing-dataset-names))
+      (println "These Snowflake datasets have already been loaded:\n" (u/pprint-to-str (sort @datasets))))
+    @datasets)
+
+  (defn- add-existing-dataset! [database-name]
+    (swap! datasets conj database-name)))
+
+(defn- create-db!
+  ([db-def]
+   (create-db! snowflake-driver db-def))
+  ([driver {:keys [database-name] :as db-def}]
+   ;; ok, now check if already created. If already created, no-op
+   (when-not (contains? (existing-datasets) database-name)
+     ;; if not created, create the DB...
+     (try
+       (generic/default-create-db! driver db-def)
+       ;; and add it to the set of DBs that have been created
+       (add-existing-dataset! database-name)
+       ;; if creating the DB failed, DROP it so we don't get stuck with a DB full of bad data and skip trying to
+       ;; load it next time around
+       (catch Throwable e
+         (let [drop-db-sql (format "DROP DATABASE \"%s\";" database-name)]
+           (println "Creating DB failed; executing" drop-db-sql)
+           (jdbc/execute! (no-db-connection-spec) [drop-db-sql]))
+         (throw e))))))
+
+(u/strict-extend SnowflakeDriver
+  generic/IGenericSQLTestExtensions
+  (merge generic/DefaultsMixin
+         {:field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)
+          :create-db-sql             create-db-sql
+          :execute-sql!              generic/sequentially-execute-sql!
+          :pk-sql-type               (constantly "INTEGER AUTOINCREMENT")
+          :qualified-name-components qualified-name-components
+          :load-data!                generic/load-data-add-ids!})
+
+  i/IDriverTestExtensions
+  (merge generic/IDriverTestExtensionsMixin
+         {:database->connection-details (u/drop-first-arg database->connection-details)
+          :default-schema               (constantly "PUBLIC")
+          :engine                       (constantly :snowflake)
+          :id-field-type                (constantly :type/Number)
+          :expected-base-type->actual   (u/drop-first-arg expected-base-type->actual)
+          :create-db!                   create-db!}))