From e4a14a5c00cba260c4a1ef69733b87130054dd9e Mon Sep 17 00:00:00 2001
From: Cam Saul <cammsaul@gmail.com>
Date: Wed, 6 Dec 2017 16:28:16 -0800
Subject: [PATCH] Code cleanup :shower:

---
 src/metabase/api/card.clj                     |  2 +-
 src/metabase/api/dashboard.clj                |  2 +-
 src/metabase/api/embed.clj                    |  4 +-
 src/metabase/driver.clj                       |  4 +-
 src/metabase/driver/generic_sql.clj           |  8 +-
 src/metabase/driver/mongo.clj                 |  2 +-
 src/metabase/driver/postgres.clj              |  9 +-
 src/metabase/query_processor/annotate.clj     | 96 ++++++++++--------
 src/metabase/query_processor/interface.clj    | 98 ++++++++++++-------
 .../query_processor/middleware/resolve.clj    | 90 +++++++++++------
 src/metabase/types.clj                        |  7 +-
 src/metabase/util.clj                         |  2 +-
 src/metabase/util/honeysql_extensions.clj     |  2 +-
 test/metabase/api/dataset_test.clj            |  2 +-
 test/metabase/api/metric_test.clj             |  4 +-
 test/metabase/api/pulse_test.clj              |  4 +-
 test/metabase/api/segment_test.clj            |  4 +-
 test/metabase/driver/postgres_test.clj        | 18 ++--
 .../sync/analyze/fingerprint_test.clj         |  6 +-
 test/metabase/test/data/bigquery.clj          |  2 +-
 test/metabase/test/util.clj                   |  2 +-
 21 files changed, 227 insertions(+), 141 deletions(-)

diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj
index ee0c7c6081c..ca33a519bc4 100644
--- a/src/metabase/api/card.clj
+++ b/src/metabase/api/card.clj
@@ -327,7 +327,7 @@
   "You must be a superuser to change the value of `enable_embedding` or `embedding_params`. Embedding must be
   enabled."
   [card enable-embedding? embedding-params]
-  (when (or (and (not (nil? enable-embedding?))
+  (when (or (and (some? enable-embedding?)
                  (not= enable-embedding? (:enable_embedding card)))
             (and embedding-params
                  (not= embedding-params (:embedding_params card))))
diff --git a/src/metabase/api/dashboard.clj b/src/metabase/api/dashboard.clj
index c880c3a9cc8..5cfa46e7aca 100644
--- a/src/metabase/api/dashboard.clj
+++ b/src/metabase/api/dashboard.clj
@@ -194,7 +194,7 @@
    archived                (s/maybe s/Bool)}
   (let [dash (api/write-check Dashboard id)]
     ;; you must be a superuser to change the value of `enable_embedding` or `embedding_params`. Embedding must be enabled
-    (when (or (and (not (nil? enable_embedding))
+    (when (or (and (some? enable_embedding)
                    (not= enable_embedding (:enable_embedding dash)))
               (and embedding_params
                    (not= embedding_params (:embedding_params dash))))
diff --git a/src/metabase/api/embed.clj b/src/metabase/api/embed.clj
index a6a404574d0..f4267f3788a 100644
--- a/src/metabase/api/embed.clj
+++ b/src/metabase/api/embed.clj
@@ -79,7 +79,7 @@
 (defn- valid-param?
   "Is V a valid param value? (Is it non-`nil`, and, if a String, non-blank?)"
   [v]
-  (and (not (nil? v))
+  (and (some? v)
        (or (not (string? v))
            (not (str/blank? v)))))
 
@@ -151,7 +151,7 @@
   [parameters parameter-values]
   (for [param parameters
         :let  [value (get parameter-values (keyword (:slug param)))]
-        :when (not (nil? value))]
+        :when (some? value)]
     (assoc (select-keys param [:type :target])
       :value value)))
 
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index 99a42e01323..029c81dcb82 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -16,7 +16,9 @@
             [metabase.config :as config]
             [metabase.models
              [database :refer [Database]]
-             [setting :refer [defsetting]]]
+             field
+             [setting :refer [defsetting]]
+             table]
             [metabase.sync.interface :as si]
             [metabase.util :as u]
             [schema.core :as s]
diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj
index c416e4ac9a5..de8004f7e42 100644
--- a/src/metabase/driver/generic_sql.clj
+++ b/src/metabase/driver/generic_sql.clj
@@ -362,12 +362,16 @@
                                      (assoc field :pk? true))))))))
 
 (defn describe-database
-  "Default implementation of `describe-database` for JDBC-based drivers."
+  "Default implementation of `describe-database` for JDBC-based drivers. Uses various `ISQLDriver` methods and JDBC
+   metadata."
   [driver database]
   (with-metadata [metadata driver database]
     {:tables (active-tables driver, ^DatabaseMetaData metadata)}))
 
-(defn- describe-table [driver database table]
+(defn describe-table
+  "Default implementation of `describe-table` for JDBC-based drivers. Uses various `ISQLDriver` methods and JDBC
+   metadata."
+  [driver database table]
   (with-metadata [metadata driver database]
     (->> (assoc (select-keys table [:name :schema]) :fields (describe-table-fields metadata driver table))
          ;; find PKs and mark them
diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj
index 87bf18118f5..eccdcb45a06 100644
--- a/src/metabase/driver/mongo.clj
+++ b/src/metabase/driver/mongo.clj
@@ -124,7 +124,7 @@
              :base-type     (driver/class->base-type most-common-object-type)}
       (= :_id field-kw)           (assoc :pk? true)
       (:special-types field-info) (assoc :special-type (->> (vec (:special-types field-info))
-                                                            (filter #(not (nil? (first %))))
+                                                            (filter #(some? (first %)))
                                                             (sort-by second)
                                                             last
                                                             first))
diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj
index 90b70212c53..6dca119b95d 100644
--- a/src/metabase/driver/postgres.clj
+++ b/src/metabase/driver/postgres.clj
@@ -1,4 +1,6 @@
 (ns metabase.driver.postgres
+  "Database driver for PostgreSQL databases. Builds on top of the 'Generic SQL' driver, which implements most
+  functionality for JDBC-based drivers."
   (:require [clojure
              [set :as set :refer [rename-keys]]
              [string :as s]]
@@ -13,7 +15,7 @@
              [ssh :as ssh]])
   (:import java.util.UUID))
 
-(def ^:private ^:const column->base-type
+(def ^:private column->base-type
   "Map of Postgres column types -> Field base types.
    Add more mappings here as you come across them."
   {:bigint        :type/BigInteger
@@ -171,7 +173,10 @@
     #".*" ; default
     message))
 
-(defn- prepare-value [{value :value, {:keys [base-type]} :field}]
+(defn- prepare-value
+  "Prepare a value for compilation to SQL. This should return an appropriate HoneySQL form. See description in
+  `ISQLDriver` protocol for details."
+  [{value :value, {:keys [base-type database-type]} :field}]
   (if-not value
     value
     (cond
diff --git a/src/metabase/query_processor/annotate.clj b/src/metabase/query_processor/annotate.clj
index 3138b09f88e..f84d03ccd3c 100644
--- a/src/metabase/query_processor/annotate.clj
+++ b/src/metabase/query_processor/annotate.clj
@@ -1,6 +1,9 @@
 (ns metabase.query-processor.annotate
-  "Code that analyzes the results of running a query and adds relevant type information about results (including foreign key information)."
-  ;; TODO - The code in this namespace could definitely use a little cleanup to make it a little easier to wrap one's head around :)
+  "Code that analyzes the results of running a query and adds relevant type information about results (including
+  foreign key information). This also does things like taking lisp-case keys used in the QP and converting them back
+  to snake_case ones used in the frontend."
+  ;; TODO - The code in this namespace could definitely use a little cleanup to make it a little easier to wrap one's
+  ;;        head around :)
   ;; TODO - This namespace should be called something like `metabase.query-processor.middleware.annotate`
   (:require [clojure
              [set :as set]
@@ -23,11 +26,15 @@
 ;;; ## Field Resolution
 
 (defn- valid-collected-field? [keep-date-time-fields? f]
-  (or (instance? metabase.query_processor.interface.Field f)
-      (instance? metabase.query_processor.interface.FieldLiteral f)
-      (instance? metabase.query_processor.interface.ExpressionRef f)
-      (when keep-date-time-fields?
-        (instance? metabase.query_processor.interface.DateTimeField f))))
+  (or
+   ;; is `f` an instance of `Field`, `FieldLiteral`, or `ExpressionRef`?
+   (some (u/rpartial instance? f)
+         [metabase.query_processor.interface.Field
+          metabase.query_processor.interface.FieldLiteral
+          metabase.query_processor.interface.ExpressionRef])
+   ;; or if we're keeping DateTimeFields, is it an instance of `DateTimeField`?
+   (when keep-date-time-fields?
+     (instance? metabase.query_processor.interface.DateTimeField f))))
 
 (defn collect-fields
   "Return a sequence of all the `Fields` inside THIS, recursing as needed for collections.
@@ -74,21 +81,25 @@
        :base-type          :type/Float
        :special-type       :type/Number)]
 
-    ;; for every value in a map in the query we'll descend into the map and find all the fields contained therein and mark the key as each field's source.
-    ;; e.g. if we descend into the `:breakout` columns for a query each field returned will get a `:source` of `:breakout`
-    ;; The source is important since it is used to determine sort order for for columns
+    ;; for every value in a map in the query we'll descend into the map and find all the fields contained therein and
+    ;; mark the key as each field's source. e.g. if we descend into the `:breakout` columns for a query each field
+  ;; returned will get a `:source` of `:breakout`. The source is important since it is used to determine sort order
+  ;; for columns
     clojure.lang.IPersistentMap
     (for [[k v] (seq this)
           field (collect-fields v keep-date-time-fields?)
           :when field]
       (if (= k :source-query)
         ;; For columns collected from a source query...
-        ;; 1) Make sure they didn't accidentally pick up an integer ID if the fields clause was added implicitly. If it does
-        ;;    the frontend won't know how to use the field since it won't match up with the same field in the "virtual" table metadata.
-        ;; 2) Keep the original `:source` rather than replacing it with `:source-query` since the frontend doesn't know what to do with that.
+        ;; 1) Make sure they didn't accidentally pick up an integer ID if the fields clause was added implicitly. If
+        ;;     it does the frontend won't know how to use the field since it won't match up with the same field in the
+        ;;     "virtual" table metadata.
+        ;; 2) Keep the original `:source` rather than replacing it with `:source-query` since the frontend doesn't
+        ;;    know what to do with that.
         (if (= (:unit field) :year)
-          ;; if the field is broken out by year we don't want to advertise it as type/DateTime because you can't do a datetime breakout on the years that come back
-          ;; (they come back as text). So instead just tell people it's a Text column
+          ;; if the field is broken out by year we don't want to advertise it as type/DateTime because you can't do a
+          ;; datetime breakout on the years that come back (they come back as text). So instead just tell people it's
+          ;; a Text column
           (assoc field
             :field-id [:field-literal (:field-name field) :type/Text]
             :base-type :type/Text
@@ -126,7 +137,8 @@
                                             (if (instance? Expression arg)
                                               (str "(" (aggregation-name arg) ")")
                                               (aggregation-name arg)))))
-    ;; for unnamed normal aggregations, the column alias is always the same as the ag type except for `:distinct` with is called `:count` (WHY?)
+    ;; for unnamed normal aggregations, the column alias is always the same as the ag type except for `:distinct` which
+    ;; is called `:count` (WHY?)
     aggregation-type          (if (= (keyword aggregation-type) :distinct)
                                 "count"
                                 (name aggregation-type))))
@@ -148,13 +160,15 @@
             :field-display-name field-name
             :base-type          (:base-type ag-field)
             :special-type       (:special-type ag-field)})
-         ;; Always treat count or distinct count as an integer even if the DB in question returns it as something wacky like a BigDecimal or Float
+         ;; Always treat count or distinct count as an integer even if the DB in question returns it as something
+         ;; wacky like a BigDecimal or Float
          (when (contains? #{:count :distinct} ag-type)
            {:base-type    :type/Integer
             :special-type :type/Number})
-         ;; For the time being every Expression is an arithmetic operator and returns a floating-point number, so hardcoding these types is fine;
-         ;; In the future when we extend Expressions to handle more functionality we'll want to introduce logic that associates a return type with a given expression.
-         ;; But this will work for the purposes of a patch release.
+         ;; For the time being every Expression is an arithmetic operator and returns a floating-point number, so
+         ;; hardcoding these types is fine; In the future when we extend Expressions to handle more functionality
+         ;; we'll want to introduce logic that associates a return type with a given expression. But this will work
+         ;; for the purposes of a patch release.
          (when (instance? ExpressionRef ag-field)
            {:base-type    :type/Float
             :special-type :type/Number})))
@@ -163,7 +177,8 @@
   "Does QUERY have an aggregation?"
   [{aggregations :aggregation}]
   (or (empty? aggregations)
-      ;; TODO - Not sure this needs to be checked anymore since `:rows` is a legacy way to specifiy "no aggregations" and should be stripped out during preprocessing
+      ;; TODO - Not sure this needs to be checked anymore since `:rows` is a legacy way to specify "no aggregations"
+      ;; and should be stripped out during preprocessing
       (= (:aggregation-type (first aggregations)) :rows)))
 
 (defn- add-aggregate-fields-if-needed
@@ -188,8 +203,8 @@
    :field-name         k
    :field-display-name (humanization/name->human-readable-name (name k))})
 
-;; TODO - I'm not 100% sure the code reaches this point any more since the `collect-fields` logic now handles nested queries
-;; maybe this is used for queries where the source query is native?
+;; TODO - I'm not 100% sure the code reaches this point any more since the `collect-fields` logic now handles nested
+;; queries. Maybe this is used for queries where the source query is native?
 (defn- info-for-column-from-source-query
   "Return information about a column that comes back when we're using a source query.
    (This is basically the same as the generic information, but we also add `:id` and `:source`
@@ -202,9 +217,10 @@
 
 
 (defn- info-for-duplicate-field
-  "The Clojure JDBC driver automatically appends suffixes like `count_2` to duplicate columns if multiple columns come back with the same name;
-   since at this time we can't resolve those normally (#1786) fall back to using the metadata for the first column (e.g., `count`).
-   This is definitely a HACK, but in most cases this should be correct (or at least better than the generic info) for the important things like type information."
+  "The Clojure JDBC driver automatically appends suffixes like `count_2` to duplicate columns if multiple columns come
+  back with the same name; since at this time we can't resolve those normally (#1786) fall back to using the metadata
+  for the first column (e.g., `count`). This is definitely a HACK, but in most cases this should be correct (or at
+  least better than the generic info) for the important things like type information."
   [fields k]
   (when-let [[_ field-name-without-suffix] (re-matches #"^(.*)_\d+$" (name k))]
     (some (fn [{field-name :field-name, :as field}]
@@ -230,22 +246,19 @@
                         (assert (every? keyword? <>)))
         missing-keys  (set/difference (set actual-keys) expected-keys)]
     (when (seq missing-keys)
-      (log/warn (u/format-color 'yellow "There are fields we (maybe) weren't expecting in the results: %s\nExpected: %s\nActual: %s"
+      (log/warn (u/format-color 'yellow (str "There are fields we (maybe) weren't expecting in the results: %s\n"
+                                             "Expected: %s\nActual: %s")
                   missing-keys expected-keys (set actual-keys))))
     (concat fields (for [k     actual-keys
                          :when (contains? missing-keys k)]
                      (info-for-missing-key inner-query fields k (map k initial-rows))))))
 
 (defn- fixup-renamed-fields
-  "After executing the query, it's possible that java.jdbc changed the
-  name of the column that was originally in the query. This can happen
-  when java.jdbc finds two columns with the same name, it will append
-  an integer (like _2) on the end. When this is done on an existing
-  column in the query, this function fixes that up, updating the
-  column information we have with the new name that java.jdbc assigned
-  the column. The `add-unknown-fields-if-needed` function above is
-  similar, but is used when we don't have existing information on that
-  column and need to infer it."
+  "After executing the query, it's possible that java.jdbc changed the name of the column that was originally in the
+  query. This can happen when java.jdbc finds two columns with the same name; it will append an integer (like _2) on
+  the end. When this is done on an existing column in the query, this function fixes that up, updating the column
+  information we have with the new name that java.jdbc assigned the column. The `add-unknown-fields-if-needed`
+  function above is similar, but is used when we don't have existing information on that column and need to infer it."
   [query actual-keys]
   (let [expected-field-names (set (map (comp keyword name) (:fields query)))]
     (if (= expected-field-names (set actual-keys))
@@ -290,7 +303,8 @@
                                                    (integer? id))]
                                    id))
        (constantly nil)))
-  ;; Fetch the foreign key fields whose origin is in the returned Fields, create a map of origin-field-id->destination-field-id
+  ;; Fetch the foreign key fields whose origin is in the returned Fields, create a map of
+  ;; origin-field-id->destination-field-id
   ([fields fk-ids]
    (when (seq fk-ids)
      (fk-field->dest-fn fields fk-ids (db/select-id->field :fk_target_field_id Field
@@ -320,8 +334,8 @@
                         {}))))))
 
 (defn- resolve-sort-and-format-columns
-  "Collect the Fields referenced in INNER-QUERY, sort them according to the rules at the top
-   of this page, format them as expected by the frontend, and return the results."
+  "Collect the Fields referenced in INNER-QUERY, sort them according to the rules at the top of this page, format them
+  as expected by the frontend, and return the results."
   [inner-query result-keys initial-rows]
   {:pre [(sequential? result-keys)]}
   (when (seq result-keys)
@@ -352,8 +366,8 @@
   "Post-process a structured query to add metadata to the results. This stage:
 
   1.  Sorts the results according to the rules at the top of this page
-  2.  Resolves the Fields returned in the results and adds information like `:columns` and `:cols`
-      expected by the frontend."
+  2.  Resolves the Fields returned in the results and adds information like `:columns` and `:cols` expected by the
+      frontend."
   [query {:keys [columns rows], :as results}]
   (let [row-maps (for [row rows]
                    (zipmap columns row))
diff --git a/src/metabase/query_processor/interface.clj b/src/metabase/query_processor/interface.clj
index 524b0ff2942..45a7973f40f 100644
--- a/src/metabase/query_processor/interface.clj
+++ b/src/metabase/query_processor/interface.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor.interface
   "Definitions of `Field`, `Value`, and other record types present in an expanded query.
-   This namespace should just contain definitions of various protocols and record types; associated logic
-   should go in `metabase.query-processor.middleware.expand`."
+   This namespace should just contain definitions of various protocols and record types; associated logic should go in
+  `metabase.query-processor.middleware.expand`."
   (:require [metabase.models
              [dimension :as dim]
              [field :as field]]
@@ -12,7 +12,7 @@
   (:import clojure.lang.Keyword
            java.sql.Timestamp))
 
-;;; # ------------------------------------------------------------ CONSTANTS ------------------------------------------------------------
+;;; --------------------------------------------------- CONSTANTS ----------------------------------------------------
 
 (def ^:const absolute-max-results
   "Maximum number of rows the QP should ever return.
@@ -22,11 +22,11 @@
   1048576)
 
 
-;;; # ------------------------------------------------------------ DYNAMIC VARS ------------------------------------------------------------
+;;; -------------------------------------------------- DYNAMIC VARS --------------------------------------------------
 
 (def ^:dynamic ^Boolean *disable-qp-logging*
-  "Should we disable logging for the QP? (e.g., during sync we probably want to turn it off to keep logs less cluttered)."
-  false)
+  "Should we disable logging for the QP? (e.g., during sync we probably want to turn it off to keep logs less
+  cluttered)." false)
 
 
 (def ^:dynamic *driver*
@@ -59,7 +59,7 @@
 ;; 2. Relevant Fields and Tables are fetched from the DB, and the placeholder objects are "resolved"
 ;;    and replaced with objects like Field, Value, etc.
 
-;;; # ------------------------------------------------------------ JOINING OBJECTS ------------------------------------------------------------
+;;; ------------------------------------------------ JOINING OBJECTS -------------------------------------------------
 
 ;; These are just used by the QueryExpander to record information about how joins should occur.
 
@@ -73,7 +73,7 @@
                         schema       :- (s/maybe su/NonBlankString)
                         join-alias   :- su/NonBlankString])
 
-;;; # ------------------------------------------------------------ PROTOCOLS ------------------------------------------------------------
+;;; --------------------------------------------------- PROTOCOLS ----------------------------------------------------
 
 (defprotocol IField
   "Methods specific to the Query Expander `Field` record type."
@@ -83,9 +83,10 @@
      `nil` as the first part.)"))
 ;; TODO - Yes, I know, that makes no sense. `annotate/qualify-field-name` expects it that way tho
 
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                                             FIELDS                                                                             |
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     FIELDS                                                     |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 
 (s/defrecord FieldValues [field-value-id          :- su/IntGreaterThanZero
@@ -145,8 +146,13 @@
   "Valid units for a `RelativeDateTimeValue`."
   #{:minute :hour :day :week :month :quarter :year})
 
-(def DatetimeFieldUnit "Schema for datetime units that are valid for `DateTimeField` forms." (s/named (apply s/enum datetime-field-units)          "Valid datetime unit for a field"))
-(def DatetimeValueUnit "Schema for datetime units that valid for relative datetime values."  (s/named (apply s/enum relative-datetime-value-units) "Valid datetime unit for a relative datetime"))
+(def DatetimeFieldUnit
+  "Schema for datetime units that are valid for `DateTimeField` forms."
+  (s/named (apply s/enum datetime-field-units) "Valid datetime unit for a field"))
+
+(def DatetimeValueUnit
+  "Schema for datetime units that are valid for relative datetime values."
+  (s/named (apply s/enum relative-datetime-value-units) "Valid datetime unit for a relative datetime"))
 
 (defn datetime-field-unit?
   "Is UNIT a valid datetime unit for a `DateTimeField` form?"
@@ -158,8 +164,9 @@
   [unit]
   (contains? relative-datetime-value-units (keyword unit)))
 
-;; TODO - maybe we should figure out some way to have the schema validate that the driver supports field literals, like we do for some of the other clauses.
-;; Ideally we'd do that in a more generic way (perhaps in expand, we could make the clauses specify required feature metadata and have that get checked automatically?)
+;; TODO - maybe we should figure out some way to have the schema validate that the driver supports field literals,
+;; like we do for some of the other clauses. Ideally we'd do that in a more generic way (perhaps in expand, we could
+;; make the clauses specify required feature metadata and have that get checked automatically?)
 (s/defrecord FieldLiteral [field-name    :- su/NonBlankString
                            base-type     :- su/FieldType]
   clojure.lang.Named
@@ -194,13 +201,21 @@
     [nil expression-name]))
 
 
-;;; Placeholder Types
+;;; Placeholder Types. See explanation above RE what these mean
+
+(def FKFieldID
+  "Schema for an ID for a foreign key Field. If `*driver*` is bound this will throw an Exception if this is non-nil
+  and the driver does not support foreign keys."
+  (s/constrained
+   su/IntGreaterThanZero
+   (fn [_] (or (assert-driver-supports :foreign-keys) true))
+   "foreign-keys is not supported by this driver."))
 
 ;; Replace Field IDs with these during first pass
+;; fk-field-id = the ID of the Field we point to (if any). For example if we are `bird_id` then that is the ID of
+;; bird.id
 (s/defrecord FieldPlaceholder [field-id            :- su/IntGreaterThanZero
-                               fk-field-id         :- (s/maybe (s/constrained su/IntGreaterThanZero
-                                                                              (fn [_] (or (assert-driver-supports :foreign-keys) true)) ; assert-driver-supports will throw Exception if driver is bound
-                                                                              "foreign-keys is not supported by this driver."))         ; and driver does not support foreign keys
+                               fk-field-id         :- (s/maybe FKFieldID)
                                datetime-unit       :- (s/maybe DatetimeFieldUnit)
                                remapped-from       :- (s/maybe s/Str)
                                remapped-to         :- (s/maybe s/Str)
@@ -237,9 +252,9 @@
            "AnyField: field, ag field reference, expression, expression reference, or field literal."))
 
 
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                                             VALUES                                                                             |
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     VALUES                                                     |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 (def LiteralDatetimeString
   "Schema for an MBQL datetime string literal, in ISO-8601 format."
@@ -247,19 +262,23 @@
 
 (def LiteralDatetime
   "Schema for an MBQL literal datetime value: and ISO-8601 string or `java.sql.Date`."
-  (s/named (s/cond-pre java.sql.Date LiteralDatetimeString) "Valid datetime literal (must be ISO-8601 string or java.sql.Date)"))
+  (s/named (s/cond-pre java.sql.Date LiteralDatetimeString)
+           "Valid datetime literal (must be ISO-8601 string or java.sql.Date)"))
 
 (def Datetime
   "Schema for an MBQL datetime value: an ISO-8601 string, `java.sql.Date`, or a relative dateitme form."
-  (s/named (s/cond-pre RelativeDatetime LiteralDatetime) "Valid datetime (must ISO-8601 string literal or a relative-datetime form)"))
+  (s/named (s/cond-pre RelativeDatetime LiteralDatetime)
+           "Valid datetime (must ISO-8601 string literal or a relative-datetime form)"))
 
 (def OrderableValueLiteral
   "Schema for something that is orderable value in MBQL (either a number or datetime)."
   (s/named (s/cond-pre s/Num Datetime) "Valid orderable value (must be number or datetime)"))
 
 (def AnyValueLiteral
-  "Schema for anything that is a considered a valid value literal in MBQL - `nil`, a `Boolean`, `Number`, `String`, or relative datetime form."
-  (s/named (s/maybe (s/cond-pre s/Bool su/NonBlankString OrderableValueLiteral)) "Valid value (must be nil, boolean, number, string, or a relative-datetime form)"))
+  "Schema for anything that is considered a valid value literal in MBQL - `nil`, a `Boolean`, `Number`, `String`, or
+  relative datetime form."
+  (s/named (s/maybe (s/cond-pre s/Bool su/NonBlankString OrderableValueLiteral))
+           "Valid value (must be nil, boolean, number, string, or a relative-datetime form)"))
 
 
 ;; Value is the expansion of a value within a QL clause
@@ -278,7 +297,8 @@
                             field :- DateTimeField])
 
 (def OrderableValue
-  "Schema for an instance of `Value` whose `:value` property is itself orderable (a datetime or number, i.e. a `OrderableValueLiteral`)."
+  "Schema for an instance of `Value` whose `:value` property is itself orderable (a datetime or number, i.e. an
+  `OrderableValueLiteral`)."
   (s/named (s/cond-pre
             DateTimeValue
             RelativeDateTimeValue
@@ -287,7 +307,8 @@
            "Value that is orderable (Value whose :value is something orderable, like a datetime or number)"))
 
 (def StringValue
-  "Schema for an instance of `Value` whose `:value` property is itself a string (a datetime or string, i.e. a `OrderableValueLiteral`)."
+  "Schema for an instance of `Value` whose `:value` property is itself a string (a datetime or string, i.e. a
+  `OrderableValueLiteral`)."
   (s/named (s/constrained Value (comp string? :value))
            "Value that is a string (Value whose :value is a string)"))
 
@@ -317,7 +338,10 @@
 
 (def OrderableValuePlaceholder
   "`ValuePlaceholder` schema with the additional constraint that the value be orderable (a number or datetime)."
-  (s/constrained ValuePlaceholder (comp (complement (s/checker OrderableValueLiteral)) :value) ":value must be orderable (number or datetime)"))
+  (s/constrained
+   ValuePlaceholder
+   (comp (complement (s/checker OrderableValueLiteral)) :value)
+   ":value must be orderable (number or datetime)"))
 
 (def OrderableValueOrPlaceholder
   "Schema for an `OrderableValue` (instance of `Value` whose `:value` is orderable) or a placeholder for one."
@@ -342,9 +366,9 @@
   (s/named (s/cond-pre AnyField AnyValue) "Field or value"))
 
 
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                                             CLAUSES                                                                            |
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                    CLAUSES                                                     |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 ;;; aggregation
 
@@ -352,7 +376,8 @@
                                                                    "Valid aggregation type")
                                       custom-name      :- (s/maybe su/NonBlankString)])
 
-(s/defrecord AggregationWithField [aggregation-type :- (s/named (s/enum :avg :count :cumulative-sum :distinct :max :min :stddev :sum)
+(s/defrecord AggregationWithField [aggregation-type :- (s/named (s/enum :avg :count :cumulative-sum :distinct :max
+                                                                        :min :stddev :sum)
                                                                 "Valid aggregation type")
                                    field            :- (s/cond-pre AnyField
                                                                    Expression)
@@ -442,9 +467,10 @@
      (s/optional-key :template_tags) s/Any}
     (s/recursive #'Query)))
 
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                                             QUERY                                                                              |
-;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     QUERY                                                      |
+;;; +----------------------------------------------------------------------------------------------------------------+
 
 (def Query
   "Schema for an MBQL query."
diff --git a/src/metabase/query_processor/middleware/resolve.clj b/src/metabase/query_processor/middleware/resolve.clj
index 2e05b72a00a..e63f26f3e3a 100644
--- a/src/metabase/query_processor/middleware/resolve.clj
+++ b/src/metabase/query_processor/middleware/resolve.clj
@@ -1,5 +1,8 @@
 (ns metabase.query-processor.middleware.resolve
-  "Resolve references to `Fields`, `Tables`, and `Databases` in an expanded query dictionary."
+  "Resolve references to `Fields`, `Tables`, and `Databases` in an expanded query dictionary. During the `expand`
+  phase of the Query Processor, forms like `[:field-id 10]` are replaced with placeholder objects of types like
+  `FieldPlaceholder` or similar. During this phase, we'll take those placeholder objects and fetch information from
+  the DB and replace them with actual objects like `Field`."
   (:refer-clojure :exclude [resolve])
   (:require [clj-time.coerce :as tcoerce]
             [clojure
@@ -53,11 +56,11 @@
 (defn- rename-field-value-keys
   [field-values]
   (set/rename-keys (into {} field-values)
-                   {:id                      :field-value-id
-                    :field_id                :field-id
-                    :human_readable_values   :human-readable-values
-                    :updated_at              :updated-at
-                    :created_at              :created-at}))
+                   {:id                    :field-value-id
+                    :field_id              :field-id
+                    :human_readable_values :human-readable-values
+                    :updated_at            :updated-at
+                    :created_at            :created-at}))
 
 (defn convert-db-field
   "Converts a field map from that database to a Field instance"
@@ -71,7 +74,7 @@
                           vals)))
       (update :dimensions (fn [dims]
                             (if (seq dims)
-                              (-> dims rename-dimension-keys i/map->Dimensions )
+                              (-> dims rename-dimension-keys i/map->Dimensions)
                               dims)))))
 
 ;;; ----------------------------------------------- IRESOLVE PROTOCOL ------------------------------------------------
@@ -104,12 +107,17 @@
 
 ;;; ----------------------------------------------------- FIELD ------------------------------------------------------
 
-(defn- field-unresolved-field-id [{:keys [parent parent-id]}]
+(defn- field-unresolved-field-id
+  "Return the ID of an unresolved Field belonging to a Field. (This means we'll just return the ID of the parent if
+  it's not yet resolved.)"
+  [{:keys [parent parent-id]}]
   (or (unresolved-field-id parent)
       (when (instance? FieldPlaceholder parent)
         parent-id)))
 
-(defn- field-resolve-field [{:keys [parent parent-id], :as this} field-id->field]
+(defn- field-resolve-field
+  "Attempt to resolve the `:parent` of a `Field`, if there is one and it's a `FieldPlaceholder`."
+  [{:keys [parent parent-id], :as this} field-id->field]
   (cond
     parent    (or (when (instance? FieldPlaceholder parent)
                     (when-let [resolved (resolve-field parent field-id->field)]
@@ -119,7 +127,11 @@
                                       (i/map->FieldPlaceholder {:field-id parent-id})))
     :else     this))
 
-(defn- field-resolve-table [{:keys [table-id fk-field-id field-id], :as this} fk-id+table-id->table]
+(defn- field-resolve-table
+  "Resolve the `Table` belonging to a resolved `Field`. (This is only used for resolving Tables referred to via
+  foreign keys.)"
+  [{:keys [table-id fk-field-id field-id], :as this} fk-id+table-id->table]
+  ;; TODO - use schema for this instead of preconditions :D
   {:pre [(map? fk-id+table-id->table) (every? vector? (keys fk-id+table-id->table))]}
   (let [table (or (fk-id+table-id->table [fk-field-id table-id])
                   ;; if we didn't find a matching table check and see whether we're trying to use a field from another
@@ -169,7 +181,9 @@
   [& maps]
   (apply merge-with #(or %2 %1) maps))
 
-(defn- field-ph-resolve-field [{:keys [field-id datetime-unit binning-strategy binning-param], :as this} field-id->field]
+(defn- field-ph-resolve-field
+  "Attempt to resolve the `Field` for a `FieldPlaceholder`. Return a resolved `Field` or `DateTimeField`."
+  [{:keys [field-id datetime-unit binning-strategy binning-param], :as this} field-id->field]
   (if-let [{:keys [base-type special-type], :as field} (some-> (field-id->field field-id)
                                                                convert-db-field
                                                                (merge-non-nils (select-keys this [:fk-field-id :remapped-from :remapped-to :field-display-name])))]
@@ -223,7 +237,8 @@
 
         (instance? RelativeDatetime value)
         (do (s/validate RelativeDatetime value)
-            (s/validate RelativeDateTimeValue (i/map->RelativeDateTimeValue {:field this, :amount (:amount value), :unit (:unit value)})))
+            (s/validate RelativeDateTimeValue (i/map->RelativeDateTimeValue
+                                               {:field this, :amount (:amount value), :unit (:unit value)})))
 
         (nil? value)
         nil
@@ -231,7 +246,9 @@
         :else
         (throw (Exception. (format "Invalid value '%s': expected a DateTime." value)))))))
 
-(defn- value-ph-resolve-field [{:keys [field-placeholder value]} field-id->field]
+(defn- value-ph-resolve-field
+  "Attempt to resolve the `Field` for a `ValuePlaceholder`. Return a resolved `Value` or `DateTimeValue`."
+  [{:keys [field-placeholder value]} field-id->field]
   (let [resolved-field (resolve-field field-placeholder field-id->field)]
     (when-not resolved-field
       (throw (Exception. (format "Unable to resolve field: %s" field-placeholder))))
@@ -261,9 +278,31 @@
   [expanded-query-dict]
   (assoc expanded-query-dict :fk-field-ids (collect-fk-field-ids expanded-query-dict)))
 
+(defn- add-parent-placeholder-if-needed
+  "If `field` has a `:parent-id` add a `FieldPlaceholder` for its parent."
+  [field]
+  (assoc field :parent (when-let [parent-id (:parent-id field)]
+                         (i/map->FieldPlaceholder {:field-id parent-id}))))
+
+(defn- fetch-fields
+  "Fetch a sequence of Fields with appropriate information from the database for the IDs in `field-ids`, excluding
+  'sensitive' Fields. Then hydrate information that will be used by the QP and transform the keys so they're
+  Clojure-style as expected by the rest of the QP code."
+  [field-ids]
+  (as-> (db/select [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id
+                    :description :id :fingerprint]
+          :visibility_type [:not= "sensitive"]
+          :id              [:in field-ids]) fields
+    ;; hydrate values & dimensions for the `fields` we just fetched from the DB
+    (hydrate fields :values :dimensions)
+    ;; now for each Field call `rename-mb-field-keys` on it to take underscored DB-style names and replace them with
+    ;; hyphenated Clojure-style names. (I know, this is a questionable step!)
+    (map rename-mb-field-keys fields)
+    ;; now add a FieldPlaceholder for its parent if the Field has a parent-id so it can get resolved on the next pass
+    (map add-parent-placeholder-if-needed fields)))
+
 (defn- resolve-fields
-  "Resolve the `Fields` in an EXPANDED-QUERY-DICT.
-   Record `:table-ids` referenced in the Query."
+  "Resolve the `Fields` in an EXPANDED-QUERY-DICT. Record `:table-ids` referenced in the Query."
   [expanded-query-dict]
   (loop [max-iterations 5, expanded-query-dict expanded-query-dict]
     (when (neg? max-iterations)
@@ -273,22 +312,13 @@
         ;; If there are no more Field IDs to resolve we're done.
         expanded-query-dict
         ;; Otherwise fetch + resolve the Fields in question
-        (let [fields (->> (u/key-by :id (-> (db/select [field/Field :name :display_name :base_type :special_type
-                                                        :visibility_type :table_id :parent_id :description :id
-                                                        :fingerprint]
-                                              :visibility_type [:not= "sensitive"]
-                                              :id              [:in field-ids])
-                                            (hydrate :values)
-                                            (hydrate :dimensions)))
-                          (m/map-vals rename-mb-field-keys)
-                          (m/map-vals #(assoc % :parent (when-let [parent-id (:parent-id %)]
-                                                          (i/map->FieldPlaceholder {:field-id parent-id})))))]
+        (let [fields (fetch-fields field-ids)]
           (->>
            ;; Now record the IDs of Tables these fields references in the :table-ids property of the expanded query
            ;; dict. Those will be used for Table resolution in the next step.
-           (update expanded-query-dict :table-ids set/union (set (map :table-id (vals fields))))
+           (update expanded-query-dict :table-ids set/union (set (map :table-id fields)))
            ;; Walk the query and resolve all fields
-           (walk/postwalk (u/rpartial resolve-field fields))
+           (walk/postwalk (u/rpartial resolve-field (u/key-by :field-id fields)))
            ;; Recurse in case any new (nested) unresolved fields were found.
            (recur (dec max-iterations))))))))
 
@@ -317,7 +347,8 @@
   "Fetch info for PK/FK `Fields` for the JOIN-TABLES referenced in a Query."
   [source-table-id fk-field-ids]
   (when (seq fk-field-ids)
-    (vec (for [{:keys [source-field-name source-field-id target-field-id target-field-name target-table-id target-table-name target-table-schema]} (fk-field-ids->info source-table-id fk-field-ids)]
+    (vec (for [{:keys [source-field-name source-field-id target-field-id target-field-name target-table-id
+                       target-table-name target-table-schema]} (fk-field-ids->info source-table-id fk-field-ids)]
            (i/map->JoinTable {:table-id     target-table-id
                               :table-name   target-table-name
                               :schema       target-table-schema
@@ -337,7 +368,8 @@
     (update-in expanded-query-dict [:query :source-query] (fn [source-query]
                                                             (if (:native source-query)
                                                               source-query
-                                                              (:query (resolve-tables (assoc expanded-query-dict :query source-query))))))
+                                                              (:query (resolve-tables (assoc expanded-query-dict
+                                                                                        :query source-query))))))
     ;; otherwise we can resolve tables in the (current) top-level
     (let [table-ids             (conj table-ids source-table-id)
           joined-tables         (fk-field-ids->joined-tables source-table-id fk-field-ids)
diff --git a/src/metabase/types.clj b/src/metabase/types.clj
index 815d465cae6..4ad5b2f8d37 100644
--- a/src/metabase/types.clj
+++ b/src/metabase/types.clj
@@ -1,4 +1,9 @@
-(ns metabase.types)
+(ns metabase.types
+  "The Metabase Hierarchical Type System (MHTS). This is a hierarchy where types derive from one or more parent types,
+   which in turn derive from their own parents. This makes it possible to add new types without needing to add
+   corresponding mappings in the frontend or other places. For example, a Database may want a type called something
+   like `:type/CaseInsensitiveText`; we can add this type as a derivative of `:type/Text` and everywhere else can
+   continue to treat it as such until further notice.")
 
 (derive :type/Collection :type/*)
 
diff --git a/src/metabase/util.clj b/src/metabase/util.clj
index 6c19c683e29..b1511012bdb 100644
--- a/src/metabase/util.clj
+++ b/src/metabase/util.clj
@@ -837,7 +837,7 @@
      ;; -> {:a 100}"
   [m ks]
   (into {} (for [k     ks
-                 :when (not (nil? (get m k)))]
+                 :when (some? (get m k))]
              {k (get m k)})))
 
 (defn select-keys-when
diff --git a/src/metabase/util/honeysql_extensions.clj b/src/metabase/util/honeysql_extensions.clj
index 2b08b2a006c..6b7dee71846 100644
--- a/src/metabase/util/honeysql_extensions.clj
+++ b/src/metabase/util/honeysql_extensions.clj
@@ -107,7 +107,7 @@
 
 
 (defn cast
-  "Generate a statement like `cast(x AS c)`/"
+  "Generate a statement like `cast(x AS c)`."
   [c x]
   (hsql/call :cast x (hsql/raw (name c))))
 
diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj
index f30842851a5..3b7f96056f9 100644
--- a/test/metabase/api/dataset_test.clj
+++ b/test/metabase/api/dataset_test.clj
@@ -35,7 +35,7 @@
                              (.endsWith (name k) "_id"))
                  (if (or (= :created_at k)
                          (= :updated_at k))
-                   [k (not (nil? v))]
+                   [k (some? v)]
                    [k (f v)]))))))
 
 (defn format-response [m]
diff --git a/test/metabase/api/metric_test.clj b/test/metabase/api/metric_test.clj
index 073645d4fb7..bf2fa44cd1b 100644
--- a/test/metabase/api/metric_test.clj
+++ b/test/metabase/api/metric_test.clj
@@ -45,8 +45,8 @@
   (-> (into {} metric)
       (dissoc :id :table_id)
       (update :creator #(into {} %))
-      (assoc :created_at (not (nil? created_at))
-             :updated_at (not (nil? updated_at)))))
+      (assoc :created_at (some? created_at)
+             :updated_at (some? updated_at))))
 
 
 ;; ## /api/metric/* AUTHENTICATION Tests
diff --git a/test/metabase/api/pulse_test.clj b/test/metabase/api/pulse_test.clj
index f1c7ab3445e..f410e46ef83 100644
--- a/test/metabase/api/pulse_test.clj
+++ b/test/metabase/api/pulse_test.clj
@@ -52,8 +52,8 @@
 (defn- pulse-response [{:keys [created_at updated_at], :as pulse}]
   (-> pulse
       (dissoc :id)
-      (assoc :created_at (not (nil? created_at))
-             :updated_at (not (nil? updated_at)))))
+      (assoc :created_at (some? created_at)
+             :updated_at (some? updated_at))))
 
 
 ;; ## /api/pulse/* AUTHENTICATION Tests
diff --git a/test/metabase/api/segment_test.clj b/test/metabase/api/segment_test.clj
index a783d9b4318..02aa9c063fc 100644
--- a/test/metabase/api/segment_test.clj
+++ b/test/metabase/api/segment_test.clj
@@ -35,8 +35,8 @@
   (-> (into {} segment)
       (dissoc :id :table_id)
       (update :creator #(into {} %))
-      (assoc :created_at (not (nil? created_at))
-             :updated_at (not (nil? updated_at)))))
+      (assoc :created_at (some? created_at)
+             :updated_at (some? updated_at))))
 
 
 ;; ## /api/segment/* AUTHENTICATION Tests
diff --git a/test/metabase/driver/postgres_test.clj b/test/metabase/driver/postgres_test.clj
index 6f8b194794c..f4900d14a52 100644
--- a/test/metabase/driver/postgres_test.clj
+++ b/test/metabase/driver/postgres_test.clj
@@ -1,4 +1,5 @@
 (ns metabase.driver.postgres-test
+  "Tests for features/capabilities specific to PostgreSQL driver, such as support for Postgres UUID or enum types."
   (:require [clojure.java.jdbc :as jdbc]
             [expectations :refer :all]
             [honeysql.core :as hsql]
@@ -27,8 +28,8 @@
 
 (def ^:private ^PostgresDriver pg-driver (PostgresDriver.))
 
-;; # Check that SSL params get added the connection details in the way we'd like
-;; ## no SSL -- this should *not* include the key :ssl (regardless of its value) since that will cause the PG driver to use SSL anyway
+;; Check that SSL params get added to the connection details in the way we'd like # no SSL -- this should *not*
+;; include the key :ssl (regardless of its value) since that will cause the PG driver to use SSL anyway
 (expect
   {:user        "camsaul"
    :classname   "org.postgresql.Driver"
@@ -150,7 +151,8 @@
    [[(hsql/raw "'192.168.1.1'::inet")]
     [(hsql/raw "'10.4.4.15'::inet")]]])
 
-;; Filtering by inet columns should add the appropriate SQL cast, e.g. `cast('192.168.1.1' AS inet)` (otherwise this wouldn't work)
+;; Filtering by inet columns should add the appropriate SQL cast, e.g. `cast('192.168.1.1' AS inet)` (otherwise this
+;; wouldn't work)
 (expect-with-engine :postgres
   [[1]]
   (rows (data/dataset metabase.driver.postgres-test/ip-addresses
@@ -180,10 +182,6 @@
   {:tables #{{:schema "public", :name "test_mview"}}}
   (do
     (drop-if-exists-and-create-db! "materialized_views_test")
-    (jdbc/execute! (sql/connection-details->spec pg-driver (i/database->connection-details pg-driver :server nil))
-                   ["DROP DATABASE IF EXISTS materialized_views_test;
-                     CREATE DATABASE materialized_views_test;"]
-                   {:transaction? false})
     (let [details (i/database->connection-details pg-driver :db {:database-name "materialized_views_test"})]
       (jdbc/execute! (sql/connection-details->spec pg-driver details)
                      ["DROP MATERIALIZED VIEW IF EXISTS test_mview;
@@ -210,7 +208,8 @@
       (tt/with-temp Database [database {:engine :postgres, :details (assoc details :dbname "fdw_test")}]
         (driver/describe-database pg-driver database)))))
 
-;; make sure that if a view is dropped and recreated that the original Table object is marked active rather than a new one being created (#3331)
+;; make sure that if a view is dropped and recreated that the original Table object is marked active rather than a new
+;; one being created (#3331)
 (expect-with-engine :postgres
   [{:name "angry_birds", :active true}]
   (let [details (i/database->connection-details pg-driver :db {:database-name "dropped_views_test"})
@@ -260,7 +259,8 @@
   "America/Chicago"
   (get-timezone-with-report-timezone "America/Chicago"))
 
-;; ok, check that if we try to put in a fake timezone that the query still reëxecutes without a custom timezone. This should give us the same result as if we didn't try to set a timezone at all
+;; ok, check that if we try to put in a fake timezone that the query still reëxecutes without a custom timezone. This
+;; should give us the same result as if we didn't try to set a timezone at all
 (expect-with-engine :postgres
   (get-timezone-with-report-timezone nil)
   (get-timezone-with-report-timezone "Crunk Burger"))
diff --git a/test/metabase/sync/analyze/fingerprint_test.clj b/test/metabase/sync/analyze/fingerprint_test.clj
index 0ef8ebc51f4..c6c24fe4deb 100644
--- a/test/metabase/sync/analyze/fingerprint_test.clj
+++ b/test/metabase/sync/analyze/fingerprint_test.clj
@@ -98,10 +98,8 @@
      ;; no type/Float stuff should be included for 1
      [:and
       [:< :fingerprint_version 1]
-      [:in :base_type #{"type/SerializedJSON" "type/Name" "type/Text" "type/UUID" "type/State" "type/City"
-                        "type/Country" "type/URL" "type/Email" "type/ImageURL" "type/AvatarURL"
-                        "type/Description"}]]]]}
-  (with-redefs [i/fingerprint-version->types-that-should-be-re-fingerprinted {1 #{:type/Float :type/Text}
+      [:in :base_type #{"type/URL" "type/ImageURL" "type/AvatarURL"}]]]]}
+  (with-redefs [i/fingerprint-version->types-that-should-be-re-fingerprinted {1 #{:type/Float :type/URL}
                                                                               2 #{:type/Float}}]
     (#'fingerprint/honeysql-for-fields-that-need-fingerprint-updating)))
 
diff --git a/test/metabase/test/data/bigquery.clj b/test/metabase/test/data/bigquery.clj
index 1edbfd3d1e2..22a1f83d4b8 100644
--- a/test/metabase/test/data/bigquery.clj
+++ b/test/metabase/test/data/bigquery.clj
@@ -143,7 +143,7 @@
                                    (u/prog1 (if (instance? java.util.Date v)
                                               (DateTime. ^java.util.Date v) ; convert to Google version of DateTime, otherwise it doesn't work (!)
                                               v)
-                                            (assert (not (nil? <>)))))) ; make sure v is non-nil
+                                            (assert (some? <>))))) ; make sure v is non-nil
              :id (inc i)))))
 
 (defn- load-tabledef! [dataset-name {:keys [table-name field-definitions], :as tabledef}]
diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj
index 6b9cfb7a12f..35ae98bd7f2 100644
--- a/test/metabase/test/util.clj
+++ b/test/metabase/test/util.clj
@@ -108,7 +108,7 @@
                    (if (map? maybe-map)
                      (reduce-kv (fn [acc k v]
                                   (if (pred k)
-                                    (assoc acc k (not (nil? v)))
+                                    (assoc acc k (some? v))
                                     (assoc acc k v)))
                                 {} maybe-map)
                      maybe-map))
-- 
GitLab