diff --git a/src/metabase/models/field_values.clj b/src/metabase/models/field_values.clj
index 3f425ee713bbee014f5a16859627aba164eec1f7..d99f8ee13b3fff2fef3a8d8178fe848fec8aa073 100644
--- a/src/metabase/models/field_values.clj
+++ b/src/metabase/models/field_values.clj
@@ -88,8 +88,9 @@
       (map #(get orig-remappings % (str %)) new-values))))
 
 (defn create-or-update-field-values!
-  "Create or update the FieldValues object for `field`. If the FieldValues object already exists, then update values for
-   it; otherwise create a new FieldValues object with the newly fetched values."
+  "Create or update the FieldValues object for 'field`. If the FieldValues object already exists, then update values for
+   it; otherwise create a new FieldValues object with the newly fetched values. Returns whether the field values were
+   created/updated/deleted as a result of this call."
   [field & [human-readable-values]]
   (let [field-values (FieldValues :field_id (u/get-id field))
         values       (distinct-values field)
@@ -118,7 +119,8 @@
         (log/debug (trs "Storing updated FieldValues for Field {0}..." field-name))
         (db/update-non-nil-keys! FieldValues (u/get-id field-values)
           :values                values
-          :human_readable_values (fixup-human-readable-values field-values values)))
+          :human_readable_values (fixup-human-readable-values field-values values))
+        ::fv-updated)
       ;; if FieldValues object doesn't exist create one
       values
       (do
@@ -126,10 +128,13 @@
         (db/insert! FieldValues
           :field_id              (u/get-id field)
           :values                values
-          :human_readable_values human-readable-values))
+          :human_readable_values human-readable-values)
+        ::fv-created)
       ;; otherwise this Field isn't eligible, so delete any FieldValues that might exist
       :else
-      (db/delete! FieldValues :field_id (u/get-id field)))))
+      (do
+        (db/delete! FieldValues :field_id (u/get-id field))
+        ::fv-deleted))))
 
 
 (defn field-values->pairs
@@ -148,7 +153,8 @@
   {:pre [(integer? field-id)]}
   (when (field-should-have-field-values? field)
     (or (FieldValues :field_id field-id)
-        (create-or-update-field-values! field human-readable-values))))
+        (when (contains? #{::fv-created ::fv-updated} (create-or-update-field-values! field human-readable-values))
+          (FieldValues :field_id field-id)))))
 
 (defn save-field-values!
   "Save the `FieldValues` for FIELD-ID, creating them if needed, otherwise updating them."
diff --git a/src/metabase/sync/analyze.clj b/src/metabase/sync/analyze.clj
index 956e6f03e09cbd7a66457298b3151e61909048ad..dc2a298bd4fce60430ff6f5ab2cd53d74f57a9c2 100644
--- a/src/metabase/sync/analyze.clj
+++ b/src/metabase/sync/analyze.clj
@@ -14,6 +14,7 @@
              #_[table-row-count :as table-row-count]]
             [metabase.util :as u]
             [metabase.util.date :as du]
+            [puppetlabs.i18n.core :refer [trs]]
             [schema.core :as s]
             [toucan.db :as db]))
 
@@ -92,6 +93,29 @@
       (when progress-bar-result
         (log/info (u/format-color 'blue "%s Analyzed %s %s" step progress-bar-result (sync-util/name-for-logging table)))))))
 
+(defn- fingerprint-fields-summary [{:keys [fingerprints-attempted updated-fingerprints no-data-fingerprints failed-fingerprints]}]
+  (trs "Fingerprint updates attempted {0}, updated {1}, no data found {2}, failed {3}"
+       fingerprints-attempted updated-fingerprints no-data-fingerprints failed-fingerprints))
+
+(defn- classify-fields-summary [{:keys [fields-classified fields-failed]}]
+  (trs "Total number of fields classified {0}, {1} failed"
+       fields-classified fields-failed))
+
+(defn- classify-tables-summary [{:keys [total-tables tables-classified]}]
+  (trs "Total number of tables classified {0}, {1} updated"
+       total-tables tables-classified))
+
+(defn- make-analyze-steps [tables log-fn]
+  [(sync-util/create-sync-step "fingerprint-fields"
+                               #(fingerprint/fingerprint-fields-for-db! % tables log-fn)
+                               fingerprint-fields-summary)
+   (sync-util/create-sync-step "classify-fields"
+                               #(classify/classify-fields-for-db! % tables log-fn)
+                               classify-fields-summary)
+   (sync-util/create-sync-step "classify-tables"
+                               #(classify/classify-tables-for-db! % tables log-fn)
+                               classify-tables-summary)])
+
 (s/defn analyze-db!
   "Perform in-depth analysis on the data for all Tables in a given DATABASE.
    This is dependent on what each database driver supports, but includes things like cardinality testing and table row
@@ -100,8 +124,5 @@
   (sync-util/sync-operation :analyze database (format "Analyze data for %s" (sync-util/name-for-logging database))
     (let [tables (sync-util/db->sync-tables database)]
       (sync-util/with-emoji-progress-bar [emoji-progress-bar (inc (* 3 (count tables)))]
-        (let [log-progress-fn (maybe-log-progress emoji-progress-bar)]
-          (fingerprint/fingerprint-fields-for-db! database tables log-progress-fn)
-          (classify/classify-fields-for-db! database tables log-progress-fn)
-          (classify/classify-tables-for-db! database tables log-progress-fn)
-          (update-fields-last-analyzed-for-db! database tables))))))
+        (sync-util/run-sync-operation "analyze" database (make-analyze-steps tables (maybe-log-progress emoji-progress-bar)))
+        (update-fields-last-analyzed-for-db! database tables)))))
diff --git a/src/metabase/sync/analyze/classify.clj b/src/metabase/sync/analyze/classify.clj
index a1abb6d1af2a3bc756c47c9184d99da0551f9459..fa323457be2b1bbe89b1f58d5bb0aa77ce527ff5 100644
--- a/src/metabase/sync/analyze/classify.clj
+++ b/src/metabase/sync/analyze/classify.clj
@@ -62,7 +62,8 @@
                     Field
                     Table)
           (u/get-id original-model)
-        values-to-set))))
+        values-to-set)
+      true)))
 
 (def ^:private classifiers
   "Various classifier functions available. These should all take two args, a `FieldInstance` and a possibly `nil`
@@ -117,8 +118,13 @@
   like inferring (and setting) the special types and preview display status for Fields belonging to TABLE."
   [table :- i/TableInstance]
   (when-let [fields (fields-to-classify table)]
-    (doseq [field fields]
-      (classify! field))))
+    {:fields-classified (count fields)
+     :fields-failed     (sync-util/sum-numbers (fn [field]
+                                                 (let [result (classify! field)]
+                                                   (if (instance? Exception result)
+                                                     1
+                                                     0)))
+                                               fields)}))
 
 (s/defn ^:always-validate classify-table!
   "Run various classifiers on the TABLE. These do things like inferring (and
@@ -131,15 +137,24 @@
   [database :- i/DatabaseInstance
    tables :- [i/TableInstance]
    log-progress-fn]
-  (doseq [table tables]
-    (classify-table! table)
-    (log-progress-fn "clasify-tables" table)))
+  {:total-tables      (count tables)
+   :tables-classified (sync-util/sum-numbers (fn [table]
+                                               (let [result (classify-table! table)]
+                                                 (log-progress-fn "classify-tables" table)
+                                                 (if result
+                                                   1
+                                                   0)))
+                                             tables)})
 
 (s/defn classify-fields-for-db!
   "Classify all fields found in a given database"
   [database :- i/DatabaseInstance
    tables :- [i/TableInstance]
    log-progress-fn]
-  (doseq [table tables]
-    (classify-fields! table)
-    (log-progress-fn "classify-fields" table)))
+  (apply merge-with +
+         {:fields-classified 0, :fields-failed 0}
+         (map (fn [table]
+                (let [result (classify-fields! table)]
+                  (log-progress-fn "classify-fields" table)
+                  result))
+              tables)))
diff --git a/src/metabase/sync/analyze/fingerprint.clj b/src/metabase/sync/analyze/fingerprint.clj
index 8349cfed779d20f8205b9b200bb0da4b887d0ca5..c185f4172efa397cc5ae948a428d790226e40cf5 100644
--- a/src/metabase/sync/analyze/fingerprint.clj
+++ b/src/metabase/sync/analyze/fingerprint.clj
@@ -51,13 +51,26 @@
       :fingerprint_version i/latest-fingerprint-version
       :last_analyzed       nil)))
 
+(defn- empty-stats-map [fields-count]
+  {:no-data-fingerprints   0
+   :failed-fingerprints    0
+   :updated-fingerprints   0
+   :fingerprints-attempted fields-count})
+
 (s/defn ^:private fingerprint-table!
   [table :- i/TableInstance, fields :- [i/FieldInstance]]
-  (doseq [[field sample] (sample/sample-fields table fields)]
-    (when sample
-      (sync-util/with-error-handling (format "Error generating fingerprint for %s" (sync-util/name-for-logging field))
-        (let [fingerprint (fingerprint field sample)]
-          (save-fingerprint! field fingerprint))))))
+  (let [fields-to-sample (sample/sample-fields table fields)]
+    (reduce (fn [count-info [field sample]]
+              (if-not sample
+                (update count-info :no-data-fingerprints inc)
+                (let [result (sync-util/with-error-handling (format "Error generating fingerprint for %s"
+                                                                    (sync-util/name-for-logging field))
+                               (save-fingerprint! field (fingerprint field sample)))]
+                  (if (instance? Exception result)
+                    (update count-info :failed-fingerprints inc)
+                    (update count-info :updated-fingerprints inc)))))
+            (empty-stats-map (count fields-to-sample))
+            fields-to-sample)))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -154,14 +167,17 @@
 (s/defn fingerprint-fields!
   "Generate and save fingerprints for all the Fields in TABLE that have not been previously analyzed."
   [table :- i/TableInstance]
-  (when-let [fields (fields-to-fingerprint table)]
-    (fingerprint-table! table fields)))
+  (if-let [fields (fields-to-fingerprint table)]
+    (fingerprint-table! table fields)
+    (empty-stats-map 0)))
 
 (s/defn fingerprint-fields-for-db!
   "Invokes `fingerprint-fields!` on every table in `database`"
   [database :- i/DatabaseInstance
    tables :- [i/TableInstance]
    log-progress-fn]
-  (doseq [table tables]
-    (fingerprint-fields! table)
-    (log-progress-fn "fingerprint-fields" table)))
+  (apply merge-with + (for [table tables
+                            :let [result (fingerprint-fields! table)]]
+                        (do
+                          (log-progress-fn "fingerprint-fields" table)
+                          result))))
diff --git a/src/metabase/sync/field_values.clj b/src/metabase/sync/field_values.clj
index f894bc7f350772475460941078f116e84fc238b0..b27d6844879a4a42605545cb10443eec491384d0 100644
--- a/src/metabase/sync/field_values.clj
+++ b/src/metabase/sync/field_values.clj
@@ -8,6 +8,7 @@
              [interface :as i]
              [util :as sync-util]]
             [metabase.util :as u]
+            [puppetlabs.i18n.core :refer [trs]]
             [schema.core :as s]
             [toucan.db :as db]))
 
@@ -16,22 +17,48 @@
     (log/debug (format "Based on cardinality and/or type information, %s should no longer have field values.\n"
                        (sync-util/name-for-logging field))
                "Deleting FieldValues...")
-    (db/delete! FieldValues :field_id (u/get-id field))))
+    (db/delete! FieldValues :field_id (u/get-id field))
+    ::field-values/fv-deleted))
 
 (s/defn ^:private update-field-values-for-field! [field :- i/FieldInstance]
   (log/debug (u/format-color 'green "Looking into updating FieldValues for %s" (sync-util/name-for-logging field)))
   (field-values/create-or-update-field-values! field))
 
+(defn- update-field-value-stats-count [counts-map result]
+  (if (instance? Exception result)
+    (update counts-map :errors inc)
+    (case result
+      ::field-values/fv-created
+      (update counts-map :created inc)
+      ::field-values/fv-updated
+      (update counts-map :updated inc)
+      ::field-values/fv-deleted
+      (update counts-map :deleted inc)
+
+      counts-map)))
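+;; A minimal sketch of the tally (illustrative comment only, not part of the patch logic): a successful
+;; `::field-values/fv-updated` result bumps the `:updated` counter, e.g.
+;;   (update-field-value-stats-count {:errors 0, :created 0, :updated 0, :deleted 0}
+;;                                   ::field-values/fv-updated)
+;;   ;; => {:errors 0, :created 0, :updated 1, :deleted 0}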
 
 (s/defn update-field-values-for-table!
   "Update the cached FieldValues for all Fields (as needed) for TABLE."
   [table :- i/TableInstance]
-  (doseq [field (db/select Field :table_id (u/get-id table), :active true, :visibility_type "normal")]
-    (sync-util/with-error-handling (format "Error updating field values for %s" (sync-util/name-for-logging field))
-      (if (field-values/field-should-have-field-values? field)
-        (update-field-values-for-field! field)
-        (clear-field-values-for-field! field)))))
+  (reduce (fn [fv-change-counts field]
+            (let [result (sync-util/with-error-handling (format "Error updating field values for %s" (sync-util/name-for-logging field))
+                           (if (field-values/field-should-have-field-values? field)
+                             (update-field-values-for-field! field)
+                             (clear-field-values-for-field! field)))]
+              (update-field-value-stats-count fv-change-counts result)))
+          {:errors 0, :created 0, :updated 0, :deleted 0}
+          (db/select Field :table_id (u/get-id table), :active true, :visibility_type "normal")))
+
+(s/defn ^:private update-field-values-for-database!
+  [database :- i/DatabaseInstance]
+  (apply merge-with + (map update-field-values-for-table! (sync-util/db->sync-tables database))))
+
+(defn- update-field-values-summary [{:keys [created updated deleted errors]}]
+  (trs "Updated {0} field value sets, created {1}, deleted {2} with {3} errors"
+       updated created deleted errors))
 
+(def ^:private field-values-steps
+  [(sync-util/create-sync-step "update-field-values" update-field-values-for-database! update-field-values-summary)])
 
 (s/defn update-field-values!
   "Update the cached FieldValues (distinct values for categories and certain other fields that are shown
@@ -39,5 +66,4 @@
   [database :- i/DatabaseInstance]
   (sync-util/sync-operation :cache-field-values database (format "Cache field values in %s"
                                                                  (sync-util/name-for-logging database))
-    (doseq [table (sync-util/db->sync-tables database)]
-      (update-field-values-for-table! table))))
+    (sync-util/run-sync-operation "field values scanning" database field-values-steps)))
diff --git a/src/metabase/sync/sync_metadata.clj b/src/metabase/sync/sync_metadata.clj
index e5a32f6d47d1c59fb3e08568126a4ba21aa4746a..be380d25680f85087d191ed586bdcffd5ff2b1d5 100644
--- a/src/metabase/sync/sync_metadata.clj
+++ b/src/metabase/sync/sync_metadata.clj
@@ -6,7 +6,9 @@
    2.  Sync fields (`metabase.sync.sync-metadata.fields`)
    3.  Sync FKs    (`metabase.sync.sync-metadata.fks`)
    4.  Sync Metabase Metadata table (`metabase.sync.sync-metadata.metabase-metadata`)"
-  (:require [metabase.sync
+  (:require [clj-time.core :as time]
+            [clojure.tools.logging :as log]
+            [metabase.sync
              [interface :as i]
              [util :as sync-util]]
             [metabase.sync.sync-metadata
@@ -15,23 +17,43 @@
              [metabase-metadata :as metabase-metadata]
              [sync-timezone :as sync-tz]
              [tables :as sync-tables]]
+            [metabase.util :as u]
+            [puppetlabs.i18n.core :refer [trs]]
             [schema.core :as s]))
 
+(defn- sync-fields-summary [{:keys [total-fields updated-fields] :as step-info}]
+  (trs "Total number of fields sync''d {0}, number of fields updated {1}"
+       total-fields updated-fields))
+
+(defn- sync-tables-summary [{:keys [total-tables updated-tables] :as step-info}]
+  (trs "Total number of tables sync''d {0}, number of tables updated {1}"
+       total-tables updated-tables))
+
+(defn- sync-timezone-summary [{:keys [timezone-id]}]
+  (trs "Found timezone id {0}" timezone-id))
+
+(defn- sync-fks-summary [{:keys [total-fks updated-fks total-failed]}]
+  (trs "Total number of foreign keys sync''d {0}, {1} updated and {2} tables failed to update"
+       total-fks updated-fks total-failed))
+
+(def ^:private sync-steps
+  [(sync-util/create-sync-step "sync-timezone" sync-tz/sync-timezone! sync-timezone-summary)
+   ;; Make sure the relevant table models are up-to-date
+   (sync-util/create-sync-step "sync-tables" sync-tables/sync-tables! sync-tables-summary)
+   ;; Now for each table, sync the fields
+   (sync-util/create-sync-step "sync-fields" sync-fields/sync-fields! sync-fields-summary)
+   ;; Now for each table, sync the FKS. This has to be done after syncing all the fields to make sure target fields exist
+   (sync-util/create-sync-step "sync-fks" sync-fks/sync-fks! sync-fks-summary)
+   ;; finally, sync the metadata metadata table if it exists.
+   (sync-util/create-sync-step "sync-metabase-metadata" metabase-metadata/sync-metabase-metadata!)])
+
 (s/defn sync-db-metadata!
   "Sync the metadata for a Metabase DATABASE. This makes sure child Table & Field objects are synchronized."
   [database :- i/DatabaseInstance]
   (sync-util/sync-operation :sync-metadata database (format "Sync metadata for %s" (sync-util/name-for-logging database))
-    (sync-tz/sync-timezone! database)
-    ;; Make sure the relevant table models are up-to-date
-    (sync-tables/sync-tables! database)
-    ;; Now for each table, sync the fields
-    (sync-fields/sync-fields! database)
-    ;; Now for each table, sync the FKS. This has to be done after syncing all the fields to make sure target fields exist
-    (sync-fks/sync-fks! database)
-    ;; finally, sync the metadata metadata table if it exists.
-    (metabase-metadata/sync-metabase-metadata! database)))
-
-(s/defn ^:always-validatge sync-table-metadata!
+    (sync-util/run-sync-operation "sync" database sync-steps)))
+
+(s/defn sync-table-metadata!
   "Sync the metadata for an individual TABLE -- make sure Fields and FKs are up-to-date."
   [table :- i/TableInstance]
   (sync-fields/sync-fields-for-table! table)
diff --git a/src/metabase/sync/sync_metadata/fields.clj b/src/metabase/sync/sync_metadata/fields.clj
index abb2584e0a9b3697af3b092a26e6502ddd957280..1aa22346f0148869fb7d004508d4ff36e59251ff 100644
--- a/src/metabase/sync/sync_metadata/fields.clj
+++ b/src/metabase/sync/sync_metadata/fields.clj
@@ -119,19 +119,23 @@
   "Update the metadata for a Metabase Field as needed if any of the info coming back from the DB has changed."
   [table :- i/TableInstance, metabase-field :- TableMetadataFieldWithID, field-metadata :- i/TableMetadataField]
   (let [{old-database-type :database-type, old-base-type :base-type} metabase-field
-        {new-database-type :database-type, new-base-type :base-type} field-metadata]
+        {new-database-type :database-type, new-base-type :base-type} field-metadata
+        new-db-type?                                                 (not= old-database-type new-database-type)
+        new-base-type?                                               (not= old-base-type new-base-type)]
     ;; If the driver is reporting a different `database-type` than what we have recorded in the DB, update it
-    (when-not (= old-database-type new-database-type)
+    (when new-db-type?
       (log/info (format "Database type of %s has changed from '%s' to '%s'."
                         (field-metadata-name-for-logging table metabase-field)
                         old-database-type new-database-type))
       (db/update! Field (u/get-id metabase-field), :database_type new-database-type))
     ;; Now do the same for `base-type`
-    (when-not (= old-base-type new-base-type)
+    (when new-base-type?
       (log/info (format "Base type of %s has changed from '%s' to '%s'."
                         (field-metadata-name-for-logging table metabase-field)
                         old-base-type new-base-type))
-      (db/update! Field (u/get-id metabase-field), :base_type new-base-type))))
+      (db/update! Field (u/get-id metabase-field), :base_type new-base-type))
+
+    (or new-db-type? new-base-type?)))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -165,43 +169,61 @@
 (declare sync-field-instances!)
 
 (s/defn ^:private update-field-chunk!
-  [table :- i/TableInstance, known-fields :- {s/Str TableMetadataFieldWithID}, fields-to-update :- [i/TableMetadataField]]
-  (doseq [our-field fields-to-update
-          :let [db-field (get known-fields (canonical-name our-field))]]
-    ;; if field exists in both metadata sets then make sure the data recorded about the Field such as database_type is
-    ;; up-to-date...
-    (update-field-metadata-if-needed! table db-field our-field)
+  [table :- i/TableInstance
+   known-fields :- {s/Str TableMetadataFieldWithID}
+   our-field :- i/TableMetadataField]
+  (let [db-field (get known-fields (canonical-name our-field))
+        ;; if field exists in both metadata sets then make sure the data recorded about the Field such as
+        ;; database_type is up-to-date...
+        updated-count (if (update-field-metadata-if-needed! table db-field our-field)
+                        ;; One field was updated
+                        1
+                        ;; No fields were updated
+                        0)]
     ;; ...and then recursively check the nested fields.
-    (when-let [db-nested-fields (seq (:nested-fields our-field))]
-      (sync-field-instances! table (set db-nested-fields) (:nested-fields db-field) (:id our-field)))))
+    (if-let [db-nested-fields (seq (:nested-fields our-field))]
+      ;; Any nested fields that were updated should be included in this chunk's count
+      (+ updated-count (sync-field-instances! table (set db-nested-fields) (:nested-fields db-field) (:id our-field)))
+      ;; No nested fields, so just the updated count from above
+      updated-count)))
 
 (s/defn ^:private sync-field-instances!
   "Make sure the instances of Metabase Field are in-sync with the DB-METADATA."
   [table :- i/TableInstance, db-metadata :- #{i/TableMetadataField}, our-metadata :- #{TableMetadataFieldWithID}
    parent-id :- ParentID]
   (let [known-fields (u/key-by canonical-name our-metadata)]
-    ;; Loop thru fields in DB-METADATA. Create/reactivate any fields that don't exist in OUR-METADATA.
-    (doseq [db-field-chunk (partition-all 1000 db-metadata)]
-      (sync-util/with-error-handling (format "Error checking if Fields '%s' needs to be created or reactivated"
-                                             (pr-str (map :name db-field-chunk)))
-        (let [known-field-pred (comp known-fields canonical-name)
-              fields-to-update (filter known-field-pred db-field-chunk)
-              new-fields       (remove known-field-pred db-field-chunk)]
-
-          (update-field-chunk! table known-fields fields-to-update)
-          ;; otherwise if field doesn't exist, create or reactivate it
-          (when (seq new-fields)
-            (create-or-reactivate-field-chunk! table new-fields parent-id))))))
-
-  ;; ok, loop thru Fields in OUR-METADATA. Mark Fields as inactive if they don't exist in DB-METADATA.
-  (doseq [our-field our-metadata]
-    (sync-util/with-error-handling (format "Error checking if '%s' needs to be retired" (:name our-field))
-      (if-let [db-field (matching-field-metadata our-field db-metadata)]
-        ;; if field exists in both metadata sets we just need to recursively check the nested fields
-        (when-let [our-nested-fields (seq (:nested-fields our-field))]
-          (sync-field-instances! table (:nested-fields db-field) (set our-nested-fields) (:id our-field)))
-        ;; otherwise if field exists in our metadata but not DB metadata time to make it inactive
-        (retire-field! table our-field)))))
+    (+
+     ;; Loop thru fields in DB-METADATA. Create/reactivate any fields that don't exist in OUR-METADATA.
+     (sync-util/sum-numbers (fn [db-field-chunk]
+                              (sync-util/with-error-handling (format "Error checking if Fields '%s' needs to be created or reactivated"
+                                                                     (pr-str (map :name db-field-chunk)))
+                                (let [known-field-pred    (comp known-fields canonical-name)
+                                      fields-to-update    (filter known-field-pred db-field-chunk)
+                                      new-fields          (remove known-field-pred db-field-chunk)
+                                      updated-chunk-count (sync-util/sum-numbers #(update-field-chunk! table known-fields %) fields-to-update)]
+
+                                  ;; otherwise if field doesn't exist, create or reactivate it
+                                  (when (seq new-fields)
+                                    (create-or-reactivate-field-chunk! table new-fields parent-id))
+                                  ;; Add the updated number of fields with the number of newly created fields
+                                  (+ updated-chunk-count (count new-fields)))))
+                            (partition-all 1000 db-metadata))
+
+     ;; ok, loop thru Fields in OUR-METADATA. Mark Fields as inactive if they don't exist in DB-METADATA.
+     (sync-util/sum-numbers (fn [our-field]
+                              (sync-util/with-error-handling (format "Error checking if '%s' needs to be retired" (:name our-field))
+                                (if-let [db-field (matching-field-metadata our-field db-metadata)]
+                                  ;; if field exists in both metadata sets we just need to recursively check the nested fields
+                                  (if-let [our-nested-fields (seq (:nested-fields our-field))]
+                                    (sync-field-instances! table (:nested-fields db-field) (set our-nested-fields) (:id our-field))
+                                    ;; No fields were updated
+                                    0)
+                                  ;; otherwise if field exists in our metadata but not DB metadata time to make it inactive
+                                  (do
+                                    (retire-field! table our-field)
+                                    ;; 1 field was updated (retired)
+                                    1))))
+                            our-metadata))))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -217,25 +239,32 @@
                                   :parent_id parent-id)
         field-name->db-metadata (u/key-by canonical-name db-metadata)]
     ;; Make sure special types are up-to-date for all the fields
-    (doseq [field existing-fields
-            :let  [db-field         (get field-name->db-metadata (canonical-name field))
-                   new-special-type (special-type db-field)]
-            :when (and db-field
-                       (or
-                        ;; If the base_type has changed, we need to updated it
-                        (not= (:base_type field) (:base-type db-field))
-                        ;; If the base_type hasn't changed, but we now have a special_type, we should update it. We
-                        ;; should not overwrite a special_type that is already present (could have been specified by
-                        ;; the user).
-                        (and (not (:special_type field)) new-special-type)))]
-      ;; update special type if one came back from DB metadata but Field doesn't currently have one
-      (db/update! Field (u/get-id field)
-                  (merge {:base_type (:base-type db-field)}
-                         (when-not (:special_type field)
-                           {:special_type new-special-type})))
-      ;; now recursively do the same for any nested fields
-      (when-let [db-nested-fields (seq (:nested-fields db-field))]
-        (update-metadata! table (set db-nested-fields) (u/get-id field))))))
+    (sync-util/sum-numbers (fn [field]
+                             (let [db-field         (get field-name->db-metadata (canonical-name field))
+                                   new-special-type (special-type db-field)]
+                               (if (and db-field
+                                        (or
+                                         ;; If the base_type has changed, we need to updated it
+                                         (not= (:base_type field) (:base-type db-field))
+                                         ;; If the base_type hasn't changed, but we now have a special_type, we should update
+                                         ;; it. We should not overwrite a special_type that is already present (could have
+                                         ;; been specified by the user).
+                                         (and (not (:special_type field)) new-special-type)))
+                                 (do
+                                   ;; update special type if one came back from DB metadata but Field doesn't currently have one
+                                   (db/update! Field (u/get-id field)
+                                     (merge {:base_type (:base-type db-field)}
+                                            (when-not (:special_type field)
+                                              {:special_type new-special-type})))
+                                   ;; now recursively do the same for any nested fields
+                                   (if-let [db-nested-fields (seq (:nested-fields db-field))]
+                                     ;; This field was updated + any nested fields
+                                     (+ 1 (update-metadata! table (set db-nested-fields) (u/get-id field)))
+                                     ;; No nested fields, so just this field was updated
+                                     1))
+                                 ;; The field was not updated
+                                 0)))
+                           existing-fields)))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -312,20 +341,23 @@
   ([database :- i/DatabaseInstance, {:keys [fields_hash] :as table} :- i/TableInstance]
    (sync-util/with-error-handling (format "Error syncing fields for %s" (sync-util/name-for-logging table))
      (let [db-field-metadata (db-metadata database table)
-           db-hash     (calculate-table-hash db-field-metadata)]
+           total-fields      (count db-field-metadata)
+           db-hash           (calculate-table-hash db-field-metadata)]
        (if (and fields_hash (= db-hash fields_hash))
-         (log/debugf "Hash of '%s' matches stored hash, skipping fields sync for table" (sync-util/name-for-logging table))
          (do
-           ;; make sure the instances of Field are in-sync
-           (sync-field-instances! table db-field-metadata (our-metadata table) nil)
-           ;; now that tables are synced and fields created as needed make sure field properties are in sync
-           (update-metadata! table db-field-metadata nil)
+            (log/debugf "Hash of '%s' matches stored hash, skipping fields sync for table" (sync-util/name-for-logging table))
+            {:updated-fields 0 :total-fields total-fields})
+         ;; make sure the instances of Field are in-sync
+         (let [updated-fields (+ (sync-field-instances! table db-field-metadata (our-metadata table) nil)
+                                 ;; now that tables are synced and fields created as needed make sure field properties are in sync
+                                 (update-metadata! table db-field-metadata nil))]
            ;; Either there was no hash or there has been some change, update the hash too
-           (let [new-metadata (db-metadata database table)]
-             (db/update! Table (u/get-id table) :fields_hash (calculate-table-hash new-metadata)))))))))
+           (db/update! Table (u/get-id table) :fields_hash (calculate-table-hash (db-metadata database table)))
+           {:updated-fields updated-fields
+            :total-fields   total-fields}))))))
 
 (s/defn sync-fields!
   "Sync the Fields in the Metabase application database for all the Tables in a DATABASE."
   [database :- i/DatabaseInstance]
-  (doseq [table (sync-util/db->sync-tables database)]
-    (sync-fields-for-table! database table)))
+  (let [tables (sync-util/db->sync-tables database)]
+    (apply merge-with + (map #(sync-fields-for-table! database %) tables))))
diff --git a/src/metabase/sync/sync_metadata/fks.clj b/src/metabase/sync/sync_metadata/fks.clj
index 54b61dd58c418043d0403d2b024051dc587a7e1a..4a41fe366eafadd84fe8d254156df5684ca97590 100644
--- a/src/metabase/sync/sync_metadata/fks.clj
+++ b/src/metabase/sync/sync_metadata/fks.clj
@@ -55,7 +55,8 @@
                 (sync-util/name-for-logging dest-field)))
     (db/update! Field (u/get-id source-field)
       :special_type       :type/FK
-      :fk_target_field_id (u/get-id dest-field))))
+      :fk_target_field_id (u/get-id dest-field))
+    true))
 
 
 (s/defn sync-fks-for-table!
@@ -64,12 +65,24 @@
    (sync-fks-for-table! (table/database table) table))
   ([database :- i/DatabaseInstance, table :- i/TableInstance]
    (sync-util/with-error-handling (format "Error syncing FKs for %s" (sync-util/name-for-logging table))
-     (doseq [fk (fetch-metadata/fk-metadata database table)]
-       (mark-fk! database table fk)))))
+     (let [fks-to-update (fetch-metadata/fk-metadata database table)]
+       {:total-fks (count fks-to-update)
+        :updated-fks (sync-util/sum-numbers (fn [fk]
+                                              (if (mark-fk! database table fk)
+                                                1
+                                                0))
+                                            fks-to-update)}))))
 
 (s/defn sync-fks!
   "Sync the foreign keys in a DATABASE. This sets appropriate values for relevant Fields in the Metabase application DB
    based on values from the `FKMetadata` returned by `describe-table-fks`."
   [database :- i/DatabaseInstance]
-  (doseq [table (sync-util/db->sync-tables database)]
-    (sync-fks-for-table! database table)))
+  (reduce (fn [update-info table]
+            (let [table-fk-info (sync-fks-for-table! database table)]
+              (if (instance? Exception table-fk-info)
+                (update update-info :total-failed inc)
+                (merge-with + update-info table-fk-info))))
+          {:total-fks    0
+           :updated-fks  0
+           :total-failed 0}
+          (sync-util/db->sync-tables database)))
diff --git a/src/metabase/sync/sync_metadata/metabase_metadata.clj b/src/metabase/sync/sync_metadata/metabase_metadata.clj
index f71d587115d281f702617056abbabb5255f4ed9b..4f2551874834d612ef50f3bf0b03a6a5ac2a075d 100644
--- a/src/metabase/sync/sync_metadata/metabase_metadata.clj
+++ b/src/metabase/sync/sync_metadata/metabase_metadata.clj
@@ -97,4 +97,5 @@
     ;; Hopefully this is never the case.
     (doseq [table (:tables (fetch-metadata/db-metadata database))]
       (when (is-metabase-metadata-table? table)
-        (sync-metabase-metadata-table! (driver/->driver database) database table)))))
+        (sync-metabase-metadata-table! (driver/->driver database) database table)))
+    {}))
diff --git a/src/metabase/sync/sync_metadata/sync_timezone.clj b/src/metabase/sync/sync_metadata/sync_timezone.clj
index 6712a98ace4b7d20df46b19f5d60c577f2dc7322..a02c5e053d0fceb93d60abb134b64afbef9f3464 100644
--- a/src/metabase/sync/sync_metadata/sync_timezone.clj
+++ b/src/metabase/sync/sync_metadata/sync_timezone.clj
@@ -20,6 +20,7 @@
                         (driver/current-db-time database)
                         extract-time-zone)]
       (when-not (= tz-id (:timezone database))
-        (db/update! Database (:id database) {:timezone tz-id})))
+        (db/update! Database (:id database) {:timezone tz-id}))
+      {:timezone-id tz-id})
     (catch Exception e
       (log/warn e "Error syncing database timezone"))))
diff --git a/src/metabase/sync/sync_metadata/tables.clj b/src/metabase/sync/sync_metadata/tables.clj
index 41dd2fef6256d985d48d1c4d6225c67ae783101b..da532fc92f5c4315ae47f3f800f220e4f94bd807 100644
--- a/src/metabase/sync/sync_metadata/tables.clj
+++ b/src/metabase/sync/sync_metadata/tables.clj
@@ -152,4 +152,6 @@
     ;; mark old tables as inactive
     (when (seq old-tables)
       (sync-util/with-error-handling (format "Error retiring tables for %s" (sync-util/name-for-logging database))
-        (retire-tables! database old-tables)))))
+        (retire-tables! database old-tables)))
+    {:updated-tables (+ (count new-tables) (count old-tables))
+     :total-tables   (count our-metadata)}))
diff --git a/src/metabase/sync/util.clj b/src/metabase/sync/util.clj
index d5988306c8f4992dd71b784a1f3d32f0fb60f630..54928f17a692d10ff3e738c5af2bf501ad4c2bb5 100644
--- a/src/metabase/sync/util.clj
+++ b/src/metabase/sync/util.clj
@@ -2,6 +2,9 @@
   "Utility functions and macros to abstract away some common patterns and operations across the sync processes, such
   as logging start/end messages."
   (:require [buddy.core.hash :as buddy-hash]
+            [clj-time
+             [coerce :as tcoerce]
+             [core :as time]]
             [clojure.math.numeric-tower :as math]
             [clojure.string :as str]
             [clojure.tools.logging :as log]
@@ -14,7 +17,9 @@
             [metabase.query-processor.interface :as qpi]
             [metabase.sync.interface :as i]
             [metabase.util.date :as du]
+            [puppetlabs.i18n.core :refer [trs]]
             [ring.util.codec :as codec]
+            [schema.core :as s]
             [taoensso.nippy :as nippy]
             [toucan.db :as db]))
 
@@ -84,6 +89,17 @@
                                                 :running_time total-time-ms}))
        nil))))
 
+(defn- with-start-and-finish-logging'
+  "Logs start/finish messages using `log-fn`, timing `f`"
+  {:style/indent 1}
+  [log-fn message f]
+  (let [start-time (System/nanoTime)
+        _          (log-fn (u/format-color 'magenta "STARTING: %s" message))
+        result     (f)]
+    (log-fn (u/format-color 'magenta "FINISHED: %s (%s)"
+              message
+              (du/format-nanoseconds (- (System/nanoTime) start-time))))
+    result))
 
 (defn- with-start-and-finish-logging
   "Log MESSAGE about a process starting, then run F, and then log a MESSAGE about it finishing.
@@ -91,13 +107,12 @@
   {:style/indent 1}
   [message f]
   (fn []
-    (let [start-time (System/nanoTime)]
-      (log/info (u/format-color 'magenta "STARTING: %s" message))
-      (f)
-      (log/info (u/format-color 'magenta "FINISHED: %s (%s)"
-                  message
-                  (du/format-nanoseconds (- (System/nanoTime) start-time)))))))
+    (with-start-and-finish-logging' #(log/info %) message f)))
 
+(defn with-start-and-finish-debug-logging
+  "Similar to `with-start-and-finish-logging except invokes `f` and returns its result and logs at the debug level"
+  [message f]
+  (with-start-and-finish-logging' #(log/debug %) message f))
 
 (defn- with-db-logging-disabled
   "Disable all QP and DB logging when running BODY. (This should be done for *all* sync-like processes to avoid
@@ -260,3 +275,131 @@
        buddy-hash/md5
        ;; Convert the hash bytes to a string for storage/comparison with the hash in the database
        codec/base64-encode))
+
+(s/defn calculate-duration :- s/Str
+  "Given two datetimes, caculate the time between them, return the result as a string"
+  [begin-time :- (s/protocol tcoerce/ICoerce)
+   end-time :- (s/protocol tcoerce/ICoerce)]
+  (-> (- (tcoerce/to-long end-time) (tcoerce/to-long begin-time))
+      ;; Millis -> Nanos
+      (* 1000000)
+      du/format-nanoseconds))
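+;; For illustration (comment only, not part of the patch): with two clj-time datetimes one second apart,
+;;   (calculate-duration (time/date-time 2018 1 1 0 0 0) (time/date-time 2018 1 1 0 0 1))
+;; yields a human-readable string such as "1.0 s" (exact text depends on `du/format-nanoseconds`).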
+
+(def StepSpecificMetadata
+  "A step function can return any metadata and is used by the related LogSummaryFunction to provide step-specific
+  details about run"
+  {s/Keyword s/Any})
+
+(def ^:private TimedSyncMetadata
+  "Metadata common to both sync steps and an entire sync/analyze operation run"
+  {:start-time     s/Str
+   :end-time       s/Str
+   :duration       s/Str})
+
+(def StepRunMetadata
+  "Map with metadata about the step. Contains both generic information like `start-time` and `end-time` and step
+  specific information"
+  (merge TimedSyncMetadata
+         {:log-summary-fn (s/maybe (s/=> s/Str StepRunMetadata) #_(s/pred ifn?))}
+         StepSpecificMetadata))
+
+(def StepNameWithMetadata
+  "Pair with the step name and metadata about the completed step run"
+  [(s/one s/Str "step name") (s/one StepRunMetadata "step metadata")])
+
+(def SyncOperationMetadata
+  "Timing and step information for the entire sync or analyze run"
+  (assoc TimedSyncMetadata :steps [StepNameWithMetadata]))
+
+(def LogSummaryFunction
+  "A log summary function takes a `StepRunMetadata` and returns a string with a step-specific log message"
+  (s/=> s/Str StepRunMetadata))
+
+(def StepDefinition
+  "Defines a step. `:sync-fn` runs the step, returns a map that contains step specific metadata. `log-summary-fn`
+  takes that metadata and turns it into a string for logging"
+  {:sync-fn        (s/=> StepRunMetadata i/DatabaseInstance)
+   :step-name      s/Str
+   :log-summary-fn (s/maybe LogSummaryFunction)})
+
+(defn create-sync-step
+  "Creates and returns a step suitable for `run-step-with-metadata`. See `StepDefinition` for more info."
+  ([step-name sync-fn]
+   (create-sync-step step-name sync-fn nil))
+  ([step-name sync-fn log-summary-fn]
+   {:sync-fn sync-fn
+    :step-name step-name
+    :log-summary-fn log-summary-fn}))
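+;; A hypothetical usage sketch (the step name and summary text below are illustrative, not part of this patch):
+;;   (create-sync-step "count-tables"
+;;                     (fn [database] {:table-count (count (db->sync-tables database))})
+;;                     (fn [{:keys [table-count]}] (str table-count " tables counted")))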
+
+(defn- datetime->str [datetime]
+  (du/->iso-8601-datetime datetime "UTC"))
+
+(s/defn run-step-with-metadata :- StepNameWithMetadata
+  "Runs `step` on `database returning metadata from the run"
+  [database :- i/DatabaseInstance
+   {:keys [step-name sync-fn log-summary-fn] :as step} :- StepDefinition]
+  (let [start-time (time/now)
+        results    (with-start-and-finish-debug-logging (trs "step ''{0}'' for {1}"
+                                                             step-name
+                                                             (name-for-logging database))
+                     #(sync-fn database))
+        end-time   (time/now)]
+    [step-name (assoc results
+                 :start-time (datetime->str start-time)
+                 :end-time (datetime->str end-time)
+                 :duration (calculate-duration start-time end-time)
+                 :log-summary-fn log-summary-fn)]))
+
+(s/defn ^:private log-sync-summary
+  "Log a sync/analyze summary message with info from each step"
+  [operation :- s/Str
+   database :- i/DatabaseInstance
+   {:keys [start-time end-time duration steps log-summary-fn]} :- SyncOperationMetadata]
+  ;; Note this needs to either stay nested in the `debug` macro call or be guarded by a `log/enabled?`
+  ;; call. Constructing the log message below requires some work; no need to incur that cost if debug logging isn't enabled
+  (log/debug
+   (str
+    (format
+     (str "\n#################################################################\n"
+          "# %s\n"
+          "# %s\n"
+          "# %s\n"
+          "# %s\n")
+     (trs "Completed {0} on {1}" operation (:name database))
+     (trs "Start: {0}" start-time)
+     (trs "End: {0}" end-time)
+     (trs "Duration: {0}" duration))
+    (apply str (for [[step-name {:keys [start-time end-time duration log-summary-fn] :as step-info}] steps]
+                 (format (str "# ---------------------------------------------------------------\n"
+                              "# %s\n"
+                              "# %s\n"
+                              "# %s\n"
+                              "# %s\n"
+                              (when log-summary-fn
+                                (format "# %s\n" (log-summary-fn step-info))))
+                         (trs "Completed step ''{0}''" step-name)
+                         (trs "Start: {0}" start-time)
+                         (trs "End: {0}" end-time)
+                         (trs "Duration: {0}" duration))))
+    "#################################################################\n")))
+
+(s/defn run-sync-operation
+  "Run `sync-steps` and log a summary message"
+  [operation :- s/Str
+   database :- i/DatabaseInstance
+   sync-steps :- [StepDefinition]]
+  (let [start-time    (time/now)
+        step-metadata (mapv #(run-step-with-metadata database %) sync-steps)
+        end-time      (time/now)]
+    (log-sync-summary operation database {:start-time (datetime->str start-time)
+                                          :end-time   (datetime->str end-time)
+                                          :duration   (calculate-duration start-time end-time)
+                                          :steps      step-metadata})))
+
+(defn sum-numbers
+  "Similar to a 2-arg call to `map`, but will add all numbers that result from the invocations of `f`"
+  [f coll]
+  (apply + (for [item coll
+                 :let [result (f item)]
+                 :when (number? result)]
+             result)))
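+;; For example (illustrative comment only): non-numeric results are skipped, so
+;;   (sum-numbers #(when (even? %) 1) [1 2 3 4]) ;; => 2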
diff --git a/test/metabase/sync/analyze/fingerprint_test.clj b/test/metabase/sync/analyze/fingerprint_test.clj
index 0bef3751bceb5af730dacb79c6d5d55179fc40da..077bfd3dd00450971ba679ef571731341e7dc45a 100644
--- a/test/metabase/sync/analyze/fingerprint_test.clj
+++ b/test/metabase/sync/analyze/fingerprint_test.clj
@@ -141,63 +141,74 @@
                   fingerprint/save-fingerprint!                                (fn [& _] (reset! fingerprinted? true))]
       (tt/with-temp* [Table [table]
                       Field [_ (assoc field-properties :table_id (u/get-id table))]]
-        (fingerprint/fingerprint-fields! table))
-      @fingerprinted?)))
+        [(fingerprint/fingerprint-fields! table)
+         @fingerprinted?]))))
+
+(def ^:private default-stat-map
+  {:no-data-fingerprints 0, :failed-fingerprints 0, :updated-fingerprints 0, :fingerprints-attempted 0})
+
+(def ^:private one-updated-map
+  (merge default-stat-map {:updated-fingerprints 1, :fingerprints-attempted 1}))
 
 ;; Field is a subtype of newer fingerprint version
 (expect
+  [one-updated-map true]
   (field-was-fingerprinted?
     {2 #{:type/Float}}
     {:base_type :type/Decimal, :fingerprint_version 1}))
 
 ;; field is *not* a subtype of newer fingerprint version
 (expect
-  false
+  [default-stat-map false]
   (field-was-fingerprinted?
     {2 #{:type/Text}}
     {:base_type :type/Decimal, :fingerprint_version 1}))
 
 ;; Field is a subtype of one of several types for newer fingerprint version
 (expect
+  [one-updated-map true]
   (field-was-fingerprinted?
     {2 #{:type/Float :type/Text}}
     {:base_type :type/Decimal, :fingerprint_version 1}))
 
 ;; Field has same version as latest fingerprint version
 (expect
-  false
+  [default-stat-map false]
   (field-was-fingerprinted?
     {1 #{:type/Float}}
     {:base_type :type/Decimal, :fingerprint_version 1}))
 
 ;; field has newer version than latest fingerprint version (should never happen)
 (expect
-  false
+  [default-stat-map false]
   (field-was-fingerprinted?
     {1 #{:type/Float}}
     {:base_type :type/Decimal, :fingerprint_version 2}))
 
 ;; field has same exact type as newer fingerprint version
 (expect
+  [one-updated-map true]
   (field-was-fingerprinted?
     {2 #{:type/Float}}
     {:base_type :type/Float, :fingerprint_version 1}))
 
 ;; field is parent type of newer fingerprint version type
 (expect
-  false
+  [default-stat-map false]
   (field-was-fingerprinted?
     {2 #{:type/Decimal}}
     {:base_type :type/Float, :fingerprint_version 1}))
 
 ;; several new fingerprint versions exist
 (expect
+  [one-updated-map true]
   (field-was-fingerprinted?
     {2 #{:type/Float}
      3 #{:type/Text}}
     {:base_type :type/Decimal, :fingerprint_version 1}))
 
 (expect
+  [one-updated-map true]
   (field-was-fingerprinted?
     {2 #{:type/Text}
      3 #{:type/Float}}
@@ -206,9 +217,11 @@
 
 ;; Make sure the `fingerprint!` function is correctly updating the correct columns of Field
 (expect
-  {:fingerprint         {:experimental {:fake-fingerprint? true}}
-   :fingerprint_version 3
-   :last_analyzed       nil}
+  [{:no-data-fingerprints 0, :failed-fingerprints    0,
+    :updated-fingerprints 1, :fingerprints-attempted 1}
+   {:fingerprint         {:experimental {:fake-fingerprint? true}}
+    :fingerprint_version 3
+    :last_analyzed       nil}]
   (tt/with-temp Field [field {:base_type           :type/Integer
                               :table_id            (data/id :venues)
                               :fingerprint         nil
@@ -217,5 +230,5 @@
     (with-redefs [i/latest-fingerprint-version 3
                   sample/sample-fields         (constantly [[field [1 2 3 4 5]]])
                   fingerprint/fingerprint      (constantly {:experimental {:fake-fingerprint? true}})]
-      (#'fingerprint/fingerprint-table! (Table (data/id :venues)) [field])
-      (db/select-one [Field :fingerprint :fingerprint_version :last_analyzed] :id (u/get-id field)))))
+      [(#'fingerprint/fingerprint-table! (Table (data/id :venues)) [field])
+       (into {} (db/select-one [Field :fingerprint :fingerprint_version :last_analyzed] :id (u/get-id field)))])))
diff --git a/test/metabase/sync/field_values_test.clj b/test/metabase/sync/field_values_test.clj
index 96678cac862829148beadb27c783f3a38b63dd1e..6ae88c8b336ddf4d1577fe546af12d25782600a6 100644
--- a/test/metabase/sync/field_values_test.clj
+++ b/test/metabase/sync/field_values_test.clj
@@ -5,9 +5,11 @@
              [sync :as sync]
              [util :as u]]
             [metabase.models
+             [database :refer [Database]]
              [field :refer [Field]]
              [field-values :as field-values :refer [FieldValues]]
              [table :refer [Table]]]
+            [metabase.sync.util-test :as sut]
             [metabase.test.data :as data]
             [metabase.test.data.one-off-dbs :as one-off-dbs]
             [toucan.db :as db]))
@@ -19,22 +21,26 @@
 (expect
   {1 [1 2 3 4]
    2 nil
-   3 [1 2 3 4]}
+   3 {:errors 0, :created 1, :updated 5, :deleted 0}
+   4 [1 2 3 4]}
   (array-map
    ;; 1. Check that we have expected field values to start with
    1 (venues-price-field-values)
    ;; 2. Delete the Field values, make sure they're gone
    2 (do (db/delete! FieldValues :field_id (data/id :venues :price))
          (venues-price-field-values))
-   ;; 3. Now re-sync the table and make sure they're back
-   3 (do (sync/sync-table! (Table (data/id :venues)))
+   ;; 3. After the delete, a field values should be created, the rest updated
+   3 (sut/only-step-keys (sut/sync-database! "update-field-values" (Database (data/id))))
+   ;; 4. Now re-sync the table and make sure they're back
+   4 (do (sync/sync-table! (Table (data/id :venues)))
          (venues-price-field-values))))
 
 ;; Test that syncing will cause FieldValues to be updated
 (expect
   {1 [1 2 3 4]
    2 [1 2 3]
-   3 [1 2 3 4]}
+   3 {:errors 0, :created 0, :updated 6, :deleted 0}
+   4 [1 2 3 4]}
   (array-map
    ;; 1. Check that we have expected field values to start with
    1 (venues-price-field-values)
@@ -42,9 +48,10 @@
    2 (do (db/update! FieldValues (db/select-one-id FieldValues :field_id (data/id :venues :price))
            :values [1 2 3])
          (venues-price-field-values))
-   ;; 3. Now re-sync the table and make sure the value is back
-   3 (do (sync/sync-table! (Table (data/id :venues)))
-         (venues-price-field-values))))
+   ;; 3. Now re-sync the table and validate the field values updated
+   3 (sut/only-step-keys (sut/sync-database! "update-field-values" (Database (data/id))))
+   ;; 4. Make sure the value is back
+   4 (venues-price-field-values)))
 
 
 ;; A Field with 50 values should get marked as `auto-list` on initial sync, because it should be 'list', but was
diff --git a/test/metabase/sync/sync_metadata/fields_test.clj b/test/metabase/sync/sync_metadata/fields_test.clj
index 6cff1212d141c39fb8a3e74df16b5044e41c6dcc..c25c53170dd02d693a432d676f811c3a4db75751 100644
--- a/test/metabase/sync/sync_metadata/fields_test.clj
+++ b/test/metabase/sync/sync_metadata/fields_test.clj
@@ -11,6 +11,7 @@
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
+            [metabase.sync.util-test :as sut]
             [metabase.test.data :as data]
             [metabase.test.data.one-off-dbs :as one-off-dbs]
             [toucan
@@ -150,23 +151,27 @@
   (db/select-one-field :fk_target_field_id Field, :id (data/id :venues :category_id)))
 
 ;; Check that sync-table! causes FKs to be set like we'd expect
-(expect [{:special_type :type/FK, :fk_target_field_id true}
+(expect [{:total-fks 3, :updated-fks 0, :total-failed 0}
+         {:special_type :type/FK, :fk_target_field_id true}
          {:special_type nil,      :fk_target_field_id false}
+         {:total-fks 3, :updated-fks 1, :total-failed 0}
          {:special_type :type/FK, :fk_target_field_id true}]
   (let [field-id (data/id :checkins :user_id)
         get-special-type-and-fk-exists? (fn []
                                           (into {} (-> (db/select-one [Field :special_type :fk_target_field_id],
                                                          :id field-id)
                                                        (update :fk_target_field_id #(db/exists? Field :id %)))))]
-    [ ;; FK should exist to start with
+    [
+     (sut/only-step-keys (sut/sync-database! "sync-fks" (Database (data/id))))
+     ;; FK should exist to start with
      (get-special-type-and-fk-exists?)
      ;; Clear out FK / special_type
      (do (db/update! Field field-id, :special_type nil, :fk_target_field_id nil)
          (get-special-type-and-fk-exists?))
+
      ;; Run sync-table and they should be set again
-     (let [table (Table (data/id :checkins))]
-       (sync/sync-table! table)
-       (get-special-type-and-fk-exists?))]))
+     (sut/only-step-keys (sut/sync-database! "sync-fks" (Database (data/id))))
+     (get-special-type-and-fk-exists?)]))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -182,14 +187,17 @@
       (update :max #(u/round-to-decimals 4 %))))
 
 (expect
-  [{:min -165.374 :max -73.9533}
+  [{:no-data-fingerprints 0,  :failed-fingerprints    0,
+    :updated-fingerprints 16, :fingerprints-attempted 16}
+   {:min -165.374 :max -73.9533}
    {:min 10.0646 :max 40.7794}]
   (tt/with-temp* [Database [database {:details (:details (Database (data/id))), :engine :h2}]
                   Table    [table    {:db_id (u/get-id database), :name "VENUES"}]]
-    (sync/sync-table! table)
-    (map narrow-to-min-max
-         [(db/select-one-field :fingerprint Field, :id (data/id :venues :longitude))
-          (db/select-one-field :fingerprint Field, :id (data/id :venues :latitude))])))
+    (cons
+     (sut/only-step-keys (sut/sync-database! "fingerprint-fields" database))
+     (map narrow-to-min-max
+          [(db/select-one-field :fingerprint Field, :id (data/id :venues :longitude))
+           (db/select-one-field :fingerprint Field, :id (data/id :venues :latitude))]))))
 
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
diff --git a/test/metabase/sync/sync_metadata/sync_database_type_test.clj b/test/metabase/sync/sync_metadata/sync_database_type_test.clj
index 22621ec287f4eccd0986f40d0b341dcb16444694..9000f386cb2e42add729885066f47c55a7ee8cff 100644
--- a/test/metabase/sync/sync_metadata/sync_database_type_test.clj
+++ b/test/metabase/sync/sync_metadata/sync_database_type_test.clj
@@ -8,6 +8,7 @@
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
+            [metabase.sync.util-test :as sut]
             [metabase.test.data :as data]
             metabase.test.util ; to make sure defaults for with-temp are registered
             [toucan.db :as db]
@@ -15,12 +16,13 @@
 
 ;; make sure that if a driver reports back a different database-type the Field gets updated accordingly
 (expect
-  #{{:name "NAME",        :database_type "VARCHAR"}
-    {:name "LATITUDE",    :database_type "DOUBLE"}
-    {:name "LONGITUDE",   :database_type "DOUBLE"}
-    {:name "ID",          :database_type "BIGINT"}
-    {:name "PRICE",       :database_type "INTEGER"}
-    {:name "CATEGORY_ID", :database_type "INTEGER"}}
+  [{:total-fields 16 :updated-fields 6}
+   #{{:name "NAME",        :database_type "VARCHAR"}
+     {:name "LATITUDE",    :database_type "DOUBLE"}
+     {:name "LONGITUDE",   :database_type "DOUBLE"}
+     {:name "ID",          :database_type "BIGINT"}
+     {:name "PRICE",       :database_type "INTEGER"}
+     {:name "CATEGORY_ID", :database_type "INTEGER"}}]
   ;; create a copy of the sample dataset :D
   (tt/with-temp Database [db (select-keys (data/db) [:details :engine])]
     (sync/sync-database! db)
@@ -30,28 +32,33 @@
       (db/update-where! Field {:table_id (u/get-id venues-table)}, :database_type "?")
       (db/update! Table (u/get-id venues-table) :fields_hash "something new")
       ;; now sync the DB again
-      (sync/sync-database! db)
-      ;; The database_type of these Fields should get set to the correct types. Let's see...
-      (set (map (partial into {})
-                (db/select [Field :name :database_type] :table_id (u/get-id venues-table)))))))
+      (let [after-update-step-info (sut/sync-database! "sync-fields" db)]
+        [(sut/only-step-keys after-update-step-info)
+         ;; The database_type of these Fields should get set to the correct types. Let's see...
+         (set (map (partial into {})
+                   (db/select [Field :name :database_type] :table_id (u/get-id venues-table))))]))))
 
 ;; make sure that if a driver reports back a different base-type the Field gets updated accordingly
 (expect
-  #{{:name "NAME",        :base_type :type/Text}
-    {:name "LATITUDE",    :base_type :type/Float}
-    {:name "PRICE",       :base_type :type/Integer}
-    {:name "ID",          :base_type :type/BigInteger}
-    {:name "LONGITUDE",   :base_type :type/Float}
-    {:name "CATEGORY_ID", :base_type :type/Integer}}
+  [{:updated-fields 16, :total-fields 16}
+   {:updated-fields 6, :total-fields 16}
+   #{{:name "NAME",        :base_type :type/Text}
+     {:name "LATITUDE",    :base_type :type/Float}
+     {:name "PRICE",       :base_type :type/Integer}
+     {:name "ID",          :base_type :type/BigInteger}
+     {:name "LONGITUDE",   :base_type :type/Float}
+     {:name "CATEGORY_ID", :base_type :type/Integer}}]
   ;; create a copy of the sample dataset :D
   (tt/with-temp Database [db (select-keys (data/db) [:details :engine])]
-    (sync/sync-database! db)
-    (let [venues-table (Table :db_id (u/get-id db), :display_name "Venues")]
+    (let [new-db-step-info (sut/sync-database! "sync-fields" db)
+          venues-table     (Table :db_id (u/get-id db), :display_name "Venues")]
       (db/update! Table (u/get-id venues-table) :fields_hash "something new")
       ;; ok, now give all the Fields `:type/*` as their `base_type`
       (db/update-where! Field {:table_id (u/get-id venues-table)}, :base_type "type/*")
       ;; now sync the DB again
-      (sync/sync-database! db)
-      ;; The database_type of these Fields should get set to the correct types. Let's see...
-      (set (map (partial into {})
-                (db/select [Field :name :base_type] :table_id (u/get-id venues-table)))))))
+      (let [after-update-step-info (sut/sync-database! "sync-fields" db)]
+        [(sut/only-step-keys new-db-step-info)
+         (sut/only-step-keys after-update-step-info)
+         ;; The base_type of these Fields should get set to the correct types. Let's see...
+         (set (map (partial into {})
+                   (db/select [Field :name :base_type] :table_id (u/get-id venues-table))))]))))
diff --git a/test/metabase/sync/sync_metadata/sync_timezone_test.clj b/test/metabase/sync/sync_metadata/sync_timezone_test.clj
index b374d79029231a7417a233637cbac5a847b8f68c..33268084d5a17a49a014857a58833c3fb9e0839d 100644
--- a/test/metabase/sync/sync_metadata/sync_timezone_test.clj
+++ b/test/metabase/sync/sync_metadata/sync_timezone_test.clj
@@ -2,6 +2,7 @@
   (:require [clj-time.core :as time]
             [metabase.models.database :refer [Database]]
             [metabase.sync.sync-metadata.sync-timezone :as sync-tz]
+            [metabase.sync.util-test :as sut]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -16,16 +17,16 @@
 ;; sync happens automatically, so this test removes it first to ensure
 ;; that it gets set when missing
 (datasets/expect-with-engines #{:h2 :postgres}
-  [true true true]
+  [{:timezone-id "UTC"} true true true]
   (data/dataset test-data
     (let [db              (db/select-one Database [:name "test-data"])
           tz-on-load      (db-timezone db)
           _               (db/update! Database (:id db) :timezone nil)
           tz-after-update (db-timezone db)]
-      (sync-tz/sync-timezone! db)
 
-      ;; On startup is the timezone specified?
-      [(boolean (time/time-zone-for-id tz-on-load))
+      [(sut/only-step-keys (sut/sync-database! "sync-timezone" db))
+       ;; On startup is the timezone specified?
+       (boolean (time/time-zone-for-id tz-on-load))
        ;; Check to make sure the test removed the timezone
        (nil? tz-after-update)
        ;; Check that the value was set again after sync
diff --git a/test/metabase/sync/util_test.clj b/test/metabase/sync/util_test.clj
index 2b0cbbc5f25332366d74cd4f783752afb7537f44..2768b2971985dc1a6b94dcb4d6445c327aa7cae4 100644
--- a/test/metabase/sync/util_test.clj
+++ b/test/metabase/sync/util_test.clj
@@ -1,10 +1,14 @@
 (ns metabase.sync.util-test
   "Tests for the utility functions shared by all parts of sync, such as the duplicate ops guard."
-  (:require [expectations :refer :all]
+  (:require [clj-time.coerce :as tcoerce]
+            [expectations :refer :all]
             [metabase
              [driver :as driver]
              [sync :as sync]]
-            [metabase.models.database :refer [Database]]
+            [metabase.sync
+             [interface :as i]
+             [util :refer :all]]
+            [metabase.models.database :refer [Database] :as mdb]
             [toucan.util.test :as tt]))
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
@@ -53,3 +57,57 @@
      (deref f2)
      ;; Check the number of syncs that took place. Should be 2 (just the first)
      @calls-to-describe-database)))
+
+(defn call-with-operation-info
+  "Call `f` with `log-sync-summary` redef'd to intercept the step metadata before the information is logged. This is
+  useful to validate that the metadata is correct as the message might not be logged at all (depending on the logging
+  level)."
+  [f]
+  (let [step-info-atom (atom [])
+        orig-fn (var-get #'metabase.sync.util/log-sync-summary)]
+    (with-redefs [metabase.sync.util/log-sync-summary (fn [operation database {:keys [steps] :as operation-metadata}]
+                                                        (swap! step-info-atom conj operation-metadata)
+                                                        (orig-fn operation database operation-metadata))]
+      (f))
+    @step-info-atom))
+
+(defn sync-database!
+  "Calls `sync-database!` and returns the the metadata for `step` as the result. This function is useful for
+  validating that each steps metadata correctly reflects the changes that were made via a test scenario."
+  [step db]
+  (let [operation-results (call-with-operation-info #(sync/sync-database! db))]
+    (-> (into {} (mapcat :steps operation-results))
+        (get step))))
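+;; For a step like "sync-fks", the returned map includes both the generic timing keys and the
+;; step-specific counts; the shape below is illustrative only:
+;;   {:start-time ..., :end-time ..., :duration "28 ms", :total-fks 3, :updated-fks 1, :total-failed 0}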
+
+(defn only-step-keys
+  "This function removes the generic keys for the step metadata, returning only the step specific keypairs to make
+  validating the results for the given step easier."
+  [step-info]
+  (dissoc step-info :start-time :end-time :duration :log-summary-fn))
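+;; For example, (only-step-keys {:start-time t0, :end-time t1, :duration "10 ms", :total-fks 3, :updated-fks 1})
+;; returns {:total-fks 3, :updated-fks 1}.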
+
+(defn- date-string? [s]
+  (-> s tcoerce/from-string boolean))
+
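+;; Sanity checks for the :start-time/:end-time/:duration values attached to each sync step: the
+;; timestamps should parse as date strings and the duration should be reported as a string.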
+(defn- validate-times [m]
+  (and (-> m :start-time date-string?)
+       (-> m :end-time date-string?)
+       (-> m :duration string?)))
+
+(expect
+  [
+   ;; There should only be 1 operation info returned
+   true
+   ;; Validate that start/end/duration of the entire sync operation is included
+   true
+   ;; Each step should have a valid start/end/duration value
+   [true true]
+   ;; Each step name is included with the results, and the order is preserved
+   ["step1" "step2"]]
+  (let [sync-steps [(create-sync-step "step1" (fn [_] (Thread/sleep 10)))
+                    (create-sync-step "step2" (fn [_] (Thread/sleep 10)))]
+        mock-db    (mdb/map->DatabaseInstance {:name "test", :id  1, :engine :h2})
+        [results & none] (call-with-operation-info #(run-sync-operation "sync" mock-db sync-steps))]
+    [(empty? none)
+     (validate-times results)
+     (map (comp validate-times second) (:steps results))
+     (map first (:steps results))]))