Skip to content
Snippets Groups Projects
Unverified Commit ca927548 authored by Cal Herries's avatar Cal Herries Committed by GitHub
Browse files

Rename unparallelizable serdes functions to have exclamation marks (#35539)

parent 109c5358
No related branches found
No related tags found
No related merge requests found
Showing
with 129 additions and 129 deletions
......@@ -213,7 +213,7 @@
(log/info (str "Loading Analytics Content from: plugins/instance_analytics"))
;; The EE token might not have :serialization enabled, but audit features should still be able to use it.
(let [report (log/with-no-logs
(serialization.cmd/v2-load-internal "plugins/instance_analytics"
(serialization.cmd/v2-load-internal! "plugins/instance_analytics"
{}
:token-check? false))]
(if (not-empty (:errors report))
......
......@@ -37,7 +37,7 @@
(throw (ex-info (tru "Invalid Collection ID(s). These Collections do not exist: {0}"
(pr-str (set/difference (set collection_ids) (set existing-collection-ids))))
{:status-code 404}))))
(serialization.cmd/v2-dump path {:collections collection_ids})
(serialization.cmd/v2-dump! path {:collections collection_ids})
;; TODO -- not 100% sure this response makes sense. We can change it later with something more meaningful maybe
{:status :ok})
......
......@@ -49,7 +49,7 @@
(defn- check-premium-token! []
(premium-features/assert-has-feature :serialization (trs "Serialization")))
(mu/defn v1-load
(mu/defn v1-load!
"Load serialized metabase instance as created by [[dump]] command from directory `path`."
[path context :- Context]
(plugins/load-plugins!)
......@@ -62,10 +62,10 @@
context)]
(try
(log/info (trs "BEGIN LOAD from {0} with context {1}" path context))
(let [all-res [(load/load (str path "/users") context)
(load/load (str path "/databases") context)
(load/load (str path "/collections") context)
(load/load-settings path context)]
(let [all-res [(load/load! (str path "/users") context)
(load/load! (str path "/databases") context)
(load/load! (str path "/collections") context)
(load/load-settings! path context)]
reload-fns (filter fn? all-res)]
(when (seq reload-fns)
(log/info (trs "Finished first pass of load; now performing second pass"))
......@@ -76,7 +76,7 @@
(log/error e (trs "ERROR LOAD from {0}: {1}" path (.getMessage e)))
(throw e)))))
(mu/defn v2-load-internal
(mu/defn v2-load-internal!
"SerDes v2 load entry point for internal users.
`opts` are passed to [[v2.load/load-metabase]]."
......@@ -94,15 +94,15 @@
; (log/warn (trs "Dump was produced using a different version of Metabase. Things may break!")))
(log/info (trs "Loading serialized Metabase files from {0}" path))
(serdes/with-cache
(v2.load/load-metabase (v2.ingest/ingest-yaml path) opts)))
(v2.load/load-metabase! (v2.ingest/ingest-yaml path) opts)))
(mu/defn v2-load
(mu/defn v2-load!
"SerDes v2 load entry point.
opts are passed to load-metabase"
[path :- :string
opts :- [:map [:abort-on-error {:optional true} [:maybe :boolean]]]]
(v2-load-internal path opts :token-check? true))
(v2-load-internal! path opts :token-check? true))
(defn- select-entities-in-collections
([model collections]
......@@ -156,7 +156,7 @@
(into base-collections))))))
(defn v1-dump
(defn v1-dump!
"Legacy Metabase app data dump"
[path {:keys [state user] :or {state :active} :as opts}]
(log/info (trs "BEGIN DUMP to {0} via user {1}" path user))
......@@ -183,7 +183,7 @@
(t2/select Metric :table_id [:in (map :id tables)] {:order-by [[:id :asc]]})
(t2/select Metric))
collections (select-collections users state)]
(dump/dump path
(dump/dump! path
databases
tables
(mapcat field/with-values (u/batches-of 32000 fields))
......@@ -195,11 +195,11 @@
(select-entities-in-collections Dashboard collections state)
(select-entities-in-collections Pulse collections state)
users))
(dump/dump-settings path)
(dump/dump-dimensions path)
(dump/dump-settings! path)
(dump/dump-dimensions! path)
(log/info (trs "END DUMP to {0} via user {1}" path user)))
(defn v2-dump
(defn v2-dump!
"Exports Metabase app data to directory at path"
[path {:keys [collection-ids] :as opts}]
(log/info (trs "Exporting Metabase to {0}" path) (u/emoji "🏭 🚛💨"))
......@@ -214,14 +214,14 @@
(log/info (trs "Export to {0} complete!" path) (u/emoji "🚛💨 📦"))
::v2-dump-complete)
(defn seed-entity-ids
(defn seed-entity-ids!
"Add entity IDs for instances of serializable models that don't already have them.
Returns truthy if all entity IDs were added successfully, or falsey if any errors were encountered."
[]
(v2.entity-ids/seed-entity-ids!))
(defn drop-entity-ids
(defn drop-entity-ids!
"Drop entity IDs for all instances of serializable models.
This is needed for some cases of migrating from v1 to v2 serdes. v1 doesn't dump `entity_id`, so they may have been
......
......@@ -56,7 +56,7 @@
(map? (first v))) (mapv #(serialization-deep-sort % (conj path k)) v)
:else v)]))))
(defn spit-yaml
(defn spit-yaml!
"Writes obj to filename and creates parent directories if necessary.
Writes (even nested) yaml keys in a deterministic fashion."
......@@ -71,7 +71,7 @@
(mi/instance-of? model instance))
[Pulse Dashboard Metric Segment Field User]))
(defn- spit-entity
(defn- spit-entity!
[path entity]
(let [filename (if (as-file? entity)
(format "%s%s.yaml" path (fully-qualified-name entity))
......@@ -80,35 +80,35 @@
(log/warn (str filename " is about to be overwritten."))
(log/debug (str "With object: " (pr-str entity))))
(spit-yaml filename (serialize/serialize entity))))
(spit-yaml! filename (serialize/serialize entity))))
(defn dump
(defn dump!
"Serialize entities into a directory structure of YAMLs at `path`."
[path & entities]
(doseq [entity (flatten entities)]
(try
(spit-entity path entity)
(spit-entity! path entity)
(catch Throwable e
(log/error e (trs "Error dumping {0}" (name-for-logging entity))))))
(spit-yaml (str path "/manifest.yaml")
(spit-yaml! (str path "/manifest.yaml")
{:serialization-version serialize/serialization-protocol-version
:metabase-version config/mb-version-info}))
(defn dump-settings
(defn dump-settings!
"Combine all settings into a map and dump it into YAML at `path`."
[path]
(spit-yaml (str path "/settings.yaml")
(spit-yaml! (str path "/settings.yaml")
(into {} (for [{:keys [key value]} (setting/admin-writable-site-wide-settings
:getter (partial setting/get-value-of-type :string))]
[key value]))))
(defn dump-dimensions
(defn dump-dimensions!
"Combine all dimensions into a vector and dump it into YAML at in the directory for the
corresponding schema starting at `path`."
[path]
(doseq [[table-id dimensions] (group-by (comp :table_id Field :field_id) (t2/select Dimension))
:let [table (t2/select-one Table :id table-id)]]
(spit-yaml (if (:schema table)
(spit-yaml! (if (:schema table)
(format "%s%s/schemas/%s/dimensions.yaml"
path
(->> table :db_id (fully-qualified-name Database))
......
......@@ -203,7 +203,7 @@
(format "at %s -> %s" (str/join "/" v) k))
(::unresolved-names model))))))
(defmulti load
(defmulti load!
"Load an entity of type `model` stored at `path` in the context `context`.
Passing in parent entities as context instead of decoding them from the path each time,
......@@ -212,7 +212,7 @@
(fn [path _context]
(terminal-dir path)))
(defn- load-dimensions
(defn- load-dimensions!
[path context]
(maybe-upsert-many! context Dimension
(for [dimension (yaml/from-file (str path "/dimensions.yaml"))]
......@@ -220,7 +220,7 @@
(update :human_readable_field_id (comp :field fully-qualified-name->context))
(update :field_id (comp :field fully-qualified-name->context))))))
(defmethod load "databases"
(defmethod load! "databases"
[path context]
(doseq [path (list-dirs path)]
;; If we failed to load the DB no use in trying to load its tables
......@@ -229,10 +229,10 @@
:let [context (merge context {:database db
:schema (when (not= inner-path path)
(terminal-dir path))})]]
(load (str inner-path "/tables") context)
(load-dimensions inner-path context)))))
(load! (str inner-path "/tables") context)
(load-dimensions! inner-path context)))))
(defmethod load "tables"
(defmethod load! "tables"
[path context]
(let [paths (list-dirs path)
table-ids (maybe-upsert-many! context Table
......@@ -242,19 +242,19 @@
(doseq [[path table-id] (map vector paths table-ids)
:when table-id]
(let [context (assoc context :table table-id)]
(load (str path "/fields") context)))
(load! (str path "/fields") context)))
;; ... then everything else so we don't have issues with cross-table references
(doseq [[path table-id] (map vector paths table-ids)
:when table-id]
(let [context (assoc context :table table-id)]
(load (str path "/fks") context)
(load (str path "/metrics") context)
(load (str path "/segments") context)))))
(load! (str path "/fks") context)
(load! (str path "/metrics") context)
(load! (str path "/segments") context)))))
(def ^:private fully-qualified-name->card-id
(comp :card fully-qualified-name->context))
(defn- load-fields
(defn- load-fields!
[path context]
(let [fields (slurp-dir path)
field-values (map :values fields)
......@@ -271,15 +271,15 @@
:when field-id]
(assoc field-value :field_id field-id)))))
(defmethod load "fields"
(defmethod load! "fields"
[path context]
(load-fields path context))
(load-fields! path context))
(defmethod load "fks"
(defmethod load! "fks"
[path context]
(load-fields path context))
(load-fields! path context))
(defmethod load "metrics"
(defmethod load! "metrics"
[path context]
(maybe-upsert-many! context Metric
(for [metric (slurp-dir path)]
......@@ -289,7 +289,7 @@
(assoc-in [:definition :source-table] (:table context))
(update :definition mbql-fully-qualified-names->ids)))))
(defmethod load "segments"
(defmethod load! "segments"
[path context]
(maybe-upsert-many! context Segment
(for [metric (slurp-dir path)]
......@@ -452,7 +452,7 @@
;; for a deprecated feature
(m/update-existing-in p [:values_source_config :card_id] fully-qualified-name->card-id)))
(defn load-dashboards
(defn load-dashboards!
"Loads `dashboards` (which is a sequence of maps parsed from a YAML dump of dashboards) in a given `context`."
{:added "0.40.0"}
[context dashboards]
......@@ -532,14 +532,14 @@
"Retrying dashboards for collection %s: %s"
(or (:collection context) "root")
(str/join ", " (map :name revisit-dashboards)))
(load-dashboards (assoc context :mode :update) revisit-dashboards)))))))
(load-dashboards! (assoc context :mode :update) revisit-dashboards)))))))
(defmethod load "dashboards"
(defmethod load! "dashboards"
[path context]
(binding [names/*suppress-log-name-lookup-exception* true]
(load-dashboards context (slurp-dir path))))
(load-dashboards! context (slurp-dir path))))
(defn- load-pulses [pulses context]
(defn- load-pulses! [pulses context]
(let [cards (map :cards pulses)
channels (map :channels pulses)
pulse-ids (maybe-upsert-many! context Pulse
......@@ -579,12 +579,12 @@
(fn []
(log/infof "Reloading pulses from collection %d" (:collection context))
(let [pulse-indexes (map ::pulse-index revisit)]
(load-pulses (map (partial nth pulses) pulse-indexes) (assoc context :mode :update))))))))
(load-pulses! (map (partial nth pulses) pulse-indexes) (assoc context :mode :update))))))))
(defmethod load "pulses"
(defmethod load! "pulses"
[path context]
(binding [names/*suppress-log-name-lookup-exception* true]
(load-pulses (slurp-dir path) context)))
(load-pulses! (slurp-dir path) context)))
(defn- resolve-source-query [query]
(if (:source-query query)
......@@ -654,7 +654,7 @@
:database (:database_id card)})
(dissoc ::unresolved-names)))
(defn load-cards
(defn load-cards!
"Loads cards in a given `context`, from a given sequence of `paths` (strings). If specified, then `only-cards` (maps
having the structure of cards loaded from YAML dumps) will be used instead of loading data from `paths` (to serve as
a retry mechanism)."
......@@ -690,12 +690,12 @@
(fn []
(log/infof "Attempting to reload cards in collection %d" (:collection context))
(let [revisit-indexes (::revisit-index grouped-cards)]
(load-cards (assoc context :mode :update) paths (mapv (partial nth cards) revisit-indexes))))))))
(load-cards! (assoc context :mode :update) paths (mapv (partial nth cards) revisit-indexes))))))))
(defmethod load "cards"
(defmethod load! "cards"
[path context]
(binding [names/*suppress-log-name-lookup-exception* true]
(load-cards context (list-dirs path) nil)))
(load-cards! context (list-dirs path) nil)))
(defn- pre-insert-user
"A function called on each User instance before it is inserted (via upsert)."
......@@ -725,7 +725,7 @@
(log/infof "Password reset email generated for user ID %d (%s)" user-id email)))
user-id)))
(defmethod load "users"
(defmethod load! "users"
[path context]
;; Currently we only serialize the new owner user, so it's fine to ignore mode setting
;; add :post-insert-fn post-insert-user back to start sending password reset emails
......@@ -747,7 +747,7 @@
(make-reload-fn (for [reload-fn new-fns]
(reload-fn))))))))
(defn- load-collections
(defn- load-collections!
[path context]
(let [subdirs (list-dirs path)
by-ns (group-by #(let [[_ coll-ns] (re-matches #".*/:([^:/]+)" %)]
......@@ -767,32 +767,32 @@
(maybe-upsert-many! context Collection)
first))]
(log/infof "Processing collection at path %s" path)
[(load (str path "/collections") context)
(load (str path "/cards") context)
(load (str path "/pulses") context)
(load (str path "/dashboards") context)
(load (str path "/snippets") context)]))
[(load! (str path "/collections") context)
(load! (str path "/cards") context)
(load! (str path "/pulses") context)
(load! (str path "/dashboards") context)
(load! (str path "/snippets") context)]))
load-ns-fns (for [[coll-ns [coll-ns-path]] ns-paths]
(do (log/infof "Loading %s namespace for collection at path %s" coll-ns coll-ns-path)
(load-collections coll-ns-path (assoc context :collection-namespace coll-ns))))]
(load-collections! coll-ns-path (assoc context :collection-namespace coll-ns))))]
(make-reload-fn (concat (apply concat results) ; these are each sequences, so need to flatten those first
load-ns-fns))))
(defmethod load "collections"
(defmethod load! "collections"
[path context]
(load-collections path context))
(load-collections! path context))
(defn- prepare-snippet [context snippet]
(assoc snippet :creator_id (default-user-id)
:collection_id (:collection context)))
(defmethod load "snippets"
(defmethod load! "snippets"
[path context]
(let [paths (list-dirs path)
snippets (map (partial prepare-snippet context) (slurp-many paths))]
(maybe-upsert-many! context NativeQuerySnippet snippets)))
(defn load-settings
(defn load-settings!
"Load a dump of settings."
[path context]
(doseq [[k v] (yaml/from-file (str path "/settings.yaml"))
......
......@@ -14,7 +14,7 @@
[toucan2.core :as t2]
[toucan2.model :as t2.model]))
(defn backfill-ids-for
(defn backfill-ids-for!
"Updates all rows of a particular model to have `:entity_id` set, based on the [[serdes/identity-hash]]."
[model]
(let [missing (t2/select model :entity_id nil)
......@@ -35,7 +35,7 @@
;; toucan2 models
(isa? model :hook/entity-id)))
(defn backfill-ids
(defn backfill-ids!
"Updates all rows of all models that are (a) serialized and (b) have `entity_id` columns to have the
`entity_id` set. If the `entity_id` is NULL, it is set based on the [[serdes/identity-hash]] for that
row."
......@@ -43,4 +43,4 @@
(doseq [model-name (concat serdes.models/exported-models serdes.models/inlined-models)
:let [model (t2.model/resolve-model (symbol model-name))]
:when (has-entity-id? model)]
(backfill-ids-for model)))
(backfill-ids-for! model)))
......@@ -192,7 +192,7 @@ Eg. if Dashboard B includes a Card A that is derived from a
(defn extract
"Returns a reducible stream of entities to serialize"
[{:keys [targets] :as opts}]
(serdes.backfill/backfill-ids)
(serdes.backfill/backfill-ids!)
(if (seq targets)
(extract-subtrees opts)
(extract-metabase opts)))
......@@ -9,9 +9,9 @@
[metabase.util.i18n :refer [trs]]
[metabase.util.log :as log]))
(declare load-one)
(declare load-one!)
(defn- load-deps
(defn- load-deps!
"Given a list of `deps` (hierarchies), [[load-one]] them all.
If [[load-one]] throws because it can't find that entity in the filesystem, check if it's already loaded in
our database."
......@@ -20,7 +20,7 @@
ctx
(letfn [(loader [ctx dep]
(try
(load-one ctx dep)
(load-one! ctx dep)
(catch Exception e
(if (and (= (:error (ex-data e)) ::not-found)
(serdes/load-find-local dep))
......@@ -30,7 +30,7 @@
(throw e)))))]
(reduce loader ctx deps))))
(defn- load-one
(defn- load-one!
"Loads a single entity, specified by its `:serdes/meta` abstract path, into the appdb, doing some bookkeeping to avoid
cycles.
......@@ -57,7 +57,7 @@
deps (serdes/dependencies ingested)
ctx (-> ctx
(update :expanding conj path)
(load-deps deps)
(load-deps! deps)
(update :seen conj path)
(update :expanding disj path))
;; Use the abstract path as attached by the ingestion process, not the original one we were passed.
......@@ -72,27 +72,27 @@
:deps-chain expanding}
e)))))))
(defn- try-load-one
(defn- try-load-one!
[ctx path]
(try
(load-one ctx path)
(load-one! ctx path)
(catch Exception e
(log/error (trs "Error importing {0}. Continuing..." (serdes/log-path-str path)))
(update ctx :errors conj e))))
(defn load-metabase
(defn load-metabase!
"Loads in a database export from an ingestion source, which is any Ingestable instance."
[ingestion & {:keys [abort-on-error] :or {abort-on-error true}}]
;; We proceed in the arbitrary order of ingest-list, deserializing all the files. Their declared dependencies guide
;; the import, and make sure all containers are imported before contents, etc.
(serdes.backfill/backfill-ids)
(serdes.backfill/backfill-ids!)
(let [contents (serdes.ingest/ingest-list ingestion)
ctx {:expanding #{}
:seen #{}
:ingestion ingestion
:from-ids (m/index-by :id contents)
:errors []}
result (reduce (if abort-on-error load-one try-load-one) ctx contents)]
result (reduce (if abort-on-error load-one! try-load-one!) ctx contents)]
(when-let [errors (seq (:errors result))]
(log/error (trs "Errors were encountered during import."))
(doseq [e errors]
......
(ns metabase-enterprise.serialization.v2.storage
(:require [clojure.java.io :as io]
[clojure.string :as str]
[metabase-enterprise.serialization.dump :refer [spit-yaml]]
[metabase-enterprise.serialization.dump :refer [spit-yaml!]]
[metabase.models.serialization :as serdes]
[metabase.util.i18n :refer [trs]]
[metabase.util.log :as log]))
......@@ -25,14 +25,14 @@
(defn- store-entity! [opts entity]
(log/info (trs "Storing {0}" (serdes/log-path-str (:serdes/meta entity))))
(spit-yaml (file opts entity) entity))
(spit-yaml! (file opts entity) entity))
(defn- store-settings! [{:keys [root-dir]} settings]
(when (seq settings)
(let [as-map (into (sorted-map)
(for [{:keys [key value]} settings]
[key value]))]
(spit-yaml (io/file root-dir "settings.yaml") as-map))))
(spit-yaml! (io/file root-dir "settings.yaml") as-map))))
(defn store!
"Helper for storing a serialized database to a tree of YAML files."
......
......@@ -4,7 +4,7 @@
[clojure.data :as data]
[clojure.java.io :as io]
[clojure.test :refer [deftest is testing use-fixtures]]
[metabase-enterprise.serialization.cmd :refer [v1-dump v1-load]]
[metabase-enterprise.serialization.cmd :refer [v1-dump! v1-load!]]
[metabase-enterprise.serialization.load :as load]
[metabase-enterprise.serialization.test-util :as ts]
[metabase.models
......@@ -327,7 +327,7 @@
:bigquery-cloud-sdk))
(premium-features-test/with-premium-features #{:serialization}
(let [fingerprint (ts/with-world
(v1-dump dump-dir {:user (:email (test.users/fetch-user :crowberto))
(v1-dump! dump-dir {:user (:email (test.users/fetch-user :crowberto))
:only-db-ids #{db-id}})
{:query-results (gather-orig-results [card-id
card-arch-id
......@@ -407,7 +407,7 @@
[Card (t2/select-one Card :id card-join-card-id)]
[Card (t2/select-one Card :id card-id-pivot-table)]]})]
(with-world-cleanup
(v1-load dump-dir {:on-error :continue :mode :skip})
(v1-load! dump-dir {:on-error :continue :mode :skip})
(mt/with-db (t2/select-one Database :name ts/temp-db-name)
(doseq [[model entity] (:entities fingerprint)]
(testing (format "%s \"%s\"" (type model) (:name entity))
......
......@@ -28,7 +28,7 @@
(is (every? nil? (all-eids))))
(testing "backfill now recreates them"
(serdes.backfill/backfill-ids-for Collection)
(serdes.backfill/backfill-ids-for! Collection)
(is (every? some? (all-eids))))))))
(deftest no-overwrite-test
......@@ -41,7 +41,7 @@
(t2/select-fn-set :entity_id Collection))))
(testing "backfill"
(serdes.backfill/backfill-ids-for Collection)
(serdes.backfill/backfill-ids-for! Collection)
(testing "sets a blank entity_id"
(is (some? (t2/select-one-fn :entity_id Collection :id c2-id))))
(testing "does not change the original entity_id"
......@@ -57,11 +57,11 @@
(t2/select-fn-set :entity_id Collection))))
(testing "backfilling twice"
(serdes.backfill/backfill-ids-for Collection)
(serdes.backfill/backfill-ids-for! Collection)
(let [first-eid (t2/select-one-fn :entity_id Collection :id c2-id)]
(t2/update! Collection c2-id {:entity_id nil})
(is (= #{c1-eid nil}
(t2/select-fn-set :entity_id Collection)))
(serdes.backfill/backfill-ids-for Collection)
(serdes.backfill/backfill-ids-for! Collection)
(testing "produces the same entity_id both times"
(is (= first-eid (t2/select-one-fn :entity_id Collection :id c2-id)))))))))
......@@ -318,7 +318,7 @@
(set (ingest/ingest-list (ingest/ingest-yaml dump-dir)))))))
(testing "doing ingestion"
(is (serdes/with-cache (serdes.load/load-metabase (ingest/ingest-yaml dump-dir)))
(is (serdes/with-cache (serdes.load/load-metabase! (ingest/ingest-yaml dump-dir)))
"successful"))
(testing "for Actions"
......@@ -494,7 +494,7 @@
(ts/with-dest-db
;; ingest
(testing "doing ingestion"
(is (serdes/with-cache (serdes.load/load-metabase (ingest/ingest-yaml dump-dir)))
(is (serdes/with-cache (serdes.load/load-metabase! (ingest/ingest-yaml dump-dir)))
"successful"))
(let [dash1d (t2/select-one Dashboard :name (:name dash1s))
......@@ -602,7 +602,7 @@
;; ingest
(ts/with-dest-db
(testing "doing ingestion"
(is (serdes/with-cache (serdes.load/load-metabase (ingest/ingest-yaml dump-dir)))
(is (serdes/with-cache (serdes.load/load-metabase! (ingest/ingest-yaml dump-dir)))
"successful"))
(doseq [[name model]
......@@ -667,7 +667,7 @@
(ts/with-dest-db
;; ingest
(testing "doing ingestion"
(is (serdes/with-cache (serdes.load/load-metabase (ingest/ingest-yaml dump-dir)))
(is (serdes/with-cache (serdes.load/load-metabase! (ingest/ingest-yaml dump-dir)))
"successful"))
(let [new-dashboard (-> (t2/select-one Dashboard :entity_id dashboard-eid)
(t2/hydrate :tabs :dashcards))
......@@ -708,13 +708,13 @@
;; preparation
(t2.with-temp/with-temp [Dashboard _ {:name "some dashboard"}]
(testing "export (v2-dump) command"
(is (cmd/v2-dump dump-dir {})
(is (cmd/v2-dump! dump-dir {})
"works"))
(testing "import (v2-load) command"
(ts/with-dest-db
(testing "doing ingestion"
(is (cmd/v2-load dump-dir {})
(is (cmd/v2-load! dump-dir {})
"works"))))))))))
(testing "without :serialization feature enabled"
......@@ -726,12 +726,12 @@
(t2.with-temp/with-temp [Dashboard _ {:name "some dashboard"}]
(testing "export (v2-dump) command"
(is (thrown-with-msg? Exception #"Please upgrade"
(cmd/v2-dump dump-dir {}))
(cmd/v2-dump! dump-dir {}))
"throws"))
(testing "import (v2-load) command"
(ts/with-dest-db
(testing "doing ingestion"
(is (thrown-with-msg? Exception #"Please upgrade"
(cmd/v2-load dump-dir {}))
(cmd/v2-load! dump-dir {}))
"throws")))))))))))
......@@ -63,7 +63,7 @@
(testing "loading into an empty database succeeds"
(ts/with-dest-db
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
(let [colls (t2/select Collection)]
(is (= 1 (count colls)))
(is (= "Basic Collection" (:name (first colls))))
......@@ -71,7 +71,7 @@
(testing "loading again into the same database does not duplicate"
(ts/with-dest-db
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
(let [colls (t2/select Collection)]
(is (= 1 (count colls)))
(is (= "Basic Collection" (:name (first colls))))
......@@ -99,7 +99,7 @@
(ts/with-dest-db
(ts/create! Collection :name "Unrelated Collection")
(ts/create! Collection :name "Parent Collection" :location "/" :entity_id (:entity_id @parent))
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
(let [parent-dest (t2/select-one Collection :entity_id (:entity_id @parent))
child-dest (t2/select-one Collection :entity_id (:entity_id @child))
grandchild-dest (t2/select-one Collection :entity_id (:entity_id @grandchild))]
......@@ -158,7 +158,7 @@
(testing "deserialization works properly, keeping the same-named tables apart"
(ts/with-dest-db
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
(reset! db1d (t2/select-one Database :name (:name @db1s)))
(reset! db2d (t2/select-one Database :name (:name @db2s)))
......@@ -235,7 +235,7 @@
(reset! user1d (ts/create! User :first_name "Tom" :last_name "Scholz" :email "tom@bost.on"))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! db1d (t2/select-one Database :name "my-db"))
......@@ -314,7 +314,7 @@
(reset! user1d (ts/create! User :first_name "Tom" :last_name "Scholz" :email "tom@bost.on"))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! db1d (t2/select-one Database :name "my-db"))
......@@ -389,7 +389,7 @@
(reset! user1d (ts/create! User :first_name "Tom" :last_name "Scholz" :email "tom@bost.on"))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! db1d (t2/select-one Database :name "my-db"))
......@@ -542,7 +542,7 @@
(reset! user1d (ts/create! User :first_name "Tom" :last_name "Scholz" :email "tom@bost.on"))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! db1d (t2/select-one Database :name "my-db"))
......@@ -662,7 +662,7 @@
:creator_id (:id @user1s) :timezone "America/New_York")
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! timeline2d (t2/select-one Timeline :entity_id (:entity_id @timeline2s)))
......@@ -734,7 +734,7 @@
(reset! user1d (ts/create! User :first_name "Tom" :last_name "Scholz" :email "tom@bost.on"))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! metric1d (t2/select-one Metric :name "Large Users"))
......@@ -834,7 +834,7 @@
(reset! fv1d (ts/create! FieldValues :field_id (:id @field1d) :values ["WA" "NC" "NM" "WI"]))
;; Load the serialized content.
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
;; Fetch the relevant bits
(reset! fv1d (t2/select-one FieldValues :field_id (:id @field1d)))
......@@ -883,7 +883,7 @@
:name "Some card"
:table_id ["my-db" nil "CUSTOMERS"]
:visualization_settings {}}])]
(is (some? (serdes.load/load-metabase ingestion)))))
(is (some? (serdes.load/load-metabase! ingestion)))))
(testing "depending on nonexisting values fails"
(let [ingestion (ingestion-in-memory [{:serdes/meta [{:model "Card" :id "0123456789abcdef_0123"}]
......@@ -900,7 +900,7 @@
:visualization_settings {}}])]
(is (thrown-with-msg? clojure.lang.ExceptionInfo
#"Failed to read file"
(serdes.load/load-metabase ingestion)))))))))
(serdes.load/load-metabase! ingestion)))))))))
(deftest card-with-snippet-test
(let [db1s (atom nil)
......@@ -933,7 +933,7 @@
(testing "when loading"
(let [new-eid (u/generate-nano-id)
ingestion (ingestion-in-memory [(assoc @extracted :entity_id new-eid)])]
(is (some? (serdes.load/load-metabase ingestion)))
(is (some? (serdes.load/load-metabase! ingestion)))
(is (= (:id @snippet1s)
(-> (t2/select-one Card :entity_id new-eid)
:dataset_query
......@@ -972,7 +972,7 @@
(is (string? (:type action-serialized))))))))
(testing "loading succeeds"
(ts/with-dest-db
(serdes.load/load-metabase (ingestion-in-memory @serialized))
(serdes.load/load-metabase! (ingestion-in-memory @serialized))
(let [action (t2/select-one Action :entity_id eid)]
(is (some? action))
(testing ":type should be a keyword again"
......
......@@ -183,13 +183,13 @@
:validate [#{:continue :abort} "Must be 'continue' or 'abort'"]]]}
[path & options]
(log/warn (u/colorize :red (trs "''load'' is deprecated and will be removed in a future release. Please migrate to ''import''.")))
(call-enterprise 'metabase-enterprise.serialization.cmd/v1-load path (get-parsed-options #'load options)))
(call-enterprise 'metabase-enterprise.serialization.cmd/v1-load! path (get-parsed-options #'load options)))
(defn ^:command import
{:doc "Load serialized Metabase instance as created by the [[export]] command from directory `path`."
:arg-spec [["-e" "--abort-on-error" "Stops import on any errors, default is to continue."]]}
[path & options]
(call-enterprise 'metabase-enterprise.serialization.cmd/v2-load path (get-parsed-options #'import options)))
(call-enterprise 'metabase-enterprise.serialization.cmd/v2-load! path (get-parsed-options #'import options)))
(defn ^:command dump
{:doc "Note: this command is deprecated. Use `export` instead.
......@@ -202,7 +202,7 @@
:validate [#{:active :all} "Must be 'active' or 'all'"]]]}
[path & options]
(log/warn (u/colorize :red (trs "''dump'' is deprecated and will be removed in a future release. Please migrate to ''export''.")))
(call-enterprise 'metabase-enterprise.serialization.cmd/v1-dump path (get-parsed-options #'dump options)))
(call-enterprise 'metabase-enterprise.serialization.cmd/v1-dump! path (get-parsed-options #'dump options)))
(defn ^:command export
{:doc "Serialize Metabase instance into directory at `path`."
......@@ -215,19 +215,19 @@
["-f" "--include-field-values" "Include field values along with field metadata."]
["-s" "--include-database-secrets" "Include database connection details (in plain text; use caution)."]]}
[path & options]
(call-enterprise 'metabase-enterprise.serialization.cmd/v2-dump path (get-parsed-options #'export options)))
(call-enterprise 'metabase-enterprise.serialization.cmd/v2-dump! path (get-parsed-options #'export options)))
(defn ^:command seed-entity-ids
"Add entity IDs for instances of serializable models that don't already have them."
[]
(when-not (call-enterprise 'metabase-enterprise.serialization.cmd/seed-entity-ids)
(when-not (call-enterprise 'metabase-enterprise.serialization.cmd/seed-entity-ids!)
(throw (Exception. "Error encountered while seeding entity IDs"))))
(defn ^:command drop-entity-ids
"Drop entity IDs for instances of serializable models. Useful for migrating from v1 serialization (x.46 and earlier)
to v2 (x.47+)."
[]
(when-not (call-enterprise 'metabase-enterprise.serialization.cmd/drop-entity-ids)
(when-not (call-enterprise 'metabase-enterprise.serialization.cmd/drop-entity-ids!)
(throw (Exception. "Error encountered while dropping entity IDs"))))
(defn ^:command rotate-encryption-key
......
......@@ -19,36 +19,36 @@
(do-with-captured-call-enterprise-calls!
(fn []
(testing "with no options"
(is (= '(metabase-enterprise.serialization.cmd/v1-load "/path/" {:mode :skip, :on-error :continue})
(is (= '(metabase-enterprise.serialization.cmd/v1-load! "/path/" {:mode :skip, :on-error :continue})
(cmd/load "/path/"))))
(testing "with options"
(is (= '(metabase-enterprise.serialization.cmd/v1-load "/path/" {:mode :skip, :on-error :abort})
(is (= '(metabase-enterprise.serialization.cmd/v1-load! "/path/" {:mode :skip, :on-error :abort})
(cmd/load "/path/" "--on-error" "abort")))))))
(deftest import-command-test
(do-with-captured-call-enterprise-calls!
(fn []
(testing "with no options"
(is (= '(metabase-enterprise.serialization.cmd/v2-load "/path/" {})
(is (= '(metabase-enterprise.serialization.cmd/v2-load! "/path/" {})
(cmd/import "/path/"))))
(testing "with options"
(is (= '(metabase-enterprise.serialization.cmd/v2-load "/path/" {:abort-on-error true})
(is (= '(metabase-enterprise.serialization.cmd/v2-load! "/path/" {:abort-on-error true})
(cmd/import "/path/" "--abort-on-error")))))))
(deftest dump-command-test
(do-with-captured-call-enterprise-calls!
(fn []
(testing "with no options"
(is (= '(metabase-enterprise.serialization.cmd/v1-dump "/path/" {:state :all})
(is (= '(metabase-enterprise.serialization.cmd/v1-dump! "/path/" {:state :all})
(cmd/dump "/path/"))))
(testing "with options"
(is (= '(metabase-enterprise.serialization.cmd/v1-dump "/path/" {:state :active})
(is (= '(metabase-enterprise.serialization.cmd/v1-dump! "/path/" {:state :active})
(cmd/dump "/path/" "--state" "active")))))))
(deftest export-command-arg-parsing-test
(do-with-captured-call-enterprise-calls!
(fn []
(are [cmd-args v2-dump-args] (= '(metabase-enterprise.serialization.cmd/v2-dump "/path/" v2-dump-args)
(are [cmd-args v2-dump-args] (= '(metabase-enterprise.serialization.cmd/v2-dump! "/path/" v2-dump-args)
(apply cmd/export "/path/" cmd-args))
nil
{}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment