Skip to content
Snippets Groups Projects
Commit a33f3ddd authored by Cam Saül's avatar Cam Saül
Browse files

Test fixes & cleanup [ci drivers] :wrench: :shower:

parent 791dfdf9
No related branches found
No related tags found
No related merge requests found
Showing with 112 additions and 197 deletions
...@@ -43,3 +43,4 @@ bin/release/aws-eb/metabase-aws-eb.zip ...@@ -43,3 +43,4 @@ bin/release/aws-eb/metabase-aws-eb.zip
/plugins /plugins
/build.xml /build.xml
/test-report-* /test-report-*
/crate-*
...@@ -6,68 +6,6 @@ ...@@ -6,68 +6,6 @@
(metabase.test.data [datasets :refer [expect-with-engine]] (metabase.test.data [datasets :refer [expect-with-engine]]
[interface :refer [def-database-definition]]))) [interface :refer [def-database-definition]])))
;; Make sure that paging works correctly for the bigquery driver when fetching a list of tables
;; Default page size is 50 so if we have more than that number of tables make sure they all show up
(def-database-definition ^:private fifty-one-different-tables
  ;; 51 identical single-column tables -- one more than the default page size of 50 mentioned in the comment above,
  ;; so fetching the table list is forced to span two pages.
  ;; NOTE(review): the table definitions are spelled out literally because `def-database-definition` is a macro and
  ;; presumably requires literal table forms -- confirm before trying to generate these with a loop.
  ["birds_1" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_2" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_3" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_4" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_5" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_6" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_7" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_8" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_9" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_10" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_11" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_12" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_13" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_14" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_15" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_16" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_17" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_18" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_19" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_20" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_21" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_22" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_23" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_24" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_25" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_26" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_27" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_28" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_29" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_30" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_31" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_32" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_33" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_34" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_35" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_36" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_37" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_38" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_39" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_40" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_41" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_42" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_43" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_44" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_45" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_46" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_47" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_48" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_49" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_50" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]]
  ["birds_51" [{:field-name "name", :base-type :type/Text}] [["Rasta"] ["Lucky"]]])
;; only run this test 1 out of every 5 times since it takes like 5-10 minutes just to sync the DB and we don't have all day
;; NOTE(review): gating the expectation on `(rand)` makes this test probabilistic -- it is only *defined* (and thus
;; run) on roughly 1 in 5 loads of this namespace, and silently absent otherwise. The comment above says this is
;; deliberate (syncing the DB takes 5-10 minutes), but be aware a regression here can go unnoticed for several runs.
(when (> (rand) 0.80)
  (expect-with-engine :bigquery
    51
    ;; load the 51-table dataset and count the tables the driver reports; paging past the 50-table default page size
    ;; must surface all 51
    (data/with-temp-db [db fifty-one-different-tables]
      (count (database/tables db)))))
;; Test native queries ;; Test native queries
(expect-with-engine :bigquery (expect-with-engine :bigquery
......
...@@ -5,11 +5,21 @@ ...@@ -5,11 +5,21 @@
(:require [clojure.set :as set] (:require [clojure.set :as set]
[expectations :refer :all] [expectations :refer :all]
[metabase.driver :as driver] [metabase.driver :as driver]
#_[metabase.query-processor :refer :all]
[metabase.test.data :as data] [metabase.test.data :as data]
[metabase.test.data.datasets :as datasets] [metabase.test.data.datasets :as datasets]
metabase.test.data.interface
[metabase.util :as u])) [metabase.util :as u]))
;; Eagerly load every driver's test-extension namespace (`metabase.test.data.<engine>`).
;; If these load lazily, some things get loaded at the wrong time, which can cause test databases
;; to be created more than once -- and that fails.
(run! (fn [engine]
        (let [ns-symb (symbol (str "metabase.test.data." (name engine)))]
          (try
            (require ns-symb)
            (catch Throwable e
              (println (format "Error loading %s: %s" ns-symb (.getMessage e)))))))
      (keys (driver/available-drivers)))
;;; ------------------------------------------------------------ Helper Fns + Macros ------------------------------------------------------------ ;;; ------------------------------------------------------------ Helper Fns + Macros ------------------------------------------------------------
;; Event-Based DBs aren't tested here, but in `event-query-processor-test` instead. ;; Event-Based DBs aren't tested here, but in `event-query-processor-test` instead.
......
...@@ -166,95 +166,68 @@ ...@@ -166,95 +166,68 @@
;; ## Loading / Deleting Test Datasets ;; ## Loading / Deleting Test Datasets
(defn- add-extra-metadata!
  "Apply the extra metadata in DATABASE-DEFINITION (Field `visibility_type` and `special_type`) to the corresponding
   Metabase `Fields` of the freshly synced Database DB. Throws if a defined Table or Field wasn't created by sync."
  [database-definition db]
  (doseq [^TableDefinition table-definition (:table-definitions database-definition)]
    (let [table-name (:table-name table-definition)
          ;; delay the Table lookup: it's only needed (and the not-found exception only thrown) when a field below
          ;; actually has extra metadata to set
          table      (delay (or (i/metabase-instance table-definition db)
                                (throw (Exception. (format "Table '%s' not loaded from definition:\n%s\nFound:\n%s"
                                                           table-name
                                                           (u/pprint-to-str (dissoc table-definition :rows))
                                                           (u/pprint-to-str (db/select [Table :schema :name], :db_id (:id db))))))))]
      (doseq [{:keys [field-name visibility-type special-type], :as field-definition} (:field-definitions table-definition)]
        ;; BUGFIX: the format string below was missing its second `%s`, so the pprinted field definition passed as an
        ;; argument was silently dropped (java.util.Formatter ignores surplus arguments). Also fixed the "definiton"
        ;; typo in the Table error message above.
        (let [field (delay (or (i/metabase-instance field-definition @table)
                               (throw (Exception. (format "Field '%s' not loaded from definition:\n%s"
                                                          field-name
                                                          (u/pprint-to-str field-definition))))))]
          (when visibility-type
            (log/debug (format "SET VISIBILITY TYPE %s.%s -> %s" table-name field-name visibility-type))
            (db/update! Field (:id @field) :visibility_type (name visibility-type)))
          (when special-type
            (log/debug (format "SET SPECIAL TYPE %s.%s -> %s" table-name field-name special-type))
            (db/update! Field (:id @field) :special_type (u/keyword->qualified-name special-type))))))))
(defn- create-database! [{:keys [database-name], :as database-definition} engine driver]
  ;; first create the actual DBMS database
  (i/create-db! driver database-definition)
  ;; then record it as a Metabase Database object, sync it, apply the extra metadata from the definition,
  ;; and return the Database instance
  (let [database (db/insert! Database
                   :name    database-name
                   :engine  (name engine)
                   :details (i/database->connection-details driver :db database-definition))]
    (sync-database/sync-database! database)
    (add-extra-metadata! database-definition database)
    database))
(defn get-or-create-database! (defn get-or-create-database!
"Create DBMS database associated with DATABASE-DEFINITION, create corresponding Metabase `Databases`/`Tables`/`Fields`, and sync the `Database`. "Create DBMS database associated with DATABASE-DEFINITION, create corresponding Metabase `Databases`/`Tables`/`Fields`, and sync the `Database`.
DATASET-LOADER should be an object that implements `IDatasetLoader`; it defaults to the value returned by the method `dataset-loader` for the DRIVER should be an object that implements `IDatasetLoader`; it defaults to the value returned by the method `driver` for the
current dataset (`*driver*`), which is H2 by default." current dataset (`*driver*`), which is H2 by default."
([^DatabaseDefinition database-definition] ([database-definition]
(get-or-create-database! *driver* database-definition)) (get-or-create-database! *driver* database-definition))
([dataset-loader {:keys [database-name], :as ^DatabaseDefinition database-definition}] ([driver database-definition]
(let [engine (i/engine dataset-loader)] (let [engine (i/engine driver)]
(or (i/metabase-instance database-definition engine) (or (i/metabase-instance database-definition engine)
(do (create-database! database-definition engine driver)))))
;; Create the database
(i/create-db! dataset-loader database-definition)
;; Add DB object to Metabase DB
(let [db (db/insert! Database
:name database-name
:engine (name engine)
:details (i/database->connection-details dataset-loader :db database-definition))]
;; Sync the database
(sync-database/sync-database! db)
;; Add extra metadata like Field base-type, etc.
(doseq [^TableDefinition table-definition (:table-definitions database-definition)]
(let [table-name (:table-name table-definition)
table (delay (or (i/metabase-instance table-definition db)
(throw (Exception. (format "Table '%s' not loaded from definiton:\n%s\nFound:\n%s"
table-name
(u/pprint-to-str (dissoc table-definition :rows))
(u/pprint-to-str (db/select [Table :schema :name], :db_id (:id db))))))))]
(doseq [{:keys [field-name visibility-type special-type], :as field-definition} (:field-definitions table-definition)]
(let [field (delay (or (i/metabase-instance field-definition @table)
(throw (Exception. (format "Field '%s' not loaded from definition:\n"
field-name
(u/pprint-to-str field-definition))))))]
(when visibility-type
(log/debug (format "SET VISIBILITY TYPE %s.%s -> %s" table-name field-name visibility-type))
(db/update! Field (:id @field) :visibility_type (name visibility-type)))
(when special-type
(log/debug (format "SET SPECIAL TYPE %s.%s -> %s" table-name field-name special-type))
(db/update! Field (:id @field) :special_type (u/keyword->qualified-name special-type)))))))
db))))))
(defn remove-database!
  "Delete Metabase `Database`, `Fields` and `Tables` associated with DATABASE-DEFINITION, then remove the physical database from the associated DBMS.
   DATASET-LOADER should be an object that implements `IDatasetLoader`; by default it is the value returned by the method `dataset-loader` for the
   current dataset, bound to `*driver*`."
  ([^DatabaseDefinition database-definition]
   (remove-database! *driver* database-definition))
  ([dataset-loader ^DatabaseDefinition database-definition]
   ;; Delete the Metabase Database and associated objects first, while the Database row (looked up via
   ;; `metabase-instance` + engine) still exists
   (db/cascade-delete! Database :id (:id (i/metabase-instance database-definition (i/engine dataset-loader))))
   ;; now delete the DBMS database itself
   (i/destroy-db! dataset-loader database-definition)))
(def ^:private loader->loaded-db-def
  ;; set of [dataset-loader database-definition] pairs for every temp DB loaded during this run
  ;; (`do-with-temp-db` conj's onto it); consumed and emptied by `destroy-loaded-temp-dbs!`
  (atom #{}))
(defn destroy-loaded-temp-dbs!
  "Tear down every temporary database registered in `loader->loaded-db-def` (i.e. created by `with-temp-db`),
   then clear the registry. Runs automatically at the end of the test suite via the `:after-run` hook."
  {:expectations-options :after-run}
  []
  (binding [db/*disable-db-logging* true]
    (run! (fn [[loader dbdef]]
            ;; best-effort: a failure to destroy one DB shouldn't prevent destroying the rest
            (try
              (remove-database! loader dbdef)
              (catch Throwable e
                (println "Error destroying database:" e))))
          @loader->loaded-db-def))
  (reset! loader->loaded-db-def #{}))
(defn do-with-temp-db (defn do-with-temp-db
"Execute F with DBDEF loaded as the current dataset. F takes a single argument, the `DatabaseInstance` that was loaded and synced from DBDEF." "Execute F with DBDEF loaded as the current dataset. F takes a single argument, the `DatabaseInstance` that was loaded and synced from DBDEF."
[^DatabaseDefinition dbdef, f] [^DatabaseDefinition dbdef, f]
(let [loader *driver* (let [driver *driver*
dbdef (i/map->DatabaseDefinition dbdef)] dbdef (i/map->DatabaseDefinition dbdef)]
(swap! loader->loaded-db-def conj [loader dbdef])
(binding [db/*disable-db-logging* true] (binding [db/*disable-db-logging* true]
(let [db (get-or-create-database! loader dbdef)] (let [db (get-or-create-database! driver dbdef)]
(assert db) (assert db)
(assert (db/exists? Database :id (:id db))) (assert (db/exists? Database :id (u/get-id db)))
(with-db db (with-db db
(f db)))))) (f db))))))
(defmacro with-temp-db (defmacro with-temp-db
"Load and sync DATABASE-DEFINITION with DATASET-LOADER and execute BODY with the newly created `Database` bound to DB-BINDING, "Load and sync DATABASE-DEFINITION with DRIVER and execute BODY with the newly created `Database` bound to DB-BINDING,
and make it the current database for `metabase.test.data` functions like `id`. and make it the current database for `metabase.test.data` functions like `id`.
(with-temp-db [db tupac-sightings] (with-temp-db [db tupac-sightings]
...@@ -264,9 +237,8 @@ ...@@ -264,9 +237,8 @@
:aggregation [\"count\"] :aggregation [\"count\"]
:filter [\"<\" (:id &events.timestamp) \"1765-01-01\"]}})) :filter [\"<\" (:id &events.timestamp) \"1765-01-01\"]}}))
A given Database is only created once per run of the test suite, and is automatically destroyed at the conclusion of the suite. A given Database is only created once per run of the test suite, and is automatically destroyed at the conclusion of the suite."
(The created Database is added to `loader->loaded-db-def`, which can be destroyed with `destroy-loaded-temp-dbs!`, which is automatically ran at the end of the test suite.)" [[db-binding, ^DatabaseDefinition database-definition] & body]
[[db-binding ^DatabaseDefinition database-definition] & body]
`(do-with-temp-db ~database-definition `(do-with-temp-db ~database-definition
(fn [~db-binding] (fn [~db-binding]
~@body))) ~@body)))
......
...@@ -205,5 +205,4 @@ ...@@ -205,5 +205,4 @@
(merge i/IDatasetLoaderDefaultsMixin (merge i/IDatasetLoaderDefaultsMixin
{:engine (constantly :bigquery) {:engine (constantly :bigquery)
:database->connection-details (u/drop-first-arg database->connection-details) :database->connection-details (u/drop-first-arg database->connection-details)
:create-db! (u/drop-first-arg create-db!) :create-db! (u/drop-first-arg create-db!)}))
:destroy-db! (constantly nil)}))
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
:type/Time "timestamp"}) :type/Time "timestamp"})
(defn- timestamp->CrateDateTime (defn- timestamp->crate-datetime
[value] [value]
(cond (cond
(instance? java.sql.Timestamp value) (.getTime ^java.sql.Timestamp value) (instance? java.sql.Timestamp value) (.getTime ^java.sql.Timestamp value)
...@@ -34,7 +34,7 @@ ...@@ -34,7 +34,7 @@
(if (sequential? row-or-rows) (if (sequential? row-or-rows)
(map escape-field-names row-or-rows) (map escape-field-names row-or-rows)
(into {} (for [[k v] row-or-rows] (into {} (for [[k v] row-or-rows]
{(sql/escape-field-name k) (timestamp->CrateDateTime v)})))) {(sql/escape-field-name k) (timestamp->crate-datetime v)}))))
(defn- make-load-data-fn (defn- make-load-data-fn
"Create a `load-data!` function. This creates a function to actually insert a row or rows, wraps it with any WRAP-INSERT-FNS, "Create a `load-data!` function. This creates a function to actually insert a row or rows, wraps it with any WRAP-INSERT-FNS,
...@@ -44,7 +44,7 @@ ...@@ -44,7 +44,7 @@
(let [insert! ((apply comp wrap-insert-fns) (fn [row-or-rows] (let [insert! ((apply comp wrap-insert-fns) (fn [row-or-rows]
(jdbc/insert-multi! (jdbc/insert-multi!
(generic/database->spec driver :db dbdef) (generic/database->spec driver :db dbdef)
(keyword (:table-name tabledef)) (keyword (i/db-qualified-table-name (name (:database-name dbdef)) (name (:table-name tabledef))))
(escape-field-names row-or-rows) (escape-field-names row-or-rows)
{:transaction? false}))) {:transaction? false})))
rows (apply list (generic/load-data-get-rows driver dbdef tabledef))] rows (apply list (generic/load-data-get-rows driver dbdef tabledef))]
...@@ -63,9 +63,10 @@ ...@@ -63,9 +63,10 @@
:create-db-sql (constantly nil) :create-db-sql (constantly nil)
:add-fk-sql (constantly nil) :add-fk-sql (constantly nil)
:drop-db-if-exists-sql (constantly nil) :drop-db-if-exists-sql (constantly nil)
:load-data! (make-load-data-fn generic/load-data-add-ids)}) :load-data! (make-load-data-fn generic/load-data-add-ids)
:qualified-name-components (partial i/single-db-qualified-name-components "doc")})
i/IDatasetLoader i/IDatasetLoader
(merge generic/IDatasetLoaderMixin (merge generic/IDatasetLoaderMixin
{:database->connection-details database->connection-details {:database->connection-details database->connection-details
:engine (constantly :crate) :engine (constantly :crate)
:default-schema (constantly "doc")})) :default-schema (constantly "doc")}))
...@@ -21,8 +21,7 @@ ...@@ -21,8 +21,7 @@
(merge i/IDatasetLoaderDefaultsMixin (merge i/IDatasetLoaderDefaultsMixin
{:engine (constantly :druid) {:engine (constantly :druid)
:database->connection-details database->connection-details :database->connection-details database->connection-details
:create-db! (constantly nil) :create-db! (constantly nil)}))
:destroy-db! (constantly nil)}))
......
...@@ -129,12 +129,9 @@ ...@@ -129,12 +129,9 @@
(quot (pk-field-name driver))))) (quot (pk-field-name driver)))))
(defn- default-qualified-name-components (defn- default-qualified-name-components
([_ db-name] ([_ db-name] [db-name])
[db-name]) ([_ db-name table-name] [table-name])
([_ db-name table-name] ([_ db-name table-name field-name] [table-name field-name]))
[table-name])
([_ db-name table-name field-name]
[table-name field-name]))
(defn- default-quote-name [_ nm] (defn- default-quote-name [_ nm]
(str \" nm \")) (str \" nm \"))
...@@ -321,14 +318,10 @@ ...@@ -321,14 +318,10 @@
(doseq [tabledef table-definitions] (doseq [tabledef table-definitions]
(load-data! driver dbdef tabledef))) (load-data! driver dbdef tabledef)))
(defn- destroy-db! [driver dbdef]
  ;; destroying a generic-SQL test DB is just running the driver's DROP-DATABASE-IF-EXISTS statement against the server
  (let [drop-sql (drop-db-if-exists-sql driver dbdef)]
    (execute-sql! driver :server dbdef drop-sql)))
(def IDatasetLoaderMixin (def IDatasetLoaderMixin
"Mixin for `IGenericSQLDatasetLoader` types to implemnt `create-db!` and `destroy-db!` from `IDatasetLoader`." "Mixin for `IGenericSQLDatasetLoader` types to implement `create-db!` from `IDatasetLoader`."
(merge i/IDatasetLoaderDefaultsMixin (merge i/IDatasetLoaderDefaultsMixin
{:create-db! create-db! {:create-db! create-db!}))
:destroy-db! destroy-db!}))
;;; ## Various Util Fns ;;; ## Various Util Fns
......
...@@ -42,6 +42,20 @@ ...@@ -42,6 +42,20 @@
;; take up to last 30 characters because databases like Oracle have limits on the lengths of identifiers ;; take up to last 30 characters because databases like Oracle have limits on the lengths of identifiers
(apply str (take-last 30 (str/replace (str/lower-case (str database-name \_ table-name)) #"-" "_")))) (apply str (take-last 30 (str/replace (str/lower-case (str database-name \_ table-name)) #"-" "_"))))
(defn single-db-qualified-name-components
  "Implementation of `qualified-name-components` for drivers like Oracle and Redshift that must use a single existing DB for testing.
   Separate databases are simulated in two ways: everything lives under SESSION-SCHEMA, isolating concurrent test runs
   from one another, and the \"database\" name is embedded in each table name, e.g. to differentiate
   \"test_data_categories\" and \"tupac_sightings_categories\".
   Use this by partially binding it with a SESSION-SCHEMA:
     {:qualified-name-components (partial i/single-db-qualified-name-components my-session-schema-name)}"
  ([_ _ db-name]
   [db-name])
  ([session-schema _ db-name table-name]
   [session-schema (db-qualified-table-name db-name table-name)])
  ([session-schema _ db-name table-name field-name]
   ;; the field-qualified form is just the table-qualified form with the field name appended
   (conj (single-db-qualified-name-components session-schema nil db-name table-name) field-name)))
(defprotocol IMetabaseInstance (defprotocol IMetabaseInstance
(metabase-instance [this context] (metabase-instance [this context]
...@@ -88,11 +102,6 @@ ...@@ -88,11 +102,6 @@
and add the appropriate data. This method should drop existing databases with the same name if applicable. and add the appropriate data. This method should drop existing databases with the same name if applicable.
(This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)") (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)")
(destroy-db! [this, ^DatabaseDefinition database-definition]
"Destroy database, if any, associated with DATABASE-DEFINITION.
This refers to destroying a *DBMS* database -- removing an H2 file, dropping a Postgres database, etc.
This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`.")
;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter ;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter
(default-schema ^String [this] (default-schema ^String [this]
"*OPTIONAL* Return the default schema name that tables for this DB should be expected to have.") "*OPTIONAL* Return the default schema name that tables for this DB should be expected to have.")
......
...@@ -44,9 +44,9 @@ ...@@ -44,9 +44,9 @@
i/IDatasetLoader i/IDatasetLoader
(merge i/IDatasetLoaderDefaultsMixin (merge i/IDatasetLoaderDefaultsMixin
{:create-db! (u/drop-first-arg create-db!) {:create-db! (u/drop-first-arg create-db!)
:destroy-db! (u/drop-first-arg destroy-db!)
:database->connection-details database->connection-details :database->connection-details database->connection-details
:engine (constantly :mongo) :engine (constantly :mongo)
:format-name (fn [_ table-or-field-name] :format-name (fn [_ table-or-field-name]
(if (= table-or-field-name "id") "_id" (if (= table-or-field-name "id")
table-or-field-name))})) "_id"
table-or-field-name))}))
...@@ -65,11 +65,6 @@ ...@@ -65,11 +65,6 @@
session-schema session-schema
(i/db-qualified-table-name database-name table-name))) (i/db-qualified-table-name database-name table-name)))
(defn- qualified-name-components
  ;; Oracle tests share one fixed DB: qualify table/field names with the per-test-run `session-schema` instead of a
  ;; database, and fold the "database" name into the table name (via `db-qualified-table-name`) to keep datasets apart
  ([db-name] [db-name])
  ([db-name table-name] [session-schema (i/db-qualified-table-name db-name table-name)])
  ([db-name table-name field-name] [session-schema (i/db-qualified-table-name db-name table-name) field-name]))
(defn- expected-base-type->actual [base-type] (defn- expected-base-type->actual [base-type]
;; Oracle doesn't have INTEGERs ;; Oracle doesn't have INTEGERs
(if (isa? base-type :type/Integer) (if (isa? base-type :type/Integer)
...@@ -87,7 +82,7 @@ ...@@ -87,7 +82,7 @@
:field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type) :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)
:load-data! generic/load-data-one-at-a-time-parallel! :load-data! generic/load-data-one-at-a-time-parallel!
:pk-sql-type (constantly "INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1 INCREMENT BY 1) NOT NULL") ; LOL :pk-sql-type (constantly "INTEGER GENERATED BY DEFAULT AS IDENTITY (START WITH 1 INCREMENT BY 1) NOT NULL") ; LOL
:qualified-name-components (u/drop-first-arg qualified-name-components)}) :qualified-name-components (partial i/single-db-qualified-name-components session-schema)})
i/IDatasetLoader i/IDatasetLoader
(merge generic/IDatasetLoaderMixin (merge generic/IDatasetLoaderMixin
......
...@@ -30,11 +30,27 @@ ...@@ -30,11 +30,27 @@
(when (= context :db) (when (= context :db)
{:db database-name}))) {:db database-name})))
(defn- kill-connections-to-db-sql
  "Return a Postgres `DO` statement that terminates every other backend connected to the database named
   DATABASE-NAME (string or keyword).
   NOTE(review): the name is interpolated directly into the SQL literal -- fine for harness-controlled test DB names,
   but a name containing a single quote would break the statement."
  ^String [database-name]
  (let [db-name (name database-name)]
    (str "DO $$ BEGIN\n"
         " PERFORM pg_terminate_backend(pg_stat_activity.pid)\n"
         " FROM pg_stat_activity\n"
         " WHERE pid <> pg_backend_pid()\n"
         " AND pg_stat_activity.datname = '" db-name "';\n"
         "END $$;\n")))
(defn- drop-db-if-exists-sql [driver {:keys [database-name], :as dbdef}]
  ;; prepend the connection-killing DO block so the DROP DATABASE that follows can't fail because
  ;; something (e.g. a stray local psql session) is still connected to the test database
  (str (kill-connections-to-db-sql database-name)
       (generic/default-drop-db-if-exists-sql driver dbdef)))
(u/strict-extend PostgresDriver (u/strict-extend PostgresDriver
generic/IGenericSQLDatasetLoader generic/IGenericSQLDatasetLoader
(merge generic/DefaultsMixin (merge generic/DefaultsMixin
{:drop-table-if-exists-sql generic/drop-table-if-exists-cascade-sql {:drop-db-if-exists-sql drop-db-if-exists-sql
:drop-table-if-exists-sql generic/drop-table-if-exists-cascade-sql
:field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type) :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)
:load-data! generic/load-data-all-at-once! :load-data! generic/load-data-all-at-once!
:pk-sql-type (constantly "SERIAL")}) :pk-sql-type (constantly "SERIAL")})
...@@ -45,11 +61,3 @@ ...@@ -45,11 +61,3 @@
:engine (constantly :postgres) :engine (constantly :postgres)
;; TODO: this is suspect, but it works ;; TODO: this is suspect, but it works
:has-questionable-timezone-support? (constantly true)})) :has-questionable-timezone-support? (constantly true)}))
;; it's super obnoxious when testing locally to have tests fail because someone is already connected to the test-data DB (meaning we can't drop it), so close all connections to it beforehand
(defn- kill-connections-to-test-data-db!
  ;; `:before-run` expectations hook: terminate every other backend connected to the 'test-data' DB so the test
  ;; harness can drop it, per the comment above about stray local connections making that fail
  {:expectations-options :before-run}
  []
  (generic/query-when-testing! :postgres (fn [] (sql/connection-details->spec (PostgresDriver.) (database->connection-details :server {})))
    "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND pg_stat_activity.datname = 'test-data';"))
...@@ -49,13 +49,6 @@ ...@@ -49,13 +49,6 @@
(defonce ^:const session-schema-name (defonce ^:const session-schema-name
(str "schema_" session-schema-number)) (str "schema_" session-schema-number))
(defn- qualified-name-components
  ;; Redshift tests share a single cluster database: the db-name qualifier is dropped in the 3+-arg arities and
  ;; table/field names are qualified with the per-test-run `session-schema-name` instead.
  ;; NOTE(review): the leading ignored argument is presumably the driver (per the `qualified-name-components`
  ;; protocol convention) -- confirm against the caller.
  ([_ db-name]
   [db-name])
  ([_ _ table-name]
   [session-schema-name table-name])
  ([_ _ table-name field-name]
   [session-schema-name table-name field-name]))
(u/strict-extend RedshiftDriver (u/strict-extend RedshiftDriver
generic/IGenericSQLDatasetLoader generic/IGenericSQLDatasetLoader
...@@ -65,7 +58,7 @@ ...@@ -65,7 +58,7 @@
:drop-table-if-exists-sql generic/drop-table-if-exists-cascade-sql :drop-table-if-exists-sql generic/drop-table-if-exists-cascade-sql
:field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type) :field-base-type->sql-type (u/drop-first-arg field-base-type->sql-type)
:pk-sql-type (constantly "INTEGER IDENTITY(1,1)") :pk-sql-type (constantly "INTEGER IDENTITY(1,1)")
:qualified-name-components qualified-name-components}) :qualified-name-components (partial i/single-db-qualified-name-components session-schema-name)})
i/IDatasetLoader i/IDatasetLoader
(merge generic/IDatasetLoaderMixin (merge generic/IDatasetLoaderMixin
......
...@@ -69,12 +69,9 @@ ...@@ -69,12 +69,9 @@
(format "IF object_id('%s.dbo.%s') IS NOT NULL DROP TABLE \"%s\".dbo.\"%s\";" db-name table-name db-name table-name))) (format "IF object_id('%s.dbo.%s') IS NOT NULL DROP TABLE \"%s\".dbo.\"%s\";" db-name table-name db-name table-name)))
(defn- qualified-name-components (defn- qualified-name-components
([db-name] ([db-name] [(+suffix db-name)])
[(+suffix db-name)]) ([db-name table-name] [(+suffix db-name) "dbo" table-name])
([db-name table-name] ([db-name table-name field-name] [(+suffix db-name) "dbo" table-name field-name]))
[(+suffix db-name) "dbo" table-name])
([db-name table-name field-name]
[(+suffix db-name) "dbo" table-name field-name]))
(u/strict-extend SQLServerDriver (u/strict-extend SQLServerDriver
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment