Unverified Commit 149f31a7 authored by Cam Saul

Merge branch 'master' into snowflake-support [ci drivers]

parents aae4ca12 83e821bf
@@ -558,7 +558,8 @@
   "Get the identifier used for `checkins` for the current driver by looking at what the driver uses when converting MBQL
   to SQL. Different drivers qualify to different degrees (i.e. `table` vs `schema.table` vs `database.schema.table`)."
   []
-  (second (re-find #"FROM\s([^\s]+)" (:query (qp/query->native (data/mbql-query checkins))))))
+  (let [sql (:query (qp/query->native (data/mbql-query checkins)))]
+    (second (re-find #"FROM\s([^\s()]+)" sql))))

 ;; as with the MBQL parameters tests Redshift and Crate fail for unknown reasons; disable their tests for now
 (def ^:private ^:const sql-parameters-engines
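A quick REPL check of why `()` joined the regex's character class: when the generated SQL wraps a source query in parentheses, the old pattern captures the opening paren and the subselect keyword, while the new one skips past the parenthesized form to the real table reference. The SQL string here is illustrative, not the output of any particular driver:

(def sql "SELECT count(*) FROM (SELECT * FROM checkins) source")

(second (re-find #"FROM\s([^\s]+)"   sql)) ;; => "(SELECT"  (old pattern grabs the subselect)
(second (re-find #"FROM\s([^\s()]+)" sql)) ;; => "checkins" (new pattern finds the table)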
@@ -647,7 +648,12 @@
   (first-row
     (process-native
       :native     {:query         (case datasets/*engine*
-                                     :oracle "SELECT cast({{date}} as date) from dual"
+                                     :bigquery
+                                     "SELECT {{date}} as date"
+                                     :oracle
+                                     "SELECT cast({{date}} as date) from dual"
                                      "SELECT cast({{date}} as date)")
                    :template-tags {"date" {:name "date" :display-name "Date" :type :date}}}
       :parameters [{:type :date/single :target [:variable [:template-tag "date"]] :value "2018-04-18"}]))))
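The per-engine dispatch above, pulled out as a standalone sketch (same engine keywords as the hunk; the trailing expression is `case`'s default clause, used for every other SQL engine):

(defn- native-date-sql [engine]
  (case engine
    :bigquery "SELECT {{date}} as date"
    :oracle   "SELECT cast({{date}} as date) from dual"
    "SELECT cast({{date}} as date)"))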
......
@@ -181,7 +181,7 @@
 ;; make sure we can do a query with breakout and aggregation using a SQL source query
 (datasets/expect-with-engines (non-timeseries-engines-with-feature :nested-queries)
   breakout-results
   (rows+cols
    (format-rows-by [int int]
      (qp/process-query
......
@@ -61,7 +61,7 @@
    {:name (data/format-name "comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db comment-after-sync]
     ;; modify the source DB to add the comment and resync
-    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment") true)
+    (i/create-db! ds/*driver* (assoc-in comment-after-sync [:table-definitions 0 :field-definitions 0 :field-comment] "added comment"), {:skip-drop-db? true})
     (sync/sync-table! (Table (data/id "comment_after_sync")))
     (db->fields db)))
@@ -98,7 +98,7 @@
 (ds/expect-with-engines #{:h2 :postgres}
   #{{:name (data/format-name "table_with_comment_after_sync"), :description "added comment"}}
   (data/with-temp-db [db (basic-table "table_with_comment_after_sync" nil)]
     ;; modify the source DB to add the comment and resync
-    (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") true)
+    (i/create-db! ds/*driver* (basic-table "table_with_comment_after_sync" "added comment") {:skip-drop-db? true})
     (metabase.sync.sync-metadata.tables/sync-tables! db)
     (db->tables db)))
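Both hunks above make the same mechanical change at the call site: the opaque trailing boolean becomes a named option. A minimal before/after sketch, with `driver` and `dbdef` as placeholders:

(i/create-db! driver dbdef true)                  ;; before: what does true mean here?
(i/create-db! driver dbdef {:skip-drop-db? true}) ;; after: self-documenting, extensible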
@@ -200,35 +200,38 @@
 (def ^:private existing-datasets
   (atom #{}))

-(defn- create-db! [{:keys [database-name table-definitions]}]
-  {:pre [(seq database-name) (sequential? table-definitions)]}
-  ;; fetch existing datasets if we haven't done so yet
-  (when-not (seq @existing-datasets)
-    (reset! existing-datasets (set (existing-dataset-names)))
-    (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
-  ;; now check and see if we need to create the requested one
-  (let [database-name (normalize-name database-name)]
-    (when-not (contains? @existing-datasets database-name)
-      (try
-        (u/auto-retry 10
-          ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
-          ;; again from a blank slate
-          (u/ignore-exceptions
-            (destroy-dataset! database-name))
-          (create-dataset! database-name)
-          ;; do this in parallel because otherwise it can literally take an hour to load something like
-          ;; fifty_one_different_tables
-          (u/pdoseq [tabledef table-definitions]
-            (load-tabledef! database-name tabledef))
-          (swap! existing-datasets conj database-name)
-          (println (u/format-color 'green "[OK]")))
-        ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
-        ;; around
-        (catch Throwable e
-          (u/ignore-exceptions
-            (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
-            (destroy-dataset! database-name))
-          (throw e))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [database-name table-definitions]} _]
+   {:pre [(seq database-name) (sequential? table-definitions)]}
+   ;; fetch existing datasets if we haven't done so yet
+   (when-not (seq @existing-datasets)
+     (reset! existing-datasets (set (existing-dataset-names)))
+     (println "These BigQuery datasets have already been loaded:\n" (u/pprint-to-str (sort @existing-datasets))))
+   ;; now check and see if we need to create the requested one
+   (let [database-name (normalize-name database-name)]
+     (when-not (contains? @existing-datasets database-name)
+       (try
+         (u/auto-retry 10
+           ;; if the dataset failed to load successfully last time around, destroy whatever was loaded so we start
+           ;; again from a blank slate
+           (u/ignore-exceptions
+             (destroy-dataset! database-name))
+           (create-dataset! database-name)
+           ;; do this in parallel because otherwise it can literally take an hour to load something like
+           ;; fifty_one_different_tables
+           (u/pdoseq [tabledef table-definitions]
+             (load-tabledef! database-name tabledef))
+           (swap! existing-datasets conj database-name)
+           (println (u/format-color 'green "[OK]")))
+         ;; if creating the dataset ultimately fails to complete, then delete it so it will hopefully work next time
+         ;; around
+         (catch Throwable e
+           (u/ignore-exceptions
+             (println (u/format-color 'red "Failed to load BigQuery dataset '%s'." database-name))
+             (destroy-dataset! database-name))
+           (throw e)))))))

 ;;; --------------------------------------------- IDriverTestExtensions ----------------------------------------------
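The `existing-datasets` atom gives BigQuery a load-once cache, since (per the comment in the hunk) loading a dataset can take very long. The core of that pattern as a self-contained sketch; the names here are illustrative, not from the diff:

(def ^:private loaded (atom #{}))

(defn- ensure-loaded!
  "Run `load-fn` for `dataset-name` at most once per process."
  [dataset-name load-fn]
  (when-not (contains? @loaded dataset-name)
    (load-fn dataset-name)
    (swap! loaded conj dataset-name)))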
......
@@ -362,9 +362,11 @@
       (execute! driver context dbdef (s/replace statement #"⅋" ";"))))))

 (defn default-create-db!
-  ([driver database-definition]
-   (default-create-db! driver database-definition false))
-  ([driver {:keys [table-definitions], :as dbdef} skip-drop-db?]
+  "Default implementation of `create-db!` for SQL drivers."
+  ([driver db-def]
+   (default-create-db! driver db-def nil))
+  ([driver {:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?]
+                                                   :or   {skip-drop-db? false}}]
    (when-not skip-drop-db?
      ;; Exec SQL for creating the DB
      (execute-sql! driver :server dbdef (str (drop-db-if-exists-sql driver dbdef) ";\n"
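One subtlety in the two-arity form above: the one-arity version passes `nil` as the options map, which is safe because Clojure destructures `nil` like an empty map and `:or` then supplies the default. A standalone check (`opts-demo` is a throwaway name):

(defn- opts-demo [{:keys [skip-drop-db?], :or {skip-drop-db? false}}]
  skip-drop-db?)

(opts-demo nil)                   ;; => false
(opts-demo {})                    ;; => false
(opts-demo {:skip-drop-db? true}) ;; => true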
......
@@ -110,17 +110,21 @@
   "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database`
    details map). CONTEXT is one of:

    * `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
                  creating/destroying databases)
    * `:db`     - Return details for connecting specifically to the DB.")

-  (create-db! [this, ^DatabaseDefinition database-definition]
-    [this, ^DatabaseDefinition database-definition, ^Boolean skip-drop-db?]
+  (create-db!
+    [this, ^DatabaseDefinition database-definition]
+    [this, ^DatabaseDefinition database-definition, {:keys [skip-drop-db?]}]
     "Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints,
-     and add the appropriate data. This method should drop existing databases with the same name if applicable,
-     unless the skip-drop-db? arg is true. This is to workaround a scenario where the postgres driver terminates
-     the connection before dropping the DB and causes some tests to fail.
-     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)")
+     and add the appropriate data. This method should drop existing databases with the same name if applicable, unless
+     the skip-drop-db? arg is true. This is to work around a scenario where the postgres driver terminates the
+     connection before dropping the DB and causes some tests to fail.
+     (This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)
+
+     Optional `options` as third param. Currently supported options include `skip-drop-db?`. If unspecified,
+     `skip-drop-db?` should default to `false`.")

   ;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter
   (default-schema ^String [this]
@@ -129,7 +133,7 @@
   (expected-base-type->actual [this base-type]
     "*OPTIONAL*. Return the base type that is actually used to store `Fields` of BASE-TYPE.
      The default implementation of this method is an identity fn. This is provided so DBs that don't support a given
      BASE-TYPE used in the test data can specify what type we should expect in the results instead. For example,
      Oracle has no `INTEGER` data types, so `:type/Integer` test values are instead stored as `NUMBER`, which we map
      to `:type/Decimal`.")
@@ -166,17 +170,19 @@
 (defn create-table-definition
   "Convenience for creating a `TableDefinition`."
   ^TableDefinition [^String table-name, field-definition-maps rows]
-  (s/validate TableDefinition (map->TableDefinition {:table-name        table-name
-                                                     :rows              rows
-                                                     :field-definitions (mapv create-field-definition field-definition-maps)})))
+  (s/validate TableDefinition (map->TableDefinition
+                               {:table-name        table-name
+                                :rows              rows
+                                :field-definitions (mapv create-field-definition field-definition-maps)})))

 (defn create-database-definition
   "Convenience for creating a new `DatabaseDefinition`."
   {:style/indent 1}
   ^DatabaseDefinition [^String database-name & table-name+field-definition-maps+rows]
-  (s/validate DatabaseDefinition (map->DatabaseDefinition {:database-name     database-name
-                                                           :table-definitions (mapv (partial apply create-table-definition)
-                                                                                    table-name+field-definition-maps+rows)})))
+  (s/validate DatabaseDefinition (map->DatabaseDefinition
+                                  {:database-name     database-name
+                                   :table-definitions (mapv (partial apply create-table-definition)
+                                                            table-name+field-definition-maps+rows)})))

 (def ^:private ^:const edn-definitions-dir "./test/metabase/test/data/dataset_definitions/")
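For context, how the two helpers compose: `create-database-definition` takes a database name followed by `[table-name field-definition-maps rows]` vectors, each of which is applied to `create-table-definition`. A hedged sketch with a made-up table:

(create-database-definition "test-db"
  ["venues"
   [{:field-name "name",  :base-type :type/Text}
    {:field-name "price", :base-type :type/Integer}]
   [["Red Medicine"          3]
    ["Stout Burgers & Beers" 2]]])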
@@ -184,12 +190,9 @@
   (edn/read-string (slurp (str edn-definitions-dir dbname ".edn"))))

 (defn update-table-def
-  "Function useful for modifying a table definition before it's
-  applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector of column
-  definitions and `UPDATE-ROWS-FN` with the vector of rows in the
-  database definition. `TABLE-DEF` is the database
-  definition (typically used directly in a `def-database-definition`
-  invocation)."
+  "Function useful for modifying a table definition before it's applied. Will invoke `UPDATE-TABLE-DEF-FN` on the vector
+  of column definitions and `UPDATE-ROWS-FN` with the vector of rows in the database definition. `TABLE-DEF` is the
+  database definition (typically used directly in a `def-database-definition` invocation)."
   [table-name-to-update update-table-def-fn update-rows-fn table-def]
   (vec
    (for [[table-name table-def rows :as orig-table-def] table-def]
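A hedged usage sketch of `update-table-def` (the table shape is made up, and the elided body is assumed to apply the two fns only to the named table): appending a column definition plus a matching value in each row:

(update-table-def "checkins"
                  #(conj % {:field-name "comment", :base-type :type/Text})
                  (fn [rows] (mapv #(conj % "hello") rows))
                  [["checkins"
                    [{:field-name "user_id", :base-type :type/Integer}]
                    [[1] [2]]]])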
......
@@ -18,26 +18,30 @@
   (with-open [mongo-connection (mg/connect (database->connection-details dbdef))]
     (mg/drop-db mongo-connection (i/escaped-name dbdef))))

-(defn- create-db! [{:keys [table-definitions], :as dbdef}]
-  (destroy-db! dbdef)
-  (with-mongo-connection [mongo-db (database->connection-details dbdef)]
-    (doseq [{:keys [field-definitions table-name rows]} table-definitions]
-      (let [field-names (for [field-definition field-definitions]
-                          (keyword (:field-name field-definition)))]
-        ;; Use map-indexed so we can get an ID for each row (index + 1)
-        (doseq [[i row] (map-indexed (partial vector) rows)]
-          (let [row (for [v row]
-                      ;; Convert all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
-                      ;; using Timestamps in 2.x to Dates in 3.x
-                      (if (instance? java.sql.Timestamp v)
-                        (java.util.Date. (.getTime ^java.sql.Timestamp v))
-                        v))]
-            (try
-              ;; Insert each row
-              (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
-                                                           :_id (inc i)))
-              ;; If row already exists then nothing to do
-              (catch com.mongodb.MongoException _))))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions], :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (when-not skip-drop-db?
+     (destroy-db! dbdef))
+   (with-mongo-connection [mongo-db (database->connection-details dbdef)]
+     (doseq [{:keys [field-definitions table-name rows]} table-definitions]
+       (let [field-names (for [field-definition field-definitions]
+                           (keyword (:field-name field-definition)))]
+         ;; Use map-indexed so we can get an ID for each row (index + 1)
+         (doseq [[i row] (map-indexed (partial vector) rows)]
+           (let [row (for [v row]
+                       ;; Convert all the java.sql.Timestamps to java.util.Date, because the Mongo driver insists on being obnoxious and going from
+                       ;; using Timestamps in 2.x to Dates in 3.x
+                       (if (instance? java.sql.Timestamp v)
+                         (java.util.Date. (.getTime ^java.sql.Timestamp v))
+                         v))]
+             (try
+               ;; Insert each row
+               (mc/insert mongo-db (name table-name) (assoc (zipmap field-names row)
+                                                            :_id (inc i)))
+               ;; If row already exists then nothing to do
+               (catch com.mongodb.MongoException _)))))))))

 (u/strict-extend MongoDriver
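Two small pieces of the row loop, checked in isolation with illustrative values: `map-indexed` pairs each row with a zero-based index (so `(inc i)` yields 1-based `:_id`s), and the Timestamp coercion is a plain constructor call:

(map-indexed (partial vector) [:row-a :row-b :row-c])
;; => ([0 :row-a] [1 :row-b] [2 :row-c])

(java.util.Date. (.getTime (java.sql.Timestamp. 0)))
;; => #inst "1970-01-01T00:00:00.000-00:00"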
......
@@ -71,18 +71,22 @@
       query
       (unprepare/unprepare (cons query params) :quote-escape "'", :iso-8601-fn :from_iso8601_timestamp))))

-(defn- create-db! [{:keys [table-definitions] :as dbdef}]
-  (let [details (database->connection-details :db dbdef)]
-    (doseq [tabledef table-definitions
-            :let [rows (:rows tabledef)
-                  ;; generate an ID for each row because we don't have auto increments
-                  keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
-                  ;; make 100-row batches since we have to inline everything
-                  batches (partition 100 100 nil keyed-rows)]]
-      (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef))
-      (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
-      (doseq [batch batches]
-        (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch))))))
+(defn- create-db!
+  ([db-def]
+   (create-db! db-def nil))
+  ([{:keys [table-definitions] :as dbdef} {:keys [skip-drop-db?], :or {skip-drop-db? false}}]
+   (let [details (database->connection-details :db dbdef)]
+     (doseq [tabledef table-definitions
+             :let [rows (:rows tabledef)
+                   ;; generate an ID for each row because we don't have auto increments
+                   keyed-rows (map-indexed (fn [i row] (conj row (inc i))) rows)
+                   ;; make 100-row batches since we have to inline everything
+                   batches (partition 100 100 nil keyed-rows)]]
+       (when-not skip-drop-db?
+         (#'presto/execute-presto-query! details (drop-table-if-exists-sql dbdef tabledef)))
+       (#'presto/execute-presto-query! details (create-table-sql dbdef tabledef))
+       (doseq [batch batches]
+         (#'presto/execute-presto-query! details (insert-sql dbdef tabledef batch)))))))

 ;;; IDriverTestExtensions implementation
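What `(partition 100 100 nil keyed-rows)` buys over plain `partition`, shown with small numbers: the `nil` pad collection keeps the final short batch instead of silently dropping leftover rows:

(partition 3 3 nil (range 7)) ;; => ((0 1 2) (3 4 5) (6))
(partition 3 (range 7))       ;; => ((0 1 2) (3 4 5)) -- last row lost without the pad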
......