From 0556b2344e8b2c49aeffbf8ab53740d6cf77af71 Mon Sep 17 00:00:00 2001 From: Cam Saul <cam@geotip.com> Date: Mon, 29 Jun 2015 20:29:34 -0700 Subject: [PATCH] work towards testing w/ a Postgres test data set --- circle.yml | 2 +- .../driver/generic_sql/query_processor.clj | 23 ++- src/metabase/driver/h2.clj | 1 - src/metabase/driver/postgres.clj | 7 +- src/metabase/driver/query_processor.clj | 8 +- test/metabase/driver/query_processor_test.clj | 57 ++++--- test/metabase/test/data/datasets.clj | 13 +- test/metabase/test/data/generic_sql.clj | 73 +++++++++ test/metabase/test/data/h2.clj | 141 ++++++++---------- test/metabase/test/data/postgres.clj | 99 ++++++++++++ 10 files changed, 304 insertions(+), 120 deletions(-) create mode 100644 test/metabase/test/data/generic_sql.clj create mode 100644 test/metabase/test/data/postgres.clj diff --git a/circle.yml b/circle.yml index 2fa20f32457..6ef77e78377 100644 --- a/circle.yml +++ b/circle.yml @@ -11,5 +11,5 @@ test: # 2) runs Eastwood linter + Bikeshed linter # 3) runs JS linter + JS test # 4) runs lein uberjar - - case $CIRCLE_NODE_INDEX in 0) MB_TEST_DATASETS=generic-sql,mongo lein test ;; 1) MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) lein eastwood && lein bikeshed --max-line-length 240 ;; 3) npm run lint && npm run build && npm run test ;; 4) CI_DISABLE_WEBPACK_MINIFICATION=1 lein uberjar ;; esac: + - case $CIRCLE_NODE_INDEX in 0) MB_TEST_DATASETS=generic-sql,mongo,postgres lein test ;; 1) MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost lein test ;; 2) lein eastwood && lein bikeshed --max-line-length 240 ;; 3) npm run lint && npm run build && npm run test ;; 4) CI_DISABLE_WEBPACK_MINIFICATION=1 lein uberjar ;; esac: parallel: true diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj index f8bebfe9441..4261ff59f79 100644 --- a/src/metabase/driver/generic_sql/query_processor.clj +++ b/src/metabase/driver/generic_sql/query_processor.clj @@ -38,25 +38,32 @@ (binding [*query* query] (try ;; Process the expanded query and generate a korma form - (let [korma-form `(let [entity# (korma-entity ~database ~source-table)] - (select entity# ~@(->> (map apply-form (:query query)) - (filter identity) - (mapcat #(if (vector? %) % [%])))))] + (let [entity (gensym "ENTITY_") + korma-select-form `(select ~entity ~@(->> (map apply-form (:query query)) + (filter identity) + (mapcat #(if (vector? %) % [%])))) + set-timezone-sql (when-let [timezone (:timezone (:details database))] + (when-let [set-timezone-sql (:timezone->set-timezone-sql qp/*driver*)] + `(exec-raw ~(set-timezone-sql timezone)))) + korma-form `(let [~entity (korma-entity ~database ~source-table)] + ~(if set-timezone-sql `(korma.db/with-db (:db ~entity) + (korma.db/transaction + ~set-timezone-sql + ~korma-select-form)) + korma-select-form))] ;; Log generated korma form (when (config/config-bool :mb-db-logging) (log-korma-form korma-form)) - ;; Now eval the korma form. Then annotate the results - ;; TODO - why does this happen within the individual drivers still? Annotate should be moved out (let [results (eval korma-form)] {:results results :uncastify-fn uncastify})) (catch java.sql.SQLException e - (let [^String message (or (->> (.getMessage e) ; error message comes back like "Error message ... 
[status-code]" sometimes + (let [^String message (or (->> (.getMessage e) ; error message comes back like "Error message ... [status-code]" sometimes (re-find #"(?s)(^.*)\s+\[[\d-]+\]$") ; status code isn't useful and makes unit tests hard to write so strip it off - second) ; (?s) = Pattern.DOTALL - tell regex `.` to match newline characters as well + second) ; (?s) = Pattern.DOTALL - tell regex `.` to match newline characters as well (.getMessage e))] (throw (Exception. message))))))) diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj index 217f19d0d92..3e95159dd05 100644 --- a/src/metabase/driver/h2.clj +++ b/src/metabase/driver/h2.clj @@ -100,7 +100,6 @@ :connection-details->connection-spec connection-details->connection-spec :database->connection-details database->connection-details :sql-string-length-fn :LENGTH - :timezone->set-timezone-sql nil :cast-timestamp-seconds-field-to-date-fn cast-timestamp-seconds-field-to-date-fn :cast-timestamp-milliseconds-field-to-date-fn cast-timestamp-milliseconds-field-to-date-fn :uncastify-timestamp-regex uncastify-timestamp-regex})) diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj index 18a8763a077..308008c687a 100644 --- a/src/metabase/driver/postgres.clj +++ b/src/metabase/driver/postgres.clj @@ -111,15 +111,16 @@ (defn- cast-timestamp-seconds-field-to-date-fn [table-name field-name] {:pre [(string? table-name) (string? field-name)]} - (format "CAST(TO_TIMESTAMP(\"%s\".\"%s\") AS DATE)" table-name field-name)) + (format "(TIMESTAMP WITH TIME ZONE 'epoch' + (\"%s\".\"%s\" * INTERVAL '1 second'))::date" table-name field-name)) (defn- cast-timestamp-milliseconds-field-to-date-fn [table-name field-name] {:pre [(string? table-name) (string? field-name)]} - (format "CAST(TO_TIMESTAMP(\"%s\".\"%s\" / 1000) AS DATE)" table-name field-name)) + (format "(TIMESTAMP WITH TIME ZONE 'epoch' + (\"%s\".\"%s\" * INTERVAL '1 millisecond'))::date" table-name field-name)) (def ^:private ^:const uncastify-timestamp-regex - #"CAST\(TO_TIMESTAMP\([^.\s]+\.([^.\s]+)(?: / 1000)?\) AS DATE\)") + ;; TODO - this doesn't work + #"TO_TIMESTAMP\([^.\s]+\.([^.\s]+)(?: / 1000)?\)::date") ;; ## DRIVER diff --git a/src/metabase/driver/query_processor.clj b/src/metabase/driver/query_processor.clj index d720d20230d..3d5ea81038a 100644 --- a/src/metabase/driver/query_processor.clj +++ b/src/metabase/driver/query_processor.clj @@ -342,9 +342,11 @@ (= col-kw :count) {:base_type :IntegerField :special_type :number} ;; Otherwise something went wrong ! - :else (throw (Exception. 
(format "Annotation failed: don't know what to do with Field '%s'.\nExpected these Fields:\n%s" - col-kw - (u/pprint-to-str field-kw->field)))))))) + :else (do (log/error (u/format-color 'red "Annotation failed: don't know what to do with Field '%s'.\nExpected these Fields:\n%s" + col-kw + (u/pprint-to-str field-kw->field))) + {:base_type :UnknownField + :special_type nil}))))) ;; Add FK info the the resulting Fields add-fields-extra-info))) diff --git a/test/metabase/driver/query_processor_test.clj b/test/metabase/driver/query_processor_test.clj index 704fc114001..1654dfed893 100644 --- a/test/metabase/driver/query_processor_test.clj +++ b/test/metabase/driver/query_processor_test.clj @@ -782,7 +782,7 @@ :query ~query}))) ;; There were 9 "sad toucan incidents" on 2015-06-02 -(datasets/expect-with-datasets #{:generic-sql} +(datasets/expect-with-datasets #{:generic-sql :postgres} 9 (->> (query-with-temp-db defs/sad-toucan-incidents :source_table &incidents:id @@ -794,25 +794,42 @@ ;;; Unix timestamp breakouts -- SQL only -(datasets/expect-with-datasets #{:generic-sql} - [["2015-06-01" 6] - ["2015-06-02" 9] - ["2015-06-03" 5] - ["2015-06-04" 9] - ["2015-06-05" 8] - ["2015-06-06" 9] - ["2015-06-07" 8] - ["2015-06-08" 9] - ["2015-06-09" 7] - ["2015-06-10" 8]] - (->> (query-with-temp-db defs/sad-toucan-incidents - :source_table &incidents:id - :aggregation ["count"] - :breakout [&incidents.timestamp:id] - :limit 10) - :data :rows - (map (fn [[^java.util.Date date count]] - [(.toString date) (int count)])))) +(let [do-query (fn [] (->> (query-with-temp-db defs/sad-toucan-incidents + :source_table &incidents:id + :aggregation ["count"] + :breakout [&incidents.timestamp:id] + :limit 10) + :data :rows + (map (fn [[^java.util.Date date count]] + [(.toString date) (int count)]))))] + + (datasets/expect-with-dataset :generic-sql + [["2015-06-01" 6] + ["2015-06-02" 9] + ["2015-06-03" 5] + ["2015-06-04" 9] + ["2015-06-05" 8] + ["2015-06-06" 9] + ["2015-06-07" 8] + ["2015-06-08" 9] + ["2015-06-09" 7] + ["2015-06-10" 8]] + (do-query)) + + ;; postgres gives us *slightly* different answers because I think it's actually handling UNIX timezones properly (with timezone = UTC) + ;; as opposed to H2 which is giving us the wrong timezome. TODO - verify this + (datasets/expect-with-dataset :postgres + [["2015-06-01" 8] + ["2015-06-02" 9] + ["2015-06-03" 9] + ["2015-06-04" 4] + ["2015-06-05" 11] + ["2015-06-06" 8] + ["2015-06-07" 6] + ["2015-06-08" 10] + ["2015-06-09" 6] + ["2015-06-10" 10]] + (do-query))) ;; +------------------------------------------------------------------------------------------------------------------------+ diff --git a/test/metabase/test/data/datasets.clj b/test/metabase/test/data/datasets.clj index 8daf54e42d1..3aa0576107e 100644 --- a/test/metabase/test/data/datasets.clj +++ b/test/metabase/test/data/datasets.clj @@ -11,7 +11,8 @@ [table :refer [Table]]) (metabase.test.data [data :as data] [h2 :as h2] - [mongo :as mongo]) + [mongo :as mongo] + [postgres :as postgres]) [metabase.util :as u])) ;; # IDataset @@ -88,10 +89,11 @@ (def ^:private generic-sql-db (delay )) -(deftype GenericSqlDriverData [dbpromise] +(deftype GenericSqlDriverData [dataset-loader-fn + dbpromise] IDataset (dataset-loader [_] - (h2/dataset-loader)) + (dataset-loader-fn)) (load-data! [this] (when-not (realized? dbpromise) @@ -128,7 +130,10 @@ (def dataset-name->dataset "Map of dataset keyword name -> dataset instance (i.e., an object that implements `IDataset`)." {:mongo (MongoDriverData.) 
-   :generic-sql (GenericSqlDriverData. (promise))})
+   :generic-sql (GenericSqlDriverData. h2/dataset-loader (promise))
+
+   ;; TODO - make sure we have pg connection info
+   :postgres    (GenericSqlDriverData. postgres/dataset-loader (promise))})
 
 (def ^:const all-valid-dataset-names
   "Set of names of all valid datasets."
diff --git a/test/metabase/test/data/generic_sql.clj b/test/metabase/test/data/generic_sql.clj
new file mode 100644
index 00000000000..30dbe0f0521
--- /dev/null
+++ b/test/metabase/test/data/generic_sql.clj
@@ -0,0 +1,73 @@
+(ns metabase.test.data.generic-sql
+  "Common functionality for various Generic SQL dataset loaders."
+  (:require [clojure.tools.logging :as log]
+            [korma.core :as k]
+            [metabase.test.data.interface :as i])
+  (:import (metabase.test.data.interface DatabaseDefinition
+                                         TableDefinition)))
+
+(defprotocol IGenericSQLDatasetLoader
+  "Methods that generic SQL dataset loaders should implement so they can use the shared functions in `metabase.test.data.generic-sql`."
+  (execute-sql! [this ^DatabaseDefinition database-definition ^String raw-sql]
+    "Execute RAW-SQL against the database defined by DATABASE-DEFINITION.")
+
+  (korma-entity [this ^DatabaseDefinition database-definition ^TableDefinition table-definition]
+    "Return a Korma entity (e.g., one that can be passed to `select` or `sel`) for the table
+     defined by TABLE-DEFINITION in the database defined by DATABASE-DEFINITION.")
+
+  (pk-sql-type ^String [this]
+    "SQL type that should be used for the `ID` primary key column, e.g. `SERIAL` or `BIGINT AUTO_INCREMENT`.")
+
+  (field-base-type->sql-type ^String [this base-type]
+    "Given a `Field.base_type`, return the SQL type we should use for that column when creating a DB."))
+
+
+(defn create-physical-table! [dataset-loader database-definition {:keys [table-name field-definitions], :as table-definition}]
+  ;; Drop the table if it already exists
+  (i/drop-physical-table! dataset-loader database-definition table-definition)
+
+  ;; Now create the new table
+  (execute-sql! dataset-loader database-definition
+    (format "CREATE TABLE \"%s\" (%s, \"ID\" %s, PRIMARY KEY (\"ID\"));"
+            table-name
+            (->> field-definitions
+                 (map (fn [{:keys [field-name base-type]}]
+                        (format "\"%s\" %s" field-name (field-base-type->sql-type dataset-loader base-type))))
+                 (interpose ", ")
+                 (apply str))
+            (pk-sql-type dataset-loader))))
+
+
+(defn drop-physical-table! [dataset-loader database-definition table-definition]
+  (execute-sql! dataset-loader database-definition
+    (format "DROP TABLE IF EXISTS \"%s\";" (:table-name table-definition))))
+
+
+(defn create-physical-db! [dataset-loader {:keys [table-definitions], :as database-definition}]
+  ;; Create all the Tables
+  (doseq [^TableDefinition table-definition table-definitions]
+    (log/info (format "Creating table '%s'..." (:table-name table-definition)))
+    (i/create-physical-table! dataset-loader database-definition table-definition))
+
+  ;; Now add the foreign key constraints
+  (doseq [{:keys [table-name field-definitions]} table-definitions]
+    (doseq [{dest-table-name :fk, field-name :field-name} field-definitions]
+      (when dest-table-name
+        (execute-sql! dataset-loader database-definition
+          (format "ALTER TABLE \"%s\" ADD CONSTRAINT \"FK_%s_%s\" FOREIGN KEY (\"%s\") REFERENCES \"%s\" (\"ID\");"
+                  table-name
+                  field-name dest-table-name
+                  field-name
+                  (name dest-table-name)))))))
+
+
+(defn load-table-data!
+  [dataset-loader database-definition table-definition]
+  (let [rows              (:rows table-definition)
+        fields-for-insert (map :field-name (:field-definitions table-definition))]
+    (-> (korma-entity dataset-loader database-definition table-definition)
+        (k/insert (k/values (->> (for [row rows]
+                                   (for [v row]
+                                     ;; JDBC drivers don't know what to do with java.util.Dates, so convert them to java.sql.Timestamps
+                                     (if (instance? java.util.Date v) (java.sql.Timestamp. (.getTime ^java.util.Date v))
+                                         v)))
+                                 (map (partial zipmap fields-for-insert))))))))
diff --git a/test/metabase/test/data/h2.clj b/test/metabase/test/data/h2.clj
index ceaf1a6417e..458bfb7e7ac 100644
--- a/test/metabase/test/data/h2.clj
+++ b/test/metabase/test/data/h2.clj
@@ -5,69 +5,88 @@
             [clojure.string :as s]
             (korma [core :as k]
                    [db :as kdb])
-            [metabase.test.data.interface :refer :all])
+            (metabase.test.data [generic-sql :as generic]
+                                [interface :refer :all]))
   (:import (metabase.test.data.interface DatabaseDefinition
                                          FieldDefinition
                                          TableDefinition)))
 
+(def ^:private ^:const field-base-type->sql-type
+  {:BigIntegerField "BIGINT"
+   :BooleanField    "BOOL"
+   :CharField       "VARCHAR(254)"
+   :DateField       "DATE"
+   :DateTimeField   "DATETIME"
+   :DecimalField    "DECIMAL"
+   :FloatField      "FLOAT"
+   :IntegerField    "INTEGER"
+   :TextField       "TEXT"
+   :TimeField       "TIME"})
+
 ;; ## DatabaseDefinition helper functions
 
-(defn filename
+(defn- filename
   "Return filename that should be used for connecting to H2 database defined by DATABASE-DEFINITION.
    This does not include the `.mv.db` extension."
   [^DatabaseDefinition database-definition]
   (format "%s/target/%s" (System/getProperty "user.dir") (escaped-name database-definition)))
 
-(defn connection-details
+(defn- connection-details
   "Return a Metabase `Database.details` for H2 database defined by DATABASE-DEFINITION."
   [^DatabaseDefinition database-definition]
   {:db (format (if (:short-lived? database-definition)
                  "file:%s" ; for short-lived connections don't create a server thread and don't use a keep-alive connection
                  "file:%s;AUTO_SERVER=TRUE;DB_CLOSE_DELAY=-1")
                (filename database-definition))})
 
-(defn korma-connection-pool
+(defn- korma-connection-pool
  "Return an H2 korma connection pool to H2 database defined by DATABASE-DEFINITION."
   [^DatabaseDefinition database-definition]
   (kdb/create-db (kdb/h2 (assoc (connection-details database-definition)
-                           :naming {:keys   s/lower-case
-                                    :fields s/upper-case}))))
+                                :naming {:keys   s/lower-case
+                                         :fields s/upper-case}))))
 
-(defn exec-sql
-  "Execute RAW-SQL against H2 instance of H2 database defined by DATABASE-DEFINITION."
-  [^DatabaseDefinition database-definition ^String raw-sql]
-  (log/info raw-sql)
-  (k/exec-raw (korma-connection-pool database-definition) raw-sql))
+;; ## Implementation
 
+(defprotocol IH2DatasetFormat
+  (format-for-h2 [this]
+    "Format dataset definitions for H2, e.g. upcasing `Table` and `Field` names."))
 
-;; ## TableDefinition helper functions
+(extend-protocol IH2DatasetFormat
+  DatabaseDefinition
+  (format-for-h2 [this]
+    (update-in this [:table-definitions] (partial map format-for-h2)))
 
-(defn korma-entity
-  "Return a Korma entity (e.g., one that can be passed to `select` or `sel` for the table
-   defined by TABLE-DEFINITION in the H2 database defined by DATABASE-DEFINITION."
- [^TableDefinition table-definition ^DatabaseDefinition database-definition] - (-> (k/create-entity (:table-name table-definition)) - (k/database (korma-connection-pool database-definition)))) + TableDefinition + (format-for-h2 [this] + (-> this + (update-in [:table-name] s/upper-case) + (update-in [:field-definitions] (partial map format-for-h2)))) + FieldDefinition + (format-for-h2 [this] + (cond-> (update-in this [:field-name] s/upper-case) + (:pk this) (update-in [:pk] (comp s/upper-case name))))) -;; ## Internal Stuff - -(def ^:private ^:const field-base-type->sql-type - "Map of `Field.base_type` to the SQL type we should use for that column when creating a DB." - {:BigIntegerField "BIGINT" - :BooleanField "BOOL" - :CharField "VARCHAR(254)" - :DateField "DATE" - :DateTimeField "DATETIME" - :DecimalField "DECIMAL" - :FloatField "FLOAT" - :IntegerField "INTEGER" - :TextField "TEXT" - :TimeField "TIME"}) ;; ## Public Concrete DatasetLoader instance ;; For some reason this doesn't seem to work if we define IDatasetLoader methods inline, but does work when we explicitly use extend-protocol -(defrecord H2DatasetLoader []) +(defrecord H2DatasetLoader [] + generic/IGenericSQLDatasetLoader + (generic/execute-sql! [_ database-definition raw-sql] + (log/info raw-sql) + (k/exec-raw (korma-connection-pool database-definition) raw-sql)) + + (generic/korma-entity [_ database-definition table-definition] + (-> (k/create-entity (:table-name table-definition)) + (k/database (korma-connection-pool database-definition)))) + + (generic/pk-sql-type [_] + "BIGINT AUTO_INCREMENT") + + (generic/field-base-type->sql-type [_ field-type] + (field-base-type->sql-type field-type))) + (extend-protocol IDatasetLoader H2DatasetLoader (engine [_] @@ -82,54 +101,16 @@ (.delete file)))) (create-physical-table! [this database-definition table-definition] - ;; Drop the table if it already exists - (drop-physical-table! this database-definition table-definition) - - ;; Now create the new table - (exec-sql - database-definition - (format "CREATE TABLE \"%s\" (%s, \"ID\" BIGINT AUTO_INCREMENT, PRIMARY KEY (\"ID\"));" - (s/upper-case (:table-name table-definition)) - (->> (:field-definitions table-definition) - (map (fn [{:keys [field-name base-type]}] - (format "\"%s\" %s" (s/upper-case field-name) (base-type field-base-type->sql-type)))) - (interpose ", ") - (apply str))))) + (generic/create-physical-table! this database-definition (format-for-h2 table-definition))) (create-physical-db! [this database-definition] - ;; Create all the Tables - (doseq [^TableDefinition table-definition (:table-definitions database-definition)] - (log/info (format "Creating table '%s'..." (:table-name table-definition))) - (create-physical-table! this database-definition table-definition)) - - ;; Now add the foreign key constraints - (doseq [^TableDefinition table-definition (:table-definitions database-definition)] - (let [table-name (s/upper-case (:table-name table-definition))] - (doseq [{dest-table-name :fk, field-name :field-name} (:field-definitions table-definition)] - (when dest-table-name - (let [field-name (s/upper-case field-name) - dest-table-name (s/upper-case (name dest-table-name))] - (exec-sql - database-definition - (format "ALTER TABLE \"%s\" ADD CONSTRAINT IF NOT EXISTS \"FK_%s_%s\" FOREIGN KEY (\"%s\") REFERENCES \"%s\" (\"ID\");" - table-name - field-name dest-table-name - field-name - dest-table-name)))))))) - - (load-table-data! 
[_ database-definition table-definition]
-  (let [rows              (:rows table-definition)
-        fields-for-insert (map :field-name (:field-definitions table-definition))]
-    (-> (korma-entity table-definition database-definition)
-        (k/insert (k/values (map (partial zipmap fields-for-insert)
-                                 rows))))))
-
-  (drop-physical-table! [_ database-definition table-definition]
-    (exec-sql
-     database-definition
-     (format "DROP TABLE IF EXISTS \"%s\";" (s/upper-case (:table-name table-definition))))))
+    (generic/create-physical-db! this (format-for-h2 database-definition)))
+
+  (load-table-data! [this database-definition table-definition]
+    (generic/load-table-data! this database-definition table-definition))
+
+  (drop-physical-table! [this database-definition table-definition]
+    (generic/drop-physical-table! this database-definition (format-for-h2 table-definition))))
 
 (defn dataset-loader []
-  (let [loader (->H2DatasetLoader)]
-    (assert (satisfies? IDatasetLoader loader))
-    loader))
+  (->H2DatasetLoader))
diff --git a/test/metabase/test/data/postgres.clj b/test/metabase/test/data/postgres.clj
new file mode 100644
index 00000000000..83f3313ebcd
--- /dev/null
+++ b/test/metabase/test/data/postgres.clj
@@ -0,0 +1,99 @@
+(ns metabase.test.data.postgres
+  "Code for creating / destroying a Postgres database from a `DatabaseDefinition`."
+  (:require [clojure.java.jdbc :as jdbc]
+            [clojure.tools.logging :as log]
+            [environ.core :refer [env]]
+            (korma [core :as k]
+                   [db :as kdb])
+            [metabase.driver] ; for metabase.driver/can-connect-with-details? below
+            (metabase.test.data [generic-sql :as generic]
+                                [interface :refer :all]))
+  (:import (metabase.test.data.interface DatabaseDefinition
+                                         TableDefinition)))
+
+(def ^:private ^:const field-base-type->sql-type
+  {:BigIntegerField "BIGINT"
+   :BooleanField    "BOOL"
+   :CharField       "VARCHAR(254)"
+   :DateField       "DATE"
+   :DateTimeField   "TIMESTAMP"
+   :DecimalField    "DECIMAL"
+   :FloatField      "FLOAT"
+   :IntegerField    "INTEGER"
+   :TextField       "TEXT"
+   :TimeField       "TIME"})
+
+(defn- pg-connection-details [^DatabaseDefinition database-definition]
+  (merge {:host "localhost"
+          :port 5432}
+         ;; HACK - CircleCI's Postgres expects the "ubuntu" user (see circle.yml)
+         (when (env :circleci)
+           {:user "ubuntu"})))
+
+(defn- db-connection-details [^DatabaseDefinition database-definition]
+  (assoc (pg-connection-details database-definition)
+         :db (:database-name database-definition)))
+
+(defn- execute! [scope ^DatabaseDefinition database-definition & format-strings]
+  (jdbc/execute! (-> ((case scope
+                        :pg pg-connection-details
+                        :db db-connection-details) database-definition)
+                     kdb/postgres
+                     (assoc :make-pool? false))
+                 [(apply format format-strings)]
+                 :transaction? false))
+
+
+(defrecord PostgresDatasetLoader []
+  generic/IGenericSQLDatasetLoader
+  (generic/execute-sql! [_ database-definition raw-sql]
+    (log/info raw-sql)
+    (execute! :db database-definition raw-sql))
+
+  (generic/korma-entity [_ database-definition table-definition]
+    (-> (k/create-entity (:table-name table-definition))
+        (k/database (-> (db-connection-details database-definition)
+                        kdb/postgres
+                        (assoc :make-pool? false)
+                        kdb/create-db))))
+
+  (generic/pk-sql-type [_]
+    "SERIAL")
+
+  (generic/field-base-type->sql-type [_ field-type]
+    (field-base-type->sql-type field-type)))
+
+(extend-protocol IDatasetLoader
+  PostgresDatasetLoader
+  (engine [_]
+    :postgres)
+
+  (database->connection-details [_ database-definition]
+    (assoc (db-connection-details database-definition)
+           :timezone :America/Los_Angeles))
+
+  (drop-physical-db! [_ database-definition]
+    (execute!
:pg database-definition "DROP DATABASE IF EXISTS \"%s\";" (:database-name database-definition))) + + (drop-physical-table! [this database-definition table-definition] + (generic/drop-physical-table! this database-definition table-definition)) + + (create-physical-table! [this database-definition table-definition] + (generic/create-physical-table! this database-definition table-definition)) + + (create-physical-db! [this {:keys [database-name], :as database-definition}] + (execute! :pg database-definition "DROP DATABASE IF EXISTS \"%s\";" database-name) + (execute! :pg database-definition "CREATE DATABASE \"%s\";" database-name) + + ;; double check that we can connect to the newly created DB + (metabase.driver/can-connect-with-details? :postgres (db-connection-details database-definition) :rethrow-exceptions) + + ;; call the generic implementation to create Tables + FKs + (generic/create-physical-db! this database-definition)) + + (load-table-data! [this database-definition table-definition] + (generic/load-table-data! this database-definition table-definition))) + + +(defn dataset-loader [] + (->PostgresDatasetLoader)) -- GitLab
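
Note on the extension point: the `IGenericSQLDatasetLoader` protocol added in test/metabase/test/data/generic_sql.clj is what lets the H2 and Postgres loaders share the CREATE TABLE / FK / insert logic. For reference, here is a minimal sketch of what a loader for a third SQL engine might look like. Everything below is illustrative, not part of this patch: the MySQL namespace, the `:user "root"` credentials, and the column type mappings are all assumptions, though `korma.db/mysql` and `korma.db/create-db` are real Korma helpers used the same way the H2 loader uses `korma.db/h2`.

(ns metabase.test.data.mysql ; hypothetical -- not part of this patch
  (:require [clojure.tools.logging :as log]
            (korma [core :as k]
                   [db :as kdb])
            (metabase.test.data [generic-sql :as generic]
                                [interface :refer :all])))

(def ^:private ^:const field-base-type->sql-type
  ;; assumed MySQL column types; adjust as needed
  {:BigIntegerField "BIGINT"
   :BooleanField    "BOOLEAN"
   :CharField       "VARCHAR(254)"
   :DateField       "DATE"
   :DateTimeField   "TIMESTAMP"
   :DecimalField    "DECIMAL"
   :FloatField      "DOUBLE"
   :IntegerField    "INTEGER"
   :TextField       "TEXT"
   :TimeField       "TIME"})

(defn- connection-pool [database-definition]
  ;; kdb/mysql builds a JDBC spec map; kdb/create-db wraps it in a connection pool
  (kdb/create-db (kdb/mysql {:db   (:database-name database-definition)
                             :user "root"}))) ; assumed credentials

(defrecord MySQLDatasetLoader []
  generic/IGenericSQLDatasetLoader
  (generic/execute-sql! [_ database-definition raw-sql]
    (log/info raw-sql)
    (k/exec-raw (connection-pool database-definition) raw-sql))

  (generic/korma-entity [_ database-definition table-definition]
    (-> (k/create-entity (:table-name table-definition))
        (k/database (connection-pool database-definition))))

  (generic/pk-sql-type [_]
    "BIGINT NOT NULL AUTO_INCREMENT")

  (generic/field-base-type->sql-type [_ field-type]
    (field-base-type->sql-type field-type)))

With just these four methods implemented, the shared `generic/create-physical-table!`, `generic/create-physical-db!`, and `generic/load-table-data!` functions work unchanged; only the engine-specific `IDatasetLoader` methods (engine name, connection details, database create/drop) would still need their own `extend-protocol`, as the H2 and Postgres files in this patch show.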