Skip to content
Snippets Groups Projects
Commit 1a95c9b9 authored by Cam Saül's avatar Cam Saül Committed by GitHub
Browse files

Merge pull request #2855 from metabase/switch-to-honeysql-part-9

Finish Korma -> HoneySQL Transition :honey_pot:
parents a7a6872b e4262205
Branches
Tags
No related merge requests found
Showing
with 367 additions and 326 deletions
......@@ -43,12 +43,12 @@
"v2-rev300-1.22.0"]
[com.h2database/h2 "1.4.191"] ; embedded SQL database
[com.mattbertolini/liquibase-slf4j "2.0.0"] ; Java Migrations lib
[com.mchange/c3p0 "0.9.5.2"] ; connection pooling library
[com.novemberain/monger "3.0.2"] ; MongoDB Driver
[compojure "1.5.0"] ; HTTP Routing library built on Ring
[environ "1.0.3"] ; easy environment management
[hiccup "1.0.5"] ; HTML templating
[honeysql "0.6.3"] ; Transform Clojure data structures to SQL
[korma "0.4.2"] ; SQL generation
[log4j/log4j "1.2.17" ; logging framework
:exclusions [javax.mail/mail
javax.jms/jms
......
......@@ -23,4 +23,4 @@ log4j.logger.metabase.middleware=DEBUG
log4j.logger.metabase.query-processor=DEBUG
log4j.logger.metabase.sync-database=DEBUG
# c3p0 connection pools tend to log useless warnings way too often; only log actual errors
log4j.logger.com.mchange.v2.resourcepool=ERROR
log4j.logger.com.mchange=ERROR
(ns metabase.sample-dataset.generate
(:require [clojure.java.io :as io]
(:require (clojure.java [io :as io]
[jdbc :as jdbc])
[clojure.math.numeric-tower :as math]
[clojure.string :as s]
(faker [address :as address]
......@@ -8,8 +9,7 @@
[internet :as internet]
[name :as name])
[incanter.distributions :as dist]
(korma [core :as k]
[db :as kdb])
[metabase.db.spec :as dbspec]
[metabase.util :as u])
(:import java.util.Date))
......@@ -369,52 +369,48 @@
(io/delete-file (str filename ".mv.db") :silently)
(io/delete-file (str filename ".trace.db") :silently)
(println "Creating db...")
(let [db (kdb/h2 {:db (format "file:%s;UNDO_LOG=0;CACHE_SIZE=131072;QUERY_CACHE_SIZE=128;COMPRESS=TRUE;MULTI_THREADED=TRUE;MVCC=TRUE;DEFRAG_ALWAYS=TRUE;MAX_COMPACT_TIME=5000;ANALYZE_AUTO=100"
filename)
:make-pool? false})]
(let [db (dbspec/h2 {:db (format "file:%s;UNDO_LOG=0;CACHE_SIZE=131072;QUERY_CACHE_SIZE=128;COMPRESS=TRUE;MULTI_THREADED=TRUE;MVCC=TRUE;DEFRAG_ALWAYS=TRUE;MAX_COMPACT_TIME=5000;ANALYZE_AUTO=100"
filename)
:make-pool? false})]
(doseq [[table-name field->type] (seq tables)]
(k/exec-raw db (create-table-sql table-name field->type)))
(jdbc/execute! db [(create-table-sql table-name field->type)]))
;; Add FK constraints
(println "Adding FKs...")
(doseq [{:keys [source-table field dest-table]} fks]
(k/exec-raw db (format "ALTER TABLE \"%s\" ADD CONSTRAINT \"FK_%s_%s_%s\" FOREIGN KEY (\"%s\") REFERENCES \"%s\" (\"ID\");"
source-table
source-table field dest-table
field
dest-table)))
(jdbc/execute! db [(format "ALTER TABLE \"%s\" ADD CONSTRAINT \"FK_%s_%s_%s\" FOREIGN KEY (\"%s\") REFERENCES \"%s\" (\"ID\");"
source-table
source-table field dest-table
field
dest-table)]))
;; Insert the data
(println "Inserting data...")
(doseq [[table rows] (seq data)]
(assert (keyword? table))
(assert (sequential? rows))
(let [entity (-> (k/create-entity (s/upper-case (name table)))
(k/database db))]
(k/insert entity (k/values (for [row rows]
(->> (for [[k v] (seq row)]
[(s/upper-case (name k)) v])
(into {})))))))
(let [table-name (s/upper-case (name table))]
(apply jdbc/insert! db table-name (for [row rows]
(into {} (for [[k v] (seq row)]
{(s/upper-case (name k)) v}))))))
;; Insert the _metabase_metadata table
(println "Inserting _metabase_metadata...")
(k/exec-raw db (format "CREATE TABLE \"_METABASE_METADATA\" (\"keypath\" VARCHAR(255), \"value\" VARCHAR(255), PRIMARY KEY (\"keypath\"));"))
(-> (k/create-entity "_METABASE_METADATA")
(k/database db)
(k/insert (k/values (reduce concat (for [[table-name {table-description :description, columns :columns}] annotations]
(let [table-name (s/upper-case (name table-name))]
(conj (for [[column-name kvs] columns
[k v] kvs]
{:keypath (format "%s.%s.%s" table-name (s/upper-case (name column-name)) (name k))
:value (name v)})
{:keypath (format "%s.description" table-name)
:value table-description})))))))
(jdbc/execute! db ["CREATE TABLE \"_METABASE_METADATA\" (\"KEYPATH\" VARCHAR(255), \"VALUE\" VARCHAR(255), PRIMARY KEY (\"KEYPATH\"));"])
(apply jdbc/insert! db "_METABASE_METADATA" (reduce concat (for [[table-name {table-description :description, columns :columns}] annotations]
(let [table-name (s/upper-case (name table-name))]
(conj (for [[column-name kvs] columns
[k v] kvs]
{:keypath (format "%s.%s.%s" table-name (s/upper-case (name column-name)) (name k))
:value (name v)})
{:keypath (format "%s.description" table-name)
:value table-description})))))
;; Create the 'GUEST' user
(println "Preparing database for export...")
(k/exec-raw db "CREATE USER GUEST PASSWORD 'guest';")
(jdbc/execute! db ["CREATE USER GUEST PASSWORD 'guest';"])
(doseq [table (conj (keys data) "_METABASE_METADATA")]
(k/exec-raw db (format "GRANT SELECT ON %s TO GUEST;" (s/upper-case (name table)))))
(jdbc/execute! db [(format "GRANT SELECT ON %s TO GUEST;" (s/upper-case (name table)))]))
(println "Done."))))
......
......@@ -25,7 +25,6 @@
(defendpoint GET "/recent_views"
"Get the list of 15 things the current user has been viewing most recently."
[]
;; use a custom Korma query because we are doing some groupings and aggregations
;; expected output of the query is a single row per unique model viewed by the current user
;; including a `:max_ts` which has the most recent view timestamp of the item and `:cnt` which has total views
;; and we order the results by most recently viewed then hydrate the basic details of the model
......
......@@ -3,8 +3,6 @@
(:require [clojure.java.jdbc :as jdbc]
[clojure.set :as set]
[colorize.core :as color]
(korma [core :as k]
[db :as kdb])
[medley.core :as m]
[metabase.config :as config]
[metabase.db :as db]
......@@ -79,69 +77,73 @@
"Entities that do NOT use an auto incrementing ID column."
#{Setting Session})
(def ^:private ^:dynamic *db-conn*
(def ^:private ^:dynamic *target-db-connection*
"Active database connection to the target database we are loading into."
nil)
(defn- insert-entity [e objs]
(print (u/format-color 'blue "Transfering %d instances of %s..." (count objs) (:name e)))
;; TODO - `e` is a bad variable name! This should be something like `entity`
(defn- insert-entity! [e objs]
(print (u/format-color 'blue "Transfering %d instances of %s..." (count objs) (:name e))) ; TODO - I don't think the print+flush is working as intended :/
(flush)
;; The connection closes prematurely on occasion when we're inserting thousands of rows at once. Break into smaller chunks so connection stays alive
(doseq [chunk (partition-all 300 objs)]
(print (color/blue \.))
(flush)
(k/insert e (k/values (if (= e DashboardCard)
;; mini-HACK to fix korma/h2 lowercasing these couple attributes
;; luckily this is the only place in our schema where we have camel case names
(mapv #(set/rename-keys % {:sizex :sizeX, :sizey :sizeY}) chunk)
chunk))))
(apply jdbc/insert! *target-db-connection* (:table e) (if (= e DashboardCard)
;; mini-HACK to fix h2 lowercasing these couple attributes
;; luckily this is the only place in our schema where we have camel case names
(mapv #(set/rename-keys % {:sizex :sizeX, :sizey :sizeY}) chunk)
chunk)))
(println (color/green "[OK]")))
(defn- insert-self-referencing-entity [e objs]
(let [self-ref-attr (condp = e
RawColumn :fk_target_column_id
Field :fk_target_field_id)
self-referencing (filter self-ref-attr objs)
(defn- insert-self-referencing-entity! [e objs]
(let [self-ref-attrs (condp = e
RawColumn #{:fk_target_column_id}
Field #{:fk_target_field_id :parent_id})
self-referencing (for [obj objs
:when (reduce #(or %1 %2) (for [attr self-ref-attrs] ; a self-referencing object is an object where *any* of the self-referencing attributes is non-nil
(attr obj)))]
obj)
others (set/difference (set objs) (set self-referencing))]
;; first insert the non-self-referencing objects
(insert-entity e others)
;; then insert the rest, which should be safe to insert now
(insert-entity e self-referencing)))
(insert-entity! e others)
;; then insert the rest, which *should* be safe to insert now (TODO - this could break if a self-referencing entity depends on another self-referencing entity </3)
(insert-entity! e self-referencing)))
(defn- set-postgres-sequence-values []
(defn- set-postgres-sequence-values! []
(print (u/format-color 'blue "Setting postgres sequence ids to proper values..."))
(flush)
(doseq [e (filter #(not (contains? entities-without-autoinc-ids %)) entities)
:let [table-name (:table e)
:let [table-name (name (:table e))
seq-name (str table-name "_id_seq")
sql (format "SELECT setval('%s', COALESCE((SELECT MAX(id) FROM %s), 1), true) as val" seq-name table-name)]]
(jdbc/db-query-with-resultset *db-conn* [sql] :val))
sql (format "SELECT setval('%s', COALESCE((SELECT MAX(id) FROM %s), 1), true) as val" seq-name (name table-name))]]
(jdbc/db-query-with-resultset *target-db-connection* [sql] :val))
(println (color/green "[OK]")))
(defn load-from-h2
(defn load-from-h2!
"Transfer data from existing H2 database to the newly created (presumably MySQL or Postgres) DB specified by env vars.
Intended as a tool for upgrading from H2 to a 'real' Database.
Defaults to using `@metabase.db/db-file` as the connection string."
[h2-connection-string-or-nil]
(db/setup-db)
(let [h2-filename (or h2-connection-string-or-nil @metabase.db/db-file)]
(let [h2-filename (or h2-connection-string-or-nil @metabase.db/db-file)
target-db-spec (db/jdbc-details @db/db-connection-details)]
;; NOTE: would be nice to add `ACCESS_MODE_DATA=r` but it doesn't work with `AUTO_SERVER=TRUE`
;; connect to H2 database, which is what we are migrating from
(jdbc/with-db-connection [h2-conn (db/jdbc-details {:type :h2, :db (str h2-filename ";IFEXISTS=TRUE")})]
(kdb/transaction
(doseq [e entities
:let [objs (->> (jdbc/query h2-conn [(str "SELECT * FROM " (:table e))])
;; we apply jdbc-clob->str to all row values because H2->Postgres
;; gets messed up if the value is left as a clob
(map #(m/map-vals u/jdbc-clob->str %)))]
:when (seq objs)]
(if-not (contains? self-referencing-entities e)
(insert-entity e objs)
(insert-self-referencing-entity e objs)))))
(jdbc/with-db-transaction [target-db-conn target-db-spec]
(binding [*target-db-connection* target-db-conn]
(doseq [e entities
:let [rows (for [row (jdbc/query h2-conn [(str "SELECT * FROM " (name (:table e)))])]
(m/map-vals u/jdbc-clob->str row))]
:when (seq rows)]
(if-not (contains? self-referencing-entities e)
(insert-entity! e rows)
(insert-self-referencing-entity! e rows))))))
;; if we are loading into a postgres db then we need to update sequence nextvals
(when (= (config/config-str :mb-db-type) "postgres")
(jdbc/with-db-transaction [targetdb-conn (db/jdbc-details @db/db-connection-details)]
(binding [*db-conn* targetdb-conn]
(set-postgres-sequence-values))))))
(jdbc/with-db-transaction [target-db-conn target-db-spec]
(binding [*target-db-connection* target-db-conn]
(set-postgres-sequence-values!))))))
......@@ -225,7 +225,7 @@
(db/migrate @db/db-connection-details (keyword direction)))
:load-from-h2 (fn [& [h2-connection-string-or-nil]]
(require 'metabase.cmd.load-from-h2)
((resolve 'metabase.cmd.load-from-h2/load-from-h2) h2-connection-string-or-nil))})
((resolve 'metabase.cmd.load-from-h2/load-from-h2!) h2-connection-string-or-nil))})
(defn- run-cmd [cmd & args]
(let [f (or (cmd->fn cmd)
......
......@@ -8,16 +8,16 @@
(honeysql [core :as hsql]
[format :as hformat]
[helpers :as h])
(korma [core :as k]
[db :as kdb])
[medley.core :as m]
[ring.util.codec :as codec]
[metabase.config :as config]
[metabase.db.spec :as dbspec]
[metabase.models.interface :as models]
[metabase.util :as u]
metabase.util.honeysql-extensions) ; this needs to be loaded so the `:h2` quoting style gets added
(:import java.io.StringWriter
java.sql.Connection
com.mchange.v2.c3p0.ComboPooledDataSource
liquibase.Liquibase
(liquibase.database DatabaseFactory Database)
liquibase.database.jvm.JdbcConnection
......@@ -87,15 +87,15 @@
:password (config/config-str :mb-db-pass)}))))
(defn jdbc-details
"Takes our own MB details map and formats them properly for connection details for Korma / JDBC."
"Takes our own MB details map and formats them properly for connection details for JDBC."
[db-details]
{:pre [(map? db-details)]}
;; TODO: it's probably a good idea to put some more validation here and be really strict about what's in `db-details`
(case (:type db-details)
:h2 (kdb/h2 (assoc db-details :naming {:keys s/lower-case
:fields s/upper-case}))
:mysql (kdb/mysql (assoc db-details :db (:dbname db-details)))
:postgres (kdb/postgres (assoc db-details :db (:dbname db-details)))))
:h2 (dbspec/h2 (assoc db-details :naming {:keys s/lower-case
:fields s/upper-case}))
:mysql (dbspec/mysql (assoc db-details :db (:dbname db-details)))
:postgres (dbspec/postgres (assoc db-details :db (:dbname db-details)))))
;; ## MIGRATE
......@@ -131,7 +131,73 @@
(throw (DatabaseException. e)))))
;; ## SETUP-DB
;;; +------------------------------------------------------------------------------------------------------------------------+
;;; | DB CONNECTION / TRANSACTION STUFF |
;;; +------------------------------------------------------------------------------------------------------------------------+
(defn connection-pool
  "Create a C3P0 connection pool for the given database SPEC.
   SPEC is a clojure.java.jdbc-style map: :classname, :subprotocol, and :subname
   are used to pick the JDBC driver and build the JDBC URL, while
   :minimum-pool-size, :idle-connection-test-period, and :excess-timeout tune
   the pool itself (defaults: 3 connections, no idle testing, 30 minutes).
   Returns a map with a :datasource key, usable directly as a jdbc db-spec."
  [{:keys [subprotocol subname classname minimum-pool-size idle-connection-test-period excess-timeout]
    :or   {minimum-pool-size           3
           idle-connection-test-period 0
           excess-timeout              (* 30 60)} ; 30 minutes
    :as   spec}]
  {:datasource (doto (ComboPooledDataSource.)
                 (.setDriverClass                  classname)
                 (.setJdbcUrl                      (str "jdbc:" subprotocol ":" subname))
                 (.setMaxIdleTimeExcessConnections excess-timeout)
                 (.setMaxIdleTime                  (* 3 60 60)) ; 3 hours
                 (.setInitialPoolSize              3)
                 (.setMinPoolSize                  minimum-pool-size)
                 (.setMaxPoolSize                  15)
                 (.setIdleConnectionTestPeriod     idle-connection-test-period)
                 (.setTestConnectionOnCheckin      false)
                 (.setTestConnectionOnCheckout     false)
                 (.setPreferredTestQuery           nil)
                 ;; every remaining SPEC entry (e.g. :user, :password) is forwarded
                 ;; to the underlying JDBC driver as a string connection property;
                 ;; pool/korma-era keys are stripped first so the driver never sees them
                 (.setProperties (u/prog1 (java.util.Properties.)
                                   (doseq [[k v] (dissoc spec :make-pool? :classname :subprotocol :subname :naming :delimiters :alias-delimiter
                                                              :excess-timeout :minimum-pool-size :idle-connection-test-period)]
                                     (.setProperty <> (name k) (str v))))))})
(def ^:private db-connection-pool
  "Atom holding the connection pool map (as returned by `connection-pool`) for
   the Metabase application DB; nil until `create-connection-pool!` runs."
  (atom nil))

(defn- create-connection-pool!
  "Build a connection pool for SPEC and store it in `db-connection-pool`,
   replacing any pool that was there before."
  [spec]
  (reset! db-connection-pool (connection-pool spec)))

(def ^:private ^:dynamic *transaction-connection*
  "Transaction connection to the *Metabase* backing DB connection pool. Used internally by `transaction`."
  nil)
(declare setup-db-if-needed)
(defn- db-connection
  "Get a JDBC connection spec for the Metabase DB.
   Returns the active transaction connection when called inside `transaction`
   (i.e. when `*transaction-connection*` is bound), otherwise the shared pool.
   Calls `setup-db-if-needed` first so the pool should exist; throws when the
   DB still isn't set up after that."
  []
  (setup-db-if-needed)
  (or *transaction-connection*
      @db-connection-pool
      (throw (Exception. "DB is not setup."))))
(defn do-in-transaction
  "Execute F (a function of no arguments) inside a DB transaction, with
   `*transaction-connection*` bound to the transaction's connection so that
   nested DB calls made through `db-connection` join the same transaction.
   Prefer macro form `transaction` to using this directly."
  [f]
  (jdbc/with-db-transaction [conn (db-connection)]
    (binding [*transaction-connection* conn]
      (f))))
(defmacro transaction
  "Execute all queries within the body in a single transaction.
   Expands to a `do-in-transaction` call with BODY wrapped in a thunk."
  {:arglists '([body] [options & body])}
  [& body]
  `(do-in-transaction (fn [] ~@body)))
;;; +------------------------------------------------------------------------------------------------------------------------+
;;; | DB SETUP & MIGRATIONS |
;;; +------------------------------------------------------------------------------------------------------------------------+
(def ^:private setup-db-has-been-called?
(atom false))
......@@ -188,8 +254,8 @@
(throw (java.lang.Exception. "Database requires manual upgrade."))))
(log/info "Database Migrations Current ... ✅")
;; Establish our 'default' Korma DB Connection
(kdb/default-connection (kdb/create-db (jdbc-details db-details)))
;; Establish our 'default' DB Connection
(create-connection-pool! (jdbc-details db-details))
;; Do any custom code-based migrations now that the db structure is up to date
;; NOTE: we use dynamic resolution to prevent circular dependencies
......@@ -246,13 +312,6 @@
(symbol? entity) (resolve-entity-from-symbol entity)
:else (throw (Exception. (str "Invalid entity:" entity)))))
(defn- db-connection
"Get a JDBC connection spec for the Metabase DB."
[]
(setup-db-if-needed)
(or korma.db/*current-conn*
(korma.db/get-connection (or korma.db/*current-db* @korma.db/_default))))
(defn- quoting-style
"Style of `:quoting` that should be passed to HoneySQL `format`."
^clojure.lang.Keyword []
......@@ -269,7 +328,7 @@
(def ^:private ^:dynamic *call-count*
"Atom used as a counter for DB calls when enabled.
This number isn't *perfectly* accurate, only mostly; DB calls made directly to JDBC or via old korma patterns won't be logged."
This number isn't *perfectly* accurate, only mostly; DB calls made directly to JDBC won't be logged."
nil)
(defn do-with-call-counting
......
(ns metabase.db.migrations
"Clojure-land data migration definitions and fns for running them."
(:require [clojure.tools.logging :as log]
(korma [core :as k]
[db :as kdb])
(metabase [db :as db]
[driver :as driver])
[metabase.events.activity-feed :refer [activity-feed-topics]]
......@@ -12,6 +10,7 @@
[database :refer [Database]]
[field :refer [Field]]
[foreign-key :refer [ForeignKey]]
[interface :refer [defentity]]
[raw-column :refer [RawColumn]]
[raw-table :refer [RawTable]]
[table :refer [Table] :as table]
......@@ -20,24 +19,23 @@
;;; # Migration Helpers
(defentity DataMigrations :data_migrations)
(defn- migration-ran? [migration-name]
(-> (k/select :data_migrations
(k/aggregate (count :*) :count)
(k/where {:id (name migration-name)}))
first :count (> 0)))
(db/exists? DataMigrations :id (name migration-name)))
(defn- run-migration-if-needed
(defn- run-migration-if-needed!
"Run migration defined by MIGRATION-VAR if needed.
(run-migration-if-needed #'set-card-database-and-table-ids)"
(run-migration-if-needed! #'set-card-database-and-table-ids)"
[migration-var]
(let [migration-name (name (:name (meta migration-var)))]
(when-not (migration-ran? migration-name)
(log/info (format "Running data migration '%s'..." migration-name))
(@migration-var)
(k/insert "data_migrations"
(k/values {:id migration-name
:timestamp (u/new-sql-timestamp)})))))
(db/insert! DataMigrations
:id migration-name
:timestamp (u/new-sql-timestamp)))))
(def ^:private data-migrations (atom []))
......@@ -51,7 +49,8 @@
"Run all data migrations defined by `defmigration`."
[]
(log/info "Running all necessary data migrations, this may take a minute.")
(dorun (map run-migration-if-needed @data-migrations))
(doseq [migration @data-migrations]
(run-migration-if-needed! migration))
(log/info "Finished running data migrations."))
......@@ -63,7 +62,8 @@
;; the values for `:database_id`, `:table_id`, and `:query_type` if possible.
(defmigration set-card-database-and-table-ids
;; only execute when `:database_id` column on all cards is `nil`
(when (= 0 (:cnt (first (k/select Card (k/aggregate (count :*) :cnt) (k/where (not= :database_id nil))))))
(when (zero? (db/select-one-count Card
:database_id [:not= nil]))
(doseq [{id :id {:keys [type] :as dataset-query} :dataset_query} (db/select [Card :id :dataset_query])]
(when type
;; simply resave the card with the dataset query which will automatically set the database, table, and type
......@@ -83,12 +83,10 @@
(defmigration set-default-schemas
(doseq [[engine default-schema] [["postgres" "public"]
["h2" "PUBLIC"]]]
(k/update Table
(k/set-fields {:schema default-schema})
(k/where {:schema nil
:db_id [in (k/subselect Database
(k/fields :id)
(k/where {:engine engine}))]}))))
(when-let [db-ids (db/select-ids Database :engine engine)]
(db/update-where! Table {:schema nil
:db_id [:in db-ids]}
:schema default-schema))))
;; Populate the initial value for the `:admin-email` setting for anyone who hasn't done it yet
......@@ -103,8 +101,7 @@
;; Remove old `database-sync` activity feed entries
(defmigration remove-database-sync-activity-entries
(when-not (contains? activity-feed-topics :database-sync-begin)
(k/delete Activity
(k/where {:topic "database-sync"}))))
(db/delete! Activity :topic "database-sync")))
;; Clean up duplicate FK entries
......@@ -122,122 +119,118 @@
;; NOTE: this scales the dashboards by 4x in the Y-scale and 3x in the X-scale
(defmigration update-dashboards-to-new-grid
(doseq [{:keys [id row col sizeX sizeY]} (db/select DashboardCard)]
(k/update DashboardCard
(k/set-fields {:row (when row (* row 4))
:col (when col (* col 3))
:sizeX (when sizeX (* sizeX 3))
:sizeY (when sizeY (* sizeY 4))})
(k/where {:id id}))))
(db/update! DashboardCard id
:row (when row (* row 4))
:col (when col (* col 3))
:sizeX (when sizeX (* sizeX 3))
:sizeY (when sizeY (* sizeY 4)))))
;; migrate data to new visibility_type column on field
(defmigration migrate-field-visibility-type
(when (< 0 (:cnt (first (k/select Field (k/aggregate (count :*) :cnt) (k/where (= :visibility_type "unset"))))))
(when-not (zero? (db/select-one-count Field :visibility_type "unset"))
;; start by marking all inactive fields as :retired
(k/update Field
(k/set-fields {:visibility_type "retired"})
(k/where {:visibility_type "unset"
:active false}))
(db/update-where! Field {:visibility_type "unset"
:active false}
:visibility_type "retired")
;; anything that is active with field_type = :sensitive gets visibility_type :sensitive
(k/update Field
(k/set-fields {:visibility_type "sensitive"})
(k/where {:visibility_type "unset"
:active true
:field_type "sensitive"}))
(db/update-where! Field {:visibility_type "unset"
:active true
:field_type "sensitive"}
:visibility_type "sensitive")
;; if field is active but preview_display = false then it becomes :details-only
(k/update Field
(k/set-fields {:visibility_type "details-only"})
(k/where {:visibility_type "unset"
:active true
:preview_display false}))
(db/update-where! Field {:visibility_type "unset"
:active true
:preview_display false}
:visibility_type "details-only")
;; everything else should end up as a :normal field
(k/update Field
(k/set-fields {:visibility_type "normal"})
(k/where {:visibility_type "unset"
:active true}))))
(db/update-where! Field {:visibility_type "unset"
:active true}
:visibility_type "normal")))
;; deal with dashboard cards which have NULL `:row` or `:col` values
(defmigration fix-dashboard-cards-without-positions
(when-let [bad-dashboards (not-empty (k/select DashboardCard (k/fields [:dashboard_id]) (k/modifier "DISTINCT") (k/where (or (= :row nil) (= :col nil)))))]
(log/info "Looks like we need to fix unpositioned cards in these dashboards:" (mapv :dashboard_id bad-dashboards))
(when-let [bad-dashboard-ids (db/select-field :dashboard_id DashboardCard {:where [:or [:= :row nil]
[:= :col nil]]})]
(log/info "Looks like we need to fix unpositioned cards in these dashboards:" bad-dashboard-ids)
;; we are going to take the easy way out, which is to put bad-cards at the bottom of the dashboard
(doseq [{dash-to-fix :dashboard_id} bad-dashboards]
(let [max-row (or (:row (first (k/select DashboardCard (k/aggregate (max :row) :row) (k/where {:dashboard_id dash-to-fix})))) 0)
max-size (or (:size (first (k/select DashboardCard (k/aggregate (max :sizeY) :size) (k/where {:dashboard_id dash-to-fix, :row max-row})))) 0)
(doseq [dash-to-fix bad-dashboard-ids]
(let [max-row (or (:max (db/select-one [DashboardCard [:%max.row :max]] :dashboard_id dash-to-fix))
0)
max-size (or (:size (db/select-one [DashboardCard [:%max.sizeY :size]] :dashboard_id dash-to-fix, :row max-row))
0)
max-y (+ max-row max-size)
bad-cards (k/select DashboardCard (k/fields :id :sizeY) (k/where {:dashboard_id dash-to-fix}) (k/where (or (= :row nil) (= :col nil))))]
bad-cards (db/select [DashboardCard :id :sizeY]
{:where [:and [:= :dashboard_id dash-to-fix]
[:or [:= :row nil]
[:= :col nil]]]})]
(loop [[bad-card & more] bad-cards
row-target max-y]
(k/update DashboardCard
(k/set-fields {:row row-target
:col 0})
(k/where {:id (:id bad-card)}))
(db/update! DashboardCard bad-card
:row row-target
:col 0)
(when more
(recur more (+ row-target (:sizeY bad-card)))))))))
;; migrate FK information from old ForeignKey model to Field.fk_target_field_id
(defmigration migrate-fk-metadata
(when (> 1 (:cnt (first (k/select Field (k/aggregate (count :*) :cnt) (k/where (not= :fk_target_field_id nil))))))
(when (> 1 (db/select-one-count Field :fk_target_field_id [:not= nil]))
(when-let [fks (not-empty (db/select ForeignKey))]
(doseq [{:keys [origin_id destination_id]} fks]
(k/update Field
(k/set-fields {:fk_target_field_id destination_id})
(k/where {:id origin_id}))))))
(db/update! Field origin_id
:fk_target_field_id destination_id)))))
;; populate RawTable and RawColumn information
;; NOTE: we only handle active Tables/Fields and we skip any FK relationships (they can safely populate later)
(defmigration create-raw-tables
(when (= 0 (:cnt (first (k/select RawTable (k/aggregate (count :*) :cnt)))))
(when (zero? (db/select-one-count RawTable))
(binding [db/*disable-db-logging* true]
(kdb/transaction
(doseq [{database-id :id, :keys [name engine]} (db/select Database)]
(when-let [tables (not-empty (db/select Table, :db_id database-id, :active true))]
(log/info (format "Migrating raw schema information for %s database '%s'" engine name))
(let [processed-tables (atom #{})]
(doseq [{table-id :id, table-schema :schema, table-name :name} tables]
;; this check guards against any table that appears in the schema multiple times
(if (contains? @processed-tables {:schema table-schema, :name table-name})
;; this is a dupe of this table, retire it and its fields
(table/retire-tables! #{table-id})
;; this is the first time we are encountering this table, so migrate it
(do
;; add this table to the set of tables we've processed
(swap! processed-tables conj {:schema table-schema, :name table-name})
;; create the RawTable
(let [{raw-table-id :id} (db/insert! RawTable
:database_id database-id
:schema table-schema
:name table-name
:details {}
:active true)]
;; update the Table and link it with the RawTable
(k/update Table
(k/set-fields {:raw_table_id raw-table-id})
(k/where {:id table-id}))
;; migrate all Fields in the Table (skipping :dynamic-schema dbs)
(when-not (driver/driver-supports? (driver/engine->driver engine) :dynamic-schema)
(let [processed-fields (atom #{})]
(doseq [{field-id :id, column-name :name, :as field} (db/select Field, :table_id table-id, :visibility_type [:not= "retired"])]
;; guard against duplicate fields with the same name
(if (contains? @processed-fields column-name)
;; this is a dupe, disable it
(k/update Field
(k/set-fields {:visibility_type "retired"})
(k/where {:id field-id}))
;; normal unmigrated field, so lets use it
(let [{raw-column-id :id} (db/insert! RawColumn
:raw_table_id raw-table-id
:name column-name
:is_pk (= :id (:special_type field))
:details {:base-type (:base_type field)}
:active true)]
;; update the Field and link it with the RawColumn
(k/update Field
(k/set-fields {:raw_column_id raw-column-id
:last_analyzed (u/new-sql-timestamp)})
(k/where {:id field-id}))
;; add this column to the set we've processed already
(swap! processed-fields conj column-name)))))))))))))))))
(db/transaction
(doseq [{database-id :id, :keys [name engine]} (db/select Database)]
(when-let [tables (not-empty (db/select Table, :db_id database-id, :active true))]
(log/info (format "Migrating raw schema information for %s database '%s'" engine name))
(let [processed-tables (atom #{})]
(doseq [{table-id :id, table-schema :schema, table-name :name} tables]
;; this check guards against any table that appears in the schema multiple times
(if (contains? @processed-tables {:schema table-schema, :name table-name})
;; this is a dupe of this table, retire it and its fields
(table/retire-tables! #{table-id})
;; this is the first time we are encountering this table, so migrate it
(do
;; add this table to the set of tables we've processed
(swap! processed-tables conj {:schema table-schema, :name table-name})
;; create the RawTable
(let [{raw-table-id :id} (db/insert! RawTable
:database_id database-id
:schema table-schema
:name table-name
:details {}
:active true)]
;; update the Table and link it with the RawTable
(db/update! Table table-id
:raw_table_id raw-table-id)
;; migrate all Fields in the Table (skipping :dynamic-schema dbs)
(when-not (driver/driver-supports? (driver/engine->driver engine) :dynamic-schema)
(let [processed-fields (atom #{})]
(doseq [{field-id :id, column-name :name, :as field} (db/select Field, :table_id table-id, :visibility_type [:not= "retired"])]
;; guard against duplicate fields with the same name
(if (contains? @processed-fields column-name)
;; this is a dupe, disable it
(db/update! Field field-id
:visibility_type "retired")
;; normal unmigrated field, so lets use it
(let [{raw-column-id :id} (db/insert! RawColumn
:raw_table_id raw-table-id
:name column-name
:is_pk (= :id (:special_type field))
:details {:base-type (:base_type field)}
:active true)]
;; update the Field and link it with the RawColumn
(db/update! Field field-id
:raw_column_id raw-column-id
:last_analyzed (u/new-sql-timestamp))
;; add this column to the set we've processed already
(swap! processed-fields conj column-name)))))))))))))))))
(ns metabase.db.spec
"Functions for creating JDBC DB specs for a given engine.")
(defn h2
  "Create a JDBC database specification for an H2 database. OPTS should contain
   a :db key whose value is the path to the database file (default \"h2.db\")."
  [{:keys [db make-pool?], :or {db "h2.db", make-pool? true}, :as opts}]
  ;; start from the H2 defaults, then let any extra user-supplied options
  ;; (user, password, etc.) override or extend them
  (let [defaults {:classname   "org.h2.Driver" ; must be in classpath
                  :subprotocol "h2"
                  :subname     db
                  :make-pool?  make-pool?}]
    (merge defaults (dissoc opts :db))))
(defn postgres
  "Create a JDBC database specification for a Postgres database. OPTS should
   contain :db, :user, and :password; :host and :port are optional and default
   to localhost:5432."
  [{:keys [host port db make-pool?], :or {host "localhost", port 5432, db "", make-pool? true}, :as opts}]
  ;; build the JDBC subname first, then fold in any extra driver options
  (let [subname (str "//" host ":" port "/" db)]
    (merge {:classname   "org.postgresql.Driver" ; must be in classpath
            :subprotocol "postgresql"
            :subname     subname
            :make-pool?  make-pool?}
           (dissoc opts :host :port :db))))
(defn mysql
  "Create a JDBC database specification for a MySQL database. OPTS should
   contain :db, :user, and :password; :host and :port are optional and default
   to localhost:3306. Delimiters are automatically set to \"`\"."
  [{:keys [host port db make-pool?], :or {host "localhost", port 3306, db "", make-pool? true}, :as opts}]
  ;; assemble the JDBC subname, then merge any remaining driver options on top
  (let [subname (str "//" host ":" port "/" db)]
    (merge {:classname   "com.mysql.jdbc.Driver" ; must be in classpath
            :subprotocol "mysql"
            :subname     subname
            :delimiters  "`"
            :make-pool?  make-pool?}
           (dissoc opts :host :port :db))))
;; TODO - These other ones can actually be moved directly into their respective drivers themselves since they're not supported as backing DBs
(defn mssql
  "Create a JDBC database specification for a MS SQL Server database. OPTS
   should contain :db, :user, and :password; :host and :port are optional and
   default to localhost:1433. NOTE: the credentials are embedded directly in
   the connection string."
  [{:keys [user password db host port make-pool?]
    :or   {user "dbuser", password "dbpassword", db "", host "localhost", port 1433, make-pool? true}
    :as   opts}]
  ;; SQL Server packs everything — including credentials — into the subname
  (let [subname (str "//" host ":" port ";database=" db ";user=" user ";password=" password)]
    (merge {:classname   "com.microsoft.sqlserver.jdbc.SQLServerDriver" ; must be in classpath
            :subprotocol "sqlserver"
            :subname     subname
            :make-pool?  make-pool?}
           (dissoc opts :host :port :db))))
(defn sqlite3
  "Create a JDBC database specification for a SQLite3 database. OPTS should
   contain a :db key whose value is the path to the database file (default
   \"sqlite.db\")."
  [{:keys [db make-pool?], :or {db "sqlite.db", make-pool? true}, :as opts}]
  ;; defaults first; remaining user options override/extend the base spec
  (let [defaults {:classname   "org.sqlite.JDBC" ; must be in classpath
                  :subprotocol "sqlite"
                  :subname     db
                  :make-pool?  make-pool?}]
    (merge defaults (dissoc opts :db))))
......@@ -5,10 +5,9 @@
[clojure.tools.logging :as log]
(honeysql [core :as hsql]
[helpers :as h])
[korma.db :as kdb]
(metabase [config :as config]
[db :as db]
[driver :as driver])
[metabase.config :as config]
[metabase.db :as db]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.driver.generic-sql.query-processor :as sqlqp]
(metabase.models [database :refer [Database]]
......
......@@ -3,8 +3,7 @@
[clojure.set :as set]
[honeysql.core :as hsql]
[metabase.driver :as driver]
(metabase.driver.crate [analyze :as analyze]
[query-processor :as qp]
(metabase.driver.crate [query-processor :as qp]
[util :as crate-util])
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]))
......@@ -67,8 +66,7 @@
(u/strict-extend CrateDriver
driver/IDriver
(merge (sql/IDriverSQLDefaultsMixin)
{:analyze-table analyze/analyze-table
:can-connect? (u/drop-first-arg can-connect?)
{:can-connect? (u/drop-first-arg can-connect?)
:date-interval crate-util/date-interval
:details-fields (constantly [{:name "hosts"
:display-name "Hosts"
......
(ns metabase.driver.crate.analyze
(:require [korma.core :as k]
[metabase.driver.generic-sql :as sql]
[metabase.models.field :as field]
[metabase.sync-database.analyze :as analyze]))
(defn- field-avg-length
  "Average length (in characters) of the non-null values of FIELD, computed by the
  database itself via `CHAR_LENGTH` wrapped in an `AVG` aggregate. The result is
  truncated to an int; returns 0 when the query yields no row or a nil average
  (e.g. an empty table).
  NOTE(review): relies on the Korma macro DSL (`select`/`aggregate`/`sqlfn`) --
  `avg` here is symbolic, resolved by Korma's macroexpansion, not a Clojure fn."
  [field]
  (or (some-> (sql/korma-entity (field/table field))
              (k/select (k/aggregate (avg (k/sqlfn :CHAR_LENGTH
                                                   (sql/escape-field-name (:name field))))
                                     :len))
              ;; k/select returns a seq of row maps; we only asked for one aggregate row
              first
              :len
              int)
      0))
(defn- field-percent-urls
  "Fraction (0.0-1.0, as a float) of FIELD's non-null values that look like URLs,
  judged by the SQL LIKE pattern \"http%://_%.__%\". Returns 0.0 when the field has
  no non-null values or either count query yields no result.
  NOTE(review): both counts are computed server-side with Korma's macro DSL;
  `count`/`not=`/`like` inside `aggregate`/`where` are symbolic Korma forms."
  [field]
  (or (let [korma-table (sql/korma-entity (field/table field))]
        ;; first query: how many values are non-null at all?
        (when-let [total-non-null-count (:count (first (k/select korma-table
                                                                 (k/aggregate (count (k/raw "*")) :count)
                                                                 (k/where {(sql/escape-field-name (:name field)) [not= nil]}))))]
          (when (> total-non-null-count 0)
            ;; second query: how many of those match the URL-ish LIKE pattern?
            (when-let [url-count (:count (first (k/select korma-table
                                                          (k/aggregate (count (k/raw "*")) :count)
                                                          (k/where {(sql/escape-field-name (:name field)) [like "http%://_%.__%"]}))))]
              (float (/ url-count total-non-null-count))))))
      0.0))
(defn analyze-table
  "Default implementation of `analyze-table` for SQL drivers. Builds an analyze fn
  via `analyze/make-analyze-table`, plugging in our Crate-specific average-length
  and percent-URLs implementations, then invokes it."
  [driver table new-table-ids]
  (let [do-analyze-table (analyze/make-analyze-table driver
                                                     :field-avg-length-fn   field-avg-length
                                                     :field-percent-urls-fn field-percent-urls)]
    (do-analyze-table driver table new-table-ids)))
......@@ -5,14 +5,13 @@
[clojure.tools.logging :as log]
(honeysql [core :as hsql]
[format :as hformat])
(korma [core :as k]
[db :as kdb])
[metabase.driver :as driver]
[metabase.sync-database.analyze :as analyze]
metabase.query-processor.interface
(metabase [db :as db]
[driver :as driver])
(metabase.models [field :as field]
raw-table
[table :as table])
metabase.query-processor.interface
[metabase.sync-database.analyze :as analyze]
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx])
(:import java.sql.DatabaseMetaData
......@@ -53,10 +52,10 @@
"Given a `Database` DETAILS-MAP, return a JDBC connection spec.")
(current-datetime-fn [this]
"*OPTIONAL*. Korma form that should be used to get the current `DATETIME` (or equivalent). Defaults to `:%now`.")
"*OPTIONAL*. HoneySQL form that should be used to get the current `DATETIME` (or equivalent). Defaults to `:%now`.")
(date [this, ^Keyword unit, field-or-value]
"Return a korma form for truncating a date or timestamp field or value to a given resolution, or extracting a date component.")
"Return a HoneySQL form for truncating a date or timestamp field or value to a given resolution, or extracting a date component.")
(excluded-schemas ^java.util.Set [this]
"*OPTIONAL*. Set of string names of schemas to skip syncing tables from.")
......@@ -73,9 +72,9 @@
Return `nil` to prevent FIELD from being aliased.")
(prepare-value [this, ^Value value]
"*OPTIONAL*. Prepare a value (e.g. a `String` or `Integer`) that will be used in a korma form. By default, this returns VALUE's `:value` as-is, which
"*OPTIONAL*. Prepare a value (e.g. a `String` or `Integer`) that will be used in a HoneySQL form. By default, this returns VALUE's `:value` as-is, which
is eventually passed as a parameter in a prepared statement. Drivers such as BigQuery that don't support prepared statements can skip this
behavior by returning a korma `raw` form instead, or other drivers can perform custom type conversion as appropriate.")
behavior by returning a HoneySQL `raw` form instead, or other drivers can perform custom type conversion as appropriate.")
(quote-style ^clojure.lang.Keyword [this]
"*OPTIONAL*. Return the quoting style that should be used by [HoneySQL](https://github.com/jkk/honeysql) when building a SQL statement.
......@@ -99,7 +98,7 @@
(hsql/call :length (hx/cast :VARCHAR field-key))")
(unix-timestamp->timestamp [this, field-or-value, ^Keyword seconds-or-milliseconds]
"Return a korma form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`.
"Return a HoneySQL form appropriate for converting a Unix timestamp integer field or value to an proper SQL `Timestamp`.
SECONDS-OR-MILLISECONDS refers to the resolution of the int in question and with be either `:seconds` or `:milliseconds`."))
......@@ -118,11 +117,12 @@
[{:keys [id engine details]}]
(log/debug (u/format-color 'magenta "Creating new connection pool for database %d ..." id))
(let [spec (connection-details->spec (driver/engine->driver engine) details)]
(kdb/connection-pool (assoc spec :minimum-pool-size 1
;; prevent broken connections closed by dbs by testing them every 3 mins
:idle-connection-test-period (* 3 60)
;; prevent overly large pools by condensing them when connections are idle for 15m+
:excess-timeout (* 15 60)))))
(db/connection-pool (assoc spec
:minimum-pool-size 1
;; prevent broken connections closed by dbs by testing them every 3 mins
:idle-connection-test-period (* 3 60)
;; prevent overly large pools by condensing them when connections are idle for 15m+
:excess-timeout (* 15 60)))))
(defn- notify-database-updated
"We are being informed that a DATABASE has been updated, so lets shut down the connection pool (if it exists) under
......@@ -159,7 +159,7 @@
(defn escape-field-name
"Escape dots in a field name so Korma doesn't get confused and separate them. Returns a keyword."
"Escape dots in a field name so HoneySQL doesn't get confused and separate them. Returns a keyword."
^clojure.lang.Keyword [k]
(keyword (hx/escape-dots (name k))))
......@@ -183,13 +183,14 @@
"Convert HONEYSQL-FORM to a vector of SQL string and params, like you'd pass to JDBC."
[driver honeysql-form]
{:pre [(map? honeysql-form)]}
(try (binding [hformat/*subquery?* false]
(hsql/format honeysql-form
:quoting (quote-style driver)
:allow-dashed-names? true))
(catch Throwable e
(log/error (u/format-color 'red "Invalid HoneySQL form:\n%s" (u/pprint-to-str honeysql-form)))
(throw e))))
(let [[sql & args] (try (binding [hformat/*subquery?* false]
(hsql/format honeysql-form
:quoting (quote-style driver)
:allow-dashed-names? true))
(catch Throwable e
(log/error (u/format-color 'red "Invalid HoneySQL form:\n%s" (u/pprint-to-str honeysql-form)))
(throw e)))]
(into [(hx/unescape-dots sql)] args)))
(defn- qualify+escape ^clojure.lang.Keyword
([table]
......@@ -240,8 +241,7 @@
(defn- table-rows-seq [driver database table]
(query driver database table {:select [:*]}))
(defn- field-avg-length
[driver field]
(defn- field-avg-length [driver field]
(let [table (field/table field)
db (table/database table)]
(or (some-> (query driver db table {:select [[(hsql/call :avg (string-length-fn driver (qualify+escape table field))) :len]]})
......@@ -427,39 +427,3 @@
:mbql->native (resolve 'metabase.driver.generic-sql.query-processor/mbql->native)
:notify-database-updated notify-database-updated
:table-rows-seq table-rows-seq}))
;;; ### Util Fns
(defn create-db
  "Like `korma.db/create-db`, but adds a fn to unescape escaped dots when generating SQL."
  [spec]
  (let [korma-db (kdb/create-db spec)]
    ;; Wrap the existing field-naming fn so names pass through `hx/unescape-dots`
    ;; first, then the original naming fn (same as the original
    ;; `(update-in … comp hx/unescape-dots)`, spelled out explicitly).
    (update-in korma-db [:options :naming :fields]
               (fn [fields-fn]
                 (comp fields-fn hx/unescape-dots)))))
(defn- db->korma-db
  "Return a Korma DB spec for Metabase DATABASE, with a pooled JDBC connection
  attached under :pool."
  [{:keys [details engine], :as database}]
  (let [driver   (driver/engine->driver engine)
        spec     (connection-details->spec driver details)
        korma-db (create-db spec)]
    (assoc korma-db :pool (db->jdbc-connection-spec database))))
(defn create-entity
  "Like `korma.db/create-entity`, but takes a sequence of name components instead; escapes dots in names as well."
  [name-components]
  ;; Blank/empty components are dropped; the rest are dot-escaped and joined with
  ;; "." to form the entity name Korma sees.
  (let [escaped-names (for [component name-components
                            :when     (seq component)]
                        (name (hx/escape-dots (name component))))]
    (k/create-entity (apply str (interpose "." escaped-names)))))
(defn korma-entity
  "Return a Korma entity for [DB and] TABLE.

     (-> (Table :id 100)
         korma-entity
         (select (aggregate (count :*) :count)))"
  ([table]
   ;; single-arity: look the Database up from the Table itself
   (korma-entity (table/database table) table))
  ([db table]
   (let [{:keys [schema], table-name :name} table]
     (k/database (create-entity [schema table-name])
                 (db->korma-db db)))))
......@@ -2,8 +2,8 @@
;; TODO - This namespace should be reworked to use `u/drop-first-arg` like newer drivers
(:require [clojure.string :as s]
[honeysql.core :as hsql]
[korma.db :as kdb]
[metabase.db :as db]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]
......@@ -104,8 +104,9 @@
"ACCESS_MODE_DATA" "r"}))))
(defn- connection-details->spec [_ details]
(kdb/h2 (if db/*allow-potentailly-unsafe-connections* details
(update details :db connection-string-set-safe-options))))
(dbspec/h2 (if db/*allow-potentailly-unsafe-connections*
details
(update details :db connection-string-set-safe-options))))
(defn- unix-timestamp->timestamp [_ expr seconds-or-milliseconds]
......
......@@ -2,7 +2,7 @@
(:require (clojure [set :as set]
[string :as s])
[honeysql.core :as hsql]
[korma.db :as kdb]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]
......@@ -58,7 +58,7 @@
(defn- connection-details->spec [details]
(-> details
(set/rename-keys {:dbname :db})
kdb/mysql
dbspec/mysql
(update :subname #(str % connection-args-string (when-not (:ssl details)
"&useSSL=false"))))) ; newer versions of MySQL will complain if you don't explicitly disable SSL
......
......@@ -5,7 +5,7 @@
[string :as s])
[clojure.tools.logging :as log]
[honeysql.core :as hsql]
[korma.db :as kdb]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]
......@@ -103,7 +103,7 @@
ssl-params
disable-ssl-params))
(rename-keys {:dbname :db})
kdb/postgres))
dbspec/postgres))
(defn- unix-timestamp->timestamp [expr seconds-or-milliseconds]
(case seconds-or-milliseconds
......
......@@ -2,16 +2,16 @@
"Amazon Redshift Driver."
(:require [clojure.java.jdbc :as jdbc]
[honeysql.core :as hsql]
[korma.db :as kdb]
(metabase [config :as config]
[driver :as driver])
[metabase.config :as config]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
(metabase.driver [generic-sql :as sql]
[postgres :as postgres])
[metabase.util :as u]
[metabase.util.honeysql-extensions :as hx]))
(defn- connection-details->spec [details]
(kdb/postgres (merge details postgres/ssl-params))) ; always connect to redshift over SSL
(dbspec/postgres (merge details postgres/ssl-params))) ; always connect to redshift over SSL
(defn- date-interval [unit amount]
(hsql/call :+ :%getdate (hsql/raw (format "INTERVAL '%d %s'" (int amount) (name unit)))))
......
......@@ -2,8 +2,8 @@
(:require [clojure.set :as set]
(honeysql [core :as hsql]
[format :as hformat])
[korma.db :as kdb]
[metabase.config :as config]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]
......@@ -143,7 +143,7 @@
(merge (sql/ISQLDriverDefaultsMixin)
{:active-tables sql/post-filtered-active-tables
:column->base-type (sql/pattern-based-column->base-type pattern->type)
:connection-details->spec (u/drop-first-arg kdb/sqlite3)
:connection-details->spec (u/drop-first-arg dbspec/sqlite3)
:current-datetime-fn (constantly (hsql/raw "datetime('now')"))
:date (u/drop-first-arg date)
:prepare-value (u/drop-first-arg prepare-value)
......
(ns metabase.driver.sqlserver
(:require [clojure.string :as s]
[honeysql.core :as hsql]
[korma.db :as kdb]
[metabase.db.spec :as dbspec]
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.util :as u]
......@@ -50,9 +50,9 @@
(defn- connection-details->spec [{:keys [domain instance ssl], :as details}]
(-> ;; Having the `:ssl` key present, even if it is `false`, will make the driver attempt to connect with SSL
(kdb/mssql (if ssl
details
(dissoc details :ssl)))
(dbspec/mssql (if ssl
details
(dissoc details :ssl)))
;; swap out Microsoft Driver details for jTDS ones
(assoc :classname "net.sourceforge.jtds.jdbc.Driver"
:subprotocol "jtds:sqlserver")
......
(ns metabase.models.dashboard-card
(:require [clojure.set :as set]
[korma.db :as kdb]
[metabase.db :as db]
[metabase.events :as events]
[metabase.models.card :refer [Card]]
......@@ -88,7 +87,7 @@
(u/nil-or-sequence-of-maps? parameter_mappings)
(every? integer? series)]}
(let [{:keys [sizeX sizeY row col series]} (merge {:series []} dashboard-card)]
(kdb/transaction
(db/transaction
;; update the dashcard itself (positional attributes)
(when (and sizeX sizeY row col)
(db/update-non-nil-keys! DashboardCard id, :sizeX sizeX, :sizeY sizeY, :row row, :col col, :parameter_mappings parameter_mappings))
......@@ -109,7 +108,7 @@
(u/nil-or-sequence-of-maps? parameter_mappings)]}
(let [{:keys [sizeX sizeY row col series]} (merge {:sizeX 2, :sizeY 2, :series []}
dashboard-card)]
(kdb/transaction
(db/transaction
(let [{:keys [id] :as dashboard-card} (db/insert! DashboardCard
:dashboard_id dashboard_id
:card_id card_id
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment