Commit 241eb146 authored by Allen Gilliland

wrapping up the updates to the sync process and updating unit tests to account for the tweaks.

parent 27141e9b
Showing with 549 additions and 1049 deletions
......@@ -5,7 +5,8 @@
[metabase.db :refer :all]
[metabase.driver :as driver]
(metabase.models [database :refer [Database]]
[table :refer [Table]])))
[table :refer [Table]])
[metabase.sync-database :as sync-database]))
(defendpoint POST "/db/:id"
......@@ -15,10 +16,10 @@
(let-404 [database (Database id)]
(cond
table_id (when-let [table (sel :one Table :db_id id :id (int table_id))]
(future (driver/sync-table! table)))
(future (sync-database/sync-table! table)))
table_name (when-let [table (sel :one Table :db_id id :name table_name)]
(future (driver/sync-table! table)))
:else (future (driver/sync-database! database))))
(future (sync-database/sync-table! table)))
:else (future (sync-database/sync-database! database))))
{:success true})
......
......@@ -4,10 +4,11 @@
[korma.core :as k]
[metabase.api.common :refer :all]
[metabase.db :refer :all]
[metabase.driver :as driver]
(metabase.models [hydrate :refer :all]
[field :refer [Field]]
[table :refer [Table] :as table])
[metabase.driver :as driver]))
[metabase.sync-database :as sync-database]))
(defannotation TableEntityType
"Param must be one of `person`, `event`, `photo`, or `place`."
......@@ -96,7 +97,7 @@
(let-404 [table (Table id)]
(write-check table)
;; run the task asynchronously
(future (driver/sync-table! table)))
(future (sync-database/sync-table! table)))
{:status :ok})
(defendpoint POST "/:id/reorder"
......
......@@ -31,7 +31,7 @@
"Return the distinct values of FIELD.
This is used to create a `FieldValues` object for `:category` Fields."
([field]
(field-distinct-values field @(resolve 'metabase.driver.sync/low-cardinality-threshold)))
(field-distinct-values field @(resolve 'metabase.sync-database.analyze/low-cardinality-threshold)))
([{field-id :id :as field} max-results]
{:pre [(integer? max-results)]}
(mapv first (field-query field (-> {}
......
......@@ -388,23 +388,6 @@
(throw (Exception. (humanize-connection-error-message driver (.getMessage e)))))
false))))
(defn sync-database!
"Sync a `Database`, its `Tables`, and `Fields`.
Takes an optional kwarg `:full-sync?` (default = `true`). A full sync includes more in depth table analysis work."
[database & {:keys [full-sync?]}]
{:pre [(map? database)]}
(require 'metabase.driver.sync)
(@(resolve 'metabase.driver.sync/sync-database!) (engine->driver (:engine database)) database :full-sync? full-sync?))
(defn sync-table!
"Sync a `Table` and its `Fields`.
Takes an optional kwarg `:full-sync?` (default = `true`). A full sync includes more in depth table analysis work."
[table & {:keys [full-sync?]}]
{:pre [(map? table)]}
(require 'metabase.driver.sync)
(@(resolve 'metabase.driver.sync/sync-table!) (database-id->driver (:db_id table)) table :full-sync? full-sync?))
(defn process-query
"Process a structured or native query, and return the result."
......
......@@ -4,17 +4,16 @@
[clojure.tools.logging :as log]
(korma [core :as k]
[db :as kdb])
[korma.sql.utils :as kutils]
(metabase [config :as config]
[db :as db]
[driver :as driver])
(metabase.driver [generic-sql :as sql]
[sync :as sync])
[metabase.driver.generic-sql :as sql]
[metabase.driver.generic-sql.query-processor :as sqlqp]
metabase.driver.query-processor.interface
(metabase.models [database :refer [Database]]
[field :as field]
[table :as table])
[metabase.sync-database.analyze :as analyze]
[metabase.util :as u]
[metabase.util.korma-extensions :as kx])
(:import (java.util Collections Date)
......@@ -404,7 +403,7 @@
driver/IDriver
(merge driver/IDriverDefaultsMixin
{:analyze-table sync/generic-analyze-table
{:analyze-table analyze/generic-analyze-table
:can-connect? (u/drop-first-arg can-connect?)
:date-interval (u/drop-first-arg (comp prepare-value u/relative-date))
:describe-database (u/drop-first-arg describe-database)
......
(ns metabase.driver.generic-sql
(:require [clojure.core.memoize :as memoize]
[clojure.java.jdbc :as jdbc]
(:require [clojure.java.jdbc :as jdbc]
[clojure.set :as set]
[clojure.tools.logging :as log]
(korma [core :as k]
[db :as kdb])
[metabase.driver :as driver]
[metabase.driver.sync :as sync]
[metabase.sync-database.analyze :as analyze]
metabase.driver.query-processor.interface
(metabase.models [field :as field]
[table :as table])
......@@ -297,7 +296,7 @@
(defn analyze-table
"Default implementation of `analyze-table` for SQL drivers."
[driver table new-table-ids]
((sync/make-analyze-table driver
((analyze/make-analyze-table driver
:field-avg-length-fn (partial field-avg-length driver)
:field-percent-urls-fn (partial field-percent-urls driver))
driver
......
......@@ -12,9 +12,9 @@
[util :refer [*mongo-connection* with-mongo-connection values->base-type]])
[metabase.models.field :as field]
[metabase.models.table :as table]
[metabase.sync-database.analyze :as analyze]
[metabase.util :as u]
[cheshire.core :as json]
[metabase.driver.sync :as sync])
[cheshire.core :as json])
(:import com.mongodb.DB))
(declare field-values-lazy-seq)
......@@ -138,10 +138,10 @@
(defn- analyze-table [_ table new-field-ids]
;; We only care about 1) table counts and 2) field values
{:row_count (sync/table-row-count table)
{:row_count (analyze/table-row-count table)
:fields (for [{:keys [id] :as field} (table/fields table)
:when (sync/test-for-cardinality? field (contains? new-field-ids (:id field)))]
(sync/test:cardinality-and-extract-field-values field {:id id}))})
:when (analyze/test-for-cardinality? field (contains? new-field-ids (:id field)))]
(analyze/test:cardinality-and-extract-field-values field {:id id}))})
(defn- field-values-lazy-seq [_ {:keys [qualified-name-components table], :as field}]
(assert (and (map? field)
......
This diff is collapsed.
......@@ -2,9 +2,9 @@
(:require [clojure.core.async :as async]
[clojure.tools.logging :as log]
[metabase.db :as db]
[metabase.driver :as driver]
[metabase.events :as events]
[metabase.models.database :refer [Database]]))
[metabase.models.database :refer [Database]]
[metabase.sync-database :as sync-database]))
(def ^:const sync-database-topics
......@@ -29,7 +29,7 @@
(when-let [database (db/sel :one Database :id (events/object->model-id topic object))]
;; just kick off a sync on another thread
(future (try
(driver/sync-database! database)
(sync-database/sync-database! database)
(catch Throwable t
(log/error (format "Error syncing Database: %d" (:id database)) t))))))
(catch Throwable e
......
(ns metabase.models.field
(:require [korma.core :as k]
[metabase.db :refer :all]
(:require [clojure.data :as d]
[clojure.string :as s]
[korma.core :as k]
[medley.core :as m]
[metabase.db :as db]
(metabase.models [common :as common]
[field-values :refer [FieldValues]]
[foreign-key :refer [ForeignKey]]
......@@ -85,22 +88,22 @@
(merge defaults field)))
(defn- pre-cascade-delete [{:keys [id]}]
(cascade-delete Field :parent_id id)
(cascade-delete ForeignKey (k/where (or (= :origin_id id)
(db/cascade-delete Field :parent_id id)
(db/cascade-delete ForeignKey (k/where (or (= :origin_id id)
(= :destination_id id))))
(cascade-delete 'FieldValues :field_id id))
(db/cascade-delete 'FieldValues :field_id id))
(defn ^:hydrate values
"Return the `FieldValues` associated with this FIELD."
[{:keys [id]}]
(sel :many [FieldValues :field_id :values], :field_id id))
(db/sel :many [FieldValues :field_id :values], :field_id id))
(defn qualified-name-components
"Return the pieces that represent a path to FIELD, of the form `[table-name parent-fields-name* field-name]`."
[{field-name :name, table-id :table_id, parent-id :parent_id}]
(conj (if-let [parent (Field parent-id)]
(qualified-name-components parent)
[(sel :one :field ['Table :name], :id table-id)])
[(db/sel :one :field ['Table :name], :id table-id)])
field-name))
(defn qualified-name
......@@ -112,7 +115,7 @@
"Return the `Table` associated with this `Field`."
{:arglists '([field])}
[{:keys [table_id]}]
(sel :one 'Table, :id table_id))
(db/sel :one 'Table, :id table_id))
(defn field->fk-field
"Attempts to follow a `ForeignKey` from the the given FIELD to a destination `Field`.
......@@ -121,7 +124,7 @@
{:hydrate :target}
[{:keys [id special_type]}]
(when (= :fk special_type)
(let [dest-id (sel :one :field [ForeignKey :destination_id] :origin_id id)]
(let [dest-id (db/sel :one :field [ForeignKey :destination_id] :origin_id id)]
(Field dest-id))))
(u/strict-extend (class Field)
......@@ -137,3 +140,110 @@
:can-write? i/superuser?
:pre-insert pre-insert
:pre-cascade-delete pre-cascade-delete}))
(def ^{:arglists '([field-name base-type])} infer-field-special-type
"If `name` and `base-type` matches a known pattern, return the `special_type` we should assign to it."
(let [bool-or-int #{:BooleanField :BigIntegerField :IntegerField}
float #{:DecimalField :FloatField}
int-or-text #{:BigIntegerField :IntegerField :CharField :TextField}
text #{:CharField :TextField}
;; tuples of [pattern set-of-valid-base-types special-type]
;; * Convert field name to lowercase before matching against a pattern
;; * consider a nil set-of-valid-base-types to mean "match any base type"
pattern+base-types+special-type [[#"^.*_lat$" float :latitude]
[#"^.*_lon$" float :longitude]
[#"^.*_lng$" float :longitude]
[#"^.*_long$" float :longitude]
[#"^.*_longitude$" float :longitude]
[#"^.*_rating$" int-or-text :category]
[#"^.*_type$" int-or-text :category]
[#"^.*_url$" text :url]
[#"^_latitude$" float :latitude]
[#"^active$" bool-or-int :category]
[#"^city$" text :city]
[#"^country$" text :country]
[#"^countryCode$" text :country]
[#"^currency$" int-or-text :category]
[#"^first_name$" text :name]
[#"^full_name$" text :name]
[#"^gender$" int-or-text :category]
[#"^last_name$" text :name]
[#"^lat$" float :latitude]
[#"^latitude$" float :latitude]
[#"^lon$" float :longitude]
[#"^lng$" float :longitude]
[#"^long$" float :longitude]
[#"^longitude$" float :longitude]
[#"^name$" text :name]
[#"^postalCode$" int-or-text :zip_code]
[#"^postal_code$" int-or-text :zip_code]
[#"^rating$" int-or-text :category]
[#"^role$" int-or-text :category]
[#"^sex$" int-or-text :category]
[#"^state$" text :state]
[#"^status$" int-or-text :category]
[#"^type$" int-or-text :category]
[#"^url$" text :url]
[#"^zip_code$" int-or-text :zip_code]
[#"^zipcode$" int-or-text :zip_code]]]
;; Check that all the pattern tuples are valid
(doseq [[name-pattern valid-base-types special-type] pattern+base-types+special-type]
(assert (= (type name-pattern) java.util.regex.Pattern))
(assert (every? (partial contains? base-types) valid-base-types))
(assert (contains? special-types special-type)))
(fn [field-name base_type]
(when (and (string? field-name)
(keyword? base_type))
(or (when (= "id" (s/lower-case field-name)) :id)
(when-let [matching-pattern (m/find-first (fn [[name-pattern valid-base-types _]]
(and (or (nil? valid-base-types)
(contains? valid-base-types base_type))
(re-matches name-pattern (s/lower-case field-name))))
pattern+base-types+special-type)]
;; the actual special-type is the last element of the pattern
(last matching-pattern)))))))
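For reference, a rough sketch of how `infer-field-special-type` behaves, derived only from the patterns listed above (the field names are invented, not taken from this commit):
;; illustrative sketch, not part of the commit
(infer-field-special-type "zipcode" :IntegerField)  ;; => :zip_code
(infer-field-special-type "store_lat" :FloatField)  ;; => :latitude
(infer-field-special-type "ID" :BigIntegerField)    ;; => :id (the "id" name check runs before pattern matching)
(infer-field-special-type "title" :TextField)       ;; => nil, no pattern matches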
(defn update-field
"Update an existing `Field` from the given FIELD-DEF."
[{:keys [id], :as existing-field} {field-name :name, :keys [base-type special-type pk? parent-id]}]
(let [updated-field (assoc existing-field
:base_type base-type
:display_name (or (:display_name existing-field)
(common/name->human-readable-name field-name))
:special_type (or (:special_type existing-field)
special-type
(when pk? :id)
(infer-field-special-type field-name base-type))
:parent_id parent-id)
[is-diff? _ _] (d/diff updated-field existing-field)]
;; if any of the field's attributes have changed, then update it in the db
(when is-diff?
(db/upd Field id
:display_name (:display_name updated-field)
:base_type base-type
:special_type (:special_type updated-field)
:parent_id parent-id))
;; return the updated field when we are done
updated-field))
(defn create-field
"Create a new `Field` from the given FIELD-DEF."
[table-id {field-name :name, :keys [base-type special-type pk? parent-id raw-column-id]}]
{:pre [(integer? table-id)
(string? field-name)
(contains? base-types base-type)]}
(db/ins Field
:table_id table-id
:raw_column_id raw-column-id
:name field-name
:display_name (common/name->human-readable-name field-name)
:base_type base-type
:special_type (or special-type
(when pk? :id)
(infer-field-special-type field-name base-type))
:parent_id parent-id))
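A hedged usage sketch for the two helpers above; the table id, raw column id, and `existing-email-field` are placeholders, not values from this commit:
;; illustrative sketch, not part of the commit
(create-field 1 {:name "email", :base-type :TextField, :raw-column-id 10})
(update-field existing-email-field {:name "email", :base-type :TextField})
;; update-field returns the merged field map and only writes to the app DB
;; when clojure.data/diff reports a change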
......@@ -81,3 +81,28 @@
[table-id]
{:pre [(integer? table-id)]}
(db/sel :one :field [Table :db_id] :id table-id))
(defn update-table
"Update `Table` with the data from TABLE-DEF."
[{:keys [id display_name], :as existing-table} {table-name :name}]
(let [updated-table (assoc existing-table
:display_name (or display_name (common/name->human-readable-name table-name)))]
;; the only thing we need to update on a table is the :display_name, if it never got set
(when (nil? display_name)
(db/upd Table id
:display_name (:display_name updated-table)))
;; always return the table when we are done
updated-table))
(defn create-table
"Create `Table` with the data from TABLE-DEF."
[database-id {schema-name :schema, table-name :name, raw-table-id :raw-table-id}]
(db/ins Table
:db_id database-id
:raw_table_id raw-table-id
:schema schema-name
:name table-name
:display_name (common/name->human-readable-name table-name)
:active true))
......@@ -2,9 +2,9 @@
(:require [clojure.java.io :as io]
[clojure.string :as s]
[clojure.tools.logging :as log]
(metabase [db :as db]
[driver :as driver])
[metabase.db :as db]
[metabase.models.database :refer [Database]]
[metabase.sync-database :as sync-database]
[metabase.util :as u]))
......@@ -27,7 +27,7 @@
:details {:db h2-file}
:engine :h2
:is_sample true)]
(driver/sync-database! db))))
(sync-database/sync-database! db))))
(catch Throwable e
(log/error (u/format-color 'red "Failed to load sample dataset: %s" (.getMessage e)))))))
......@@ -35,6 +35,6 @@
;; TODO - it would be a bit nicer if we skipped this when the data hasn't changed
(when-let [db (db/sel :one Database :is_sample true)]
(try
(driver/sync-database! db)
(sync-database/sync-database! db)
(catch Throwable e
(log/error (u/format-color 'red "Failed to update sample dataset: %s" (.getMessage e)))))))
......@@ -6,14 +6,17 @@
[metabase.driver.query-processor :as qp]
[metabase.driver :as driver]
[metabase.events :as events]
[metabase.models.table :refer [Table], :as table]
[metabase.models.raw-table :as raw-table]
[metabase.models.table :as table]
[metabase.sync-database.analyze :as analyze]
[metabase.sync-database.introspect :as introspect]
[metabase.sync-database.sync :as sync]
[metabase.sync-database.sync-dynamic :as sync-dynamic]
[metabase.util :as u]))
(declare sync-database-with-tracking!)
(declare sync-database-with-tracking!
sync-table-with-tracking!)
(defn sync-database!
......@@ -21,13 +24,15 @@
Takes an optional kwarg `:full-sync?` which determines if we execute our table analysis work. If this is not specified
then we default to using the `:is_full_sync` attribute of the database."
[driver database & {:keys [full-sync?]}]
[database & {:keys [full-sync?]}]
{:pre [(map? database)]}
(binding [qp/*disable-qp-logging* true
db/*sel-disable-logging* true]
(let [full-sync? (if-not (nil? full-sync?)
(let [db-driver (driver/engine->driver (:engine database))
full-sync? (if-not (nil? full-sync?)
full-sync?
(:is_full_sync database))]
(driver/sync-in-context driver database (partial sync-database-with-tracking! driver database full-sync?)))))
(driver/sync-in-context db-driver database (partial sync-database-with-tracking! db-driver database full-sync?)))))
(defn sync-table!
"Sync a *single* TABLE and all of its Fields.
......@@ -35,15 +40,15 @@
Takes an optional kwarg `:full-sync?` which determines if we execute our table analysis work. If this is not specified
then we default to using the `:is_full_sync` attribute of the table's parent database."
[driver table & {:keys [full-sync?]}]
[table & {:keys [full-sync?]}]
{:pre [(map? table)]}
(binding [qp/*disable-qp-logging* true]
(let [database (table/database table)
db-driver (driver/engine->driver (:engine database))
full-sync? (if-not (nil? full-sync?)
full-sync?
(:is_full_sync database))]
(driver/sync-in-context driver database (fn []
;(sync-database-active-tables! driver [table] :analyze? full-sync?)
(events/publish-event :table-sync {:table_id (:id table)}))))))
(driver/sync-in-context db-driver database (partial sync-table-with-tracking! db-driver database table full-sync?)))))
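For orientation, the reworked entry points no longer take a driver argument; the driver is resolved from the database's `:engine`. A rough usage sketch, where `db` and `tbl` are placeholders for existing Database/Table maps:
;; illustrative sketch, not part of the commit
(require '[metabase.sync-database :as sync-database])
(sync-database/sync-database! db)                    ;; full-sync? falls back to (:is_full_sync db)
(sync-database/sync-database! db :full-sync? false)  ;; skip the in-depth analysis step
(sync-database/sync-table! tbl)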
;;; ## ---------------------------------------- IMPLEMENTATION ----------------------------------------
......@@ -55,19 +60,15 @@
(log/info (u/format-color 'magenta "Syncing %s database '%s'..." (name driver) (:name database)))
(events/publish-event :database-sync-begin {:database_id (:id database) :custom_id tracking-hash})
;; TODO :special-type = postgres :json columns + mongo
;; TODO :nested-fields = mongo only, move to analyze-table only
;; each field should be just a path from the root to the given field.
;; we'll want some way to reconstructing the path if needed.
;; TODO :custom -> :details
(binding [qp/*disable-qp-logging* true
db/*sel-disable-logging* true]
;; start with capturing a full introspection of the database
(introspect/introspect-database-and-update-raw-tables! driver database)
;; use the introspected schema information and update our working data models
(sync/update-data-models-from-raw-tables! driver database)
(if (driver/driver-supports? driver :dynamic-schema)
(sync-dynamic/scan-database-and-update-data-model! driver database)
(sync/update-data-models-from-raw-tables! database))
;; now do any in-depth data analysis which requires querying the tables (if enabled)
(when full-sync?
......@@ -78,8 +79,25 @@
(u/format-nanoseconds (- (System/nanoTime) start-time))))))
;; TODO: might be worth keeping this in metabase.driver NS
(defn generic-analyze-table
"An implementation of `analyze-table` using the defaults (`default-field-avg-length` and `field-percent-urls`)."
[driver table new-field-ids]
((analyze/make-analyze-table driver) driver table new-field-ids))
(defn- sync-table-with-tracking! [driver database table full-sync?]
(let [start-time (System/nanoTime)]
(log/info (u/format-color 'magenta "Syncing table '%s' from %s database '%s'..." (:display_name table) (name driver) (:name database)))
(binding [qp/*disable-qp-logging* true
db/*sel-disable-logging* true]
(when-let [raw-tbl (db/sel :one raw-table/RawTable :id (:raw_table_id table))]
;; introspect
(introspect/introspect-raw-table-and-update! driver database raw-tbl)
;; sync
(if (driver/driver-supports? driver :dynamic-schema)
(sync-dynamic/scan-table-and-update-data-model! driver database table)
(sync/update-data-models-for-table! table))
;; analyze
(when full-sync?
(analyze/analyze-table-data-shape! driver table))))
(events/publish-event :table-sync {:table_id (:id table)})
(log/info (u/format-color 'magenta "Finished syncing table '%s' from %s database '%s'. (%s)" (:display_name table) (name driver) (:name database)
(u/format-nanoseconds (- (System/nanoTime) start-time))))))
......@@ -157,7 +157,43 @@
(test-for-cardinality? field new-field?) (test:cardinality-and-extract-field-values field)
new-field? (test:new-field driver field))))})))
(defn generic-analyze-table
"An implementation of `analyze-table` using the defaults (`default-field-avg-length` and `field-percent-urls`)."
[driver table new-field-ids]
((make-analyze-table driver) driver table new-field-ids))
(defn analyze-table-data-shape!
"Analyze the data shape for a single `Table`."
[driver {table-id :id, :as tbl}]
(let [new-field-ids (set (db/sel :many :field [field/Field :id] :table_id table-id, :visibility_type [not= "retired"], :last_analyzed nil))]
;; TODO: this call should include the database
(when-let [table-stats (u/prog1 (driver/analyze-table driver tbl new-field-ids)
(when <>
(schema/validate driver/AnalyzeTable <>)))]
;; update table row count
(when (:row_count table-stats)
(db/upd table/Table table-id :rows (:row_count table-stats)))
;; update individual fields
(doseq [{:keys [id preview-display special-type values]} (:fields table-stats)]
;; set Field metadata we may have detected
(when (and id (or preview-display special-type))
(db/upd-non-nil-keys field/Field id
;; if a field was marked `preview-display` false, set its visibility type to `:details-only` (see models.field/visibility-types)
:visibility_type (when (false? preview-display) :details-only)
:special_type special-type))
;; handle field values, setting them if applicable otherwise clearing them
(if (and id values (< 0 (count (filter identity values))))
(field-values/save-field-values id values)
(field-values/clear-field-values id))))
;; update :last_analyzed for all fields in the table
(k/update field/Field
(k/set-fields {:last_analyzed (u/new-sql-timestamp)})
(k/where {:table_id table-id
:visibility_type [not= "retired"]}))))
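To make the consumer above concrete, a hypothetical value of the shape `analyze-table-data-shape!` expects back from `driver/analyze-table`; the keys come from the destructuring above, but the ids and values are invented:
;; illustrative sketch, not part of the commit
{:row_count 75
 :fields    [{:id 101, :special-type :category, :values ["Asian" "BBQ" "Bakery"]}
             {:id 102, :preview-display false}]}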
(defn analyze-data-shape-for-tables!
"Perform in-depth analysis on the data shape for all `Tables` in a given DATABASE.
......@@ -171,34 +207,9 @@
(let [tables (db/sel :many table/Table :db_id database-id, :active true)
tables-count (count tables)
finished-tables-count (atom 0)]
(doseq [{table-id :id, table-name :name, :as tbl} tables]
(doseq [{table-name :name, :as tbl} tables]
(try
(let [new-field-ids (set (db/sel :many :field [field/Field :id] :table_id table-id, :visibility_type [not= "retired"], :last_analyzed nil))]
;; TODO: this call should include the database
(when-let [table-stats (u/prog1 (driver/analyze-table driver tbl new-field-ids)
(schema/validate driver/AnalyzeTable <>))]
;; update table row count
(when (:row_count table-stats)
(db/upd table/Table table-id :rows (:row_count table-stats)))
;; update individual fields
(doseq [{:keys [id preview-display special-type values]} (:fields table-stats)]
;; set Field metadata we may have detected
(when (and id (or preview-display special-type))
(db/upd-non-nil-keys field/Field id
;; if a field marked `preview-display` as false then set the visibility type to `:details-only` (see models.field/visibility-types)
:visibility_type (when (false? preview-display) :details-only)
:special_type special-type))
;; handle field values, setting them if applicable otherwise clearing them
(if (and id values (< 0 (count (filter identity values))))
(field-values/save-field-values id values)
(field-values/clear-field-values id))))
;; update :last_analyzed for all fields in the table
(k/update field/Field
(k/set-fields {:last_analyzed (u/new-sql-timestamp)})
(k/where {:table_id table-id
:visibility_type [not= "retired"]})))
(analyze-table-data-shape! driver tbl)
(catch Throwable t
(log/error "Unexpected error analyzing table" t))
(finally
......
......@@ -82,6 +82,7 @@
[database-id {table-name :name, table-schema :schema, :keys [details columns]}]
{:pre [(integer? database-id)
(string? table-name)]}
(log/debug (u/format-color 'cyan "Found new table: %s" (named-table table-schema table-name)))
(let [table (db/ins raw-table/RawTable
:database_id database-id
:schema table-schema
......@@ -119,6 +120,30 @@
(k/set-fields {:active false})))))
(defn introspect-raw-table-and-update!
"Introspect a single `RawTable` and persist the results as `RawTables` and `RawColumns`.
Uses the various `describe-*` functions on the IDriver protocol to gather information."
[driver database raw-tbl]
(let [table-def (select-keys raw-tbl [:schema :name])
table-def (if (contains? (driver/features driver) :dynamic-schema)
;; dynamic schemas are handled differently; we'll handle them elsewhere
(assoc table-def :columns [])
;; static schema databases get introspected now
(u/prog1 (driver/describe-table driver database table-def)
(schema/validate driver/DescribeTable <>)))
;; TODO: we should update drivers to return :columns instead of :fields
table-def (set/rename-keys table-def {:fields :columns})]
;; save the latest updates from the introspection
(update-raw-table! raw-tbl table-def)
;; if we support FKs then try updating those as well
(when (contains? (driver/features driver) :foreign-keys)
(when-let [table-fks (u/prog1 (driver/describe-table-fks driver database table-def)
(schema/validate driver/DescribeTableFKs <>))]
(save-all-table-fks! raw-tbl table-fks)))))
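As a rough illustration of the `:fields` -> `:columns` rename mentioned in the TODO above, a hypothetical static-schema table-def after `set/rename-keys`; the schema, names, and types are invented:
;; illustrative sketch, not part of the commit
{:schema  "public"
 :name    "users"
 :columns [{:name "id",    :base-type :BigIntegerField, :pk? true}
           {:name "email", :base-type :TextField}]}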
(defn introspect-database-and-update-raw-tables!
"Introspect a `Database` and persist the results as `RawTables` and `RawColumns`.
Uses the various `describe-*` functions on the IDriver protocol to gather information."
......@@ -147,9 +172,7 @@
table-def (set/rename-keys table-def {:fields :columns})]
(if-let [raw-tbl (get existing-tables (select-keys table-def [:schema :name]))]
(update-raw-table! raw-tbl table-def)
(do
(log/debug (u/format-color 'cyan "Found new table: %s" (named-table table-schema table-name)))
(create-raw-table! (:id database) table-def))))
(create-raw-table! (:id database) table-def)))
(catch Throwable t
;; TODO: we should capture this and save it on the RawTable
(log/error (u/format-color 'red "Unexpected error introspecting table schema: %s" (named-table table-schema table-name)) t))
......
......@@ -2,40 +2,36 @@
(:require [clojure.string :as s]
[clojure.tools.logging :as log]
[korma.core :as k]
[medley.core :as m]
[metabase.db :as db]
[metabase.driver :as driver]
[metabase.models.common :as common]
[metabase.models.field :as field]
[metabase.models.raw-column :as raw-column]
[metabase.models.raw-table :as raw-table]
[metabase.models.table :as table]
[metabase.util :as u]))
[metabase.util :as u]
[clojure.set :as set]))
(def ^:private ^:dynamic *sync-dynamic* false)
(defn- save-all-fks!
(defn- save-fks!
"Update all of the FK relationships present in DATABASE based on what's captured in the raw schema.
This will set :special_type :fk and :fk_target_field_id <field-id> for each found FK relationship.
NOTE: we currently overwrite any previously defined metadata when doing this."
[{database-id :id}]
(when-let [fk-sources (k/select raw-column/RawColumn
(k/fields :id :fk_target_column_id)
(k/join raw-table/RawTable (= :raw_table.id :raw_table_id))
(k/where {:raw_table.database_id database-id})
(k/where (not= :raw_column.fk_target_column_id nil)))]
(doseq [{fk-source-id :id, fk-target-id :fk_target_column_id} fk-sources]
;; TODO: it's possible there are multiple fields here with the same source/target column ids
(when-let [source-field-id (db/sel :one :field [field/Field :id] :raw_column_id fk-source-id, :visibility_type [not= "retired"])]
(when-let [target-field-id (db/sel :one :field [field/Field :id] :raw_column_id fk-target-id, :visibility_type [not= "retired"])]
(db/upd field/Field source-field-id
:special_type :fk
:fk_target_field_id target-field-id))))))
(defn- sync-metabase-metadata-table!
[fk-sources]
{:pre [(coll? fk-sources)
(every? map? fk-sources)]}
(doseq [{fk-source-id :source-column, fk-target-id :target-column} fk-sources]
;; TODO: it's possible there are multiple fields here with the same source/target column ids
(when-let [source-field-id (db/sel :one :field [field/Field :id] :raw_column_id fk-source-id, :visibility_type [not= "retired"])]
(when-let [target-field-id (db/sel :one :field [field/Field :id] :raw_column_id fk-target-id, :visibility_type [not= "retired"])]
(db/upd field/Field source-field-id
:special_type :fk
:fk_target_field_id target-field-id)))))
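For clarity, the reworked `save-fks!` now receives the raw column pairs directly instead of querying them itself; a hypothetical call with invented ids:
;; illustrative sketch, not part of the commit
(save-fks! [{:source-column 15, :target-column 42}
            {:source-column 16, :target-column 43}])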
(defn sync-metabase-metadata-table!
"Databases may include a table named `_metabase_metadata` (case-insentive) which includes descriptions or other metadata about the `Tables` and `Fields`
it contains. This table is *not* synced normally, i.e. a Metabase `Table` is not created for it. Instead, *this* function is called, which reads the data it
contains and updates the relevant Metabase objects.
......@@ -68,107 +64,6 @@
(log/error (u/format-color 'red "Error in _metabase_metadata: %s" (.getMessage e))))))))
(def ^{:arglists '([column])} infer-field-special-type
"If RAW-COLUMN has a `name` and `base_type` that matches a known pattern, return the `special_type` we should assign to it."
(let [bool-or-int #{:BooleanField :BigIntegerField :IntegerField}
float #{:DecimalField :FloatField}
int-or-text #{:BigIntegerField :IntegerField :CharField :TextField}
text #{:CharField :TextField}
;; tuples of [pattern set-of-valid-base-types special-type
;; * Convert field name to lowercase before matching against a pattern
;; * consider a nil set-of-valid-base-types to mean "match any base type"
pattern+base-types+special-type [[#"^.*_lat$" float :latitude]
[#"^.*_lon$" float :longitude]
[#"^.*_lng$" float :longitude]
[#"^.*_long$" float :longitude]
[#"^.*_longitude$" float :longitude]
[#"^.*_rating$" int-or-text :category]
[#"^.*_type$" int-or-text :category]
[#"^.*_url$" text :url]
[#"^_latitude$" float :latitude]
[#"^active$" bool-or-int :category]
[#"^city$" text :city]
[#"^country$" text :country]
[#"^countryCode$" text :country]
[#"^currency$" int-or-text :category]
[#"^first_name$" text :name]
[#"^full_name$" text :name]
[#"^gender$" int-or-text :category]
[#"^last_name$" text :name]
[#"^lat$" float :latitude]
[#"^latitude$" float :latitude]
[#"^lon$" float :longitude]
[#"^lng$" float :longitude]
[#"^long$" float :longitude]
[#"^longitude$" float :longitude]
[#"^name$" text :name]
[#"^postalCode$" int-or-text :zip_code]
[#"^postal_code$" int-or-text :zip_code]
[#"^rating$" int-or-text :category]
[#"^role$" int-or-text :category]
[#"^sex$" int-or-text :category]
[#"^state$" text :state]
[#"^status$" int-or-text :category]
[#"^type$" int-or-text :category]
[#"^url$" text :url]
[#"^zip_code$" int-or-text :zip_code]
[#"^zipcode$" int-or-text :zip_code]]]
;; Check that all the pattern tuples are valid
(doseq [[name-pattern base-types special-type] pattern+base-types+special-type]
(assert (= (type name-pattern) java.util.regex.Pattern))
(assert (every? (partial contains? field/base-types) base-types))
(assert (contains? field/special-types special-type)))
(fn [{:keys [base_type details], field-name :name, pk? :is_pk}]
(when (and (string? field-name)
(keyword? base_type))
(let [{:keys [special-type]} details]
(or special-type
(when pk? :id)
(when (= "id" (s/lower-case field-name)) :id)
(when-let [matching-pattern (m/find-first (fn [[name-pattern valid-base-types _]]
(and (or (nil? valid-base-types)
(contains? valid-base-types base_type))
(re-matches name-pattern (s/lower-case field-name))))
pattern+base-types+special-type)]
;; a little something for the app log
(log/debug (u/format-color 'green "%s '%s' matches '%s'. Setting special_type to '%s'."
(name base_type) field-name (first matching-pattern) (name (last matching-pattern))))
;; the actual special-type is the last element of the pattern
(last matching-pattern))))))))
(defn- update-field!
"Update a single `Field` with values from `RawColumn`."
[{:keys [id], :as existing-field} {column-name :name, :keys [base_type], :as column}]
(let [special-type (or (:special_type existing-field)
(infer-field-special-type column))]
;; if we have a different base-type or special-type, then update
(when (or (not= base_type (:base_type existing-field))
(not= special-type (:special_type existing-field)))
(db/upd field/Field id
:display_name (or (:display_name existing-field)
(common/name->human-readable-name column-name))
:base_type base_type
:special_type special-type))))
(defn- create-field!
"Create a new `Field` with values from `RawColumn`."
[table-id {column-name :name, column-id :id, :keys [base_type], :as column}]
(let [fk-target-field (when-let [fk-target-column (:fk_target_column_id column)]
;; we need the field-id in this database which corresponds to this raw-columns fk target
(db/sel :one :field [field/Field :id] :raw_column_id fk-target-column))]
(db/ins field/Field
:table_id table-id
:raw_column_id column-id
:name column-name
:display_name (common/name->human-readable-name column-name)
:base_type base_type
:special_type (infer-field-special-type column)
:fk_target_field_id fk-target-field)))
(defn- save-table-fields!
"Refresh all `Fields` in a given `Table` based on what's available in the associated `RawColumns`.
......@@ -189,64 +84,31 @@
(k/set-fields {:visibility_type "retired"}))))
;; create/update the active columns
(doseq [{raw-column-id :id, :as column} active-raw-columns]
(if-let [existing-field (get existing-fields raw-column-id)]
;; field already exists, so we UPDATE it
(update-field! existing-field column)
;; looks like a new field, so we CREATE it
(create-field! table-id column)))))
(defn- save-fields!
"Update `Fields` for `Table`.
This is a simple delegating function which either calls sync-dynamic/save-table-fields! or sync/save-table-fields! based
on whether the database being synced has a `:dynamic-schema`."
[tbl]
(if *sync-dynamic*
(do
(require 'metabase.sync-database.sync-dynamic)
((ns-resolve 'metabase.sync-database.sync-dynamic 'save-table-fields!) tbl))
(save-table-fields! tbl)))
(defn- update-table!
"Update `Table` with the data from `RawTable`, including saving all fields."
[{:keys [id display_name], :as existing-table} {table-name :name}]
;; the only thing we need to update on a table is the :display_name, if it never got set
(when (nil? display_name)
(db/upd table/Table id
:display_name (common/name->human-readable-name table-name)))
;; now update the all the table fields
(save-fields! existing-table))
(defn- create-table!
"Create `Table` with the data from `RawTable`, including all fields."
[database-id {schema-name :schema, table-name :name, raw-table-id :id}]
(let [new-table (db/ins table/Table
:db_id database-id
:raw_table_id raw-table-id
:schema schema-name
:name table-name
:display_name (common/name->human-readable-name table-name)
:active true)]
;; now create all the table fields
(save-fields! new-table)))
(defn update-data-models-from-raw-tables!
"Update the working `Table` and `Field` metadata for DATABASE based on the latest raw schema information.
This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed.
NOTE: when a database is a `:dynamic-schema` database we follow a slightly different execution path."
[driver {database-id :id, :as database}]
(doseq [{raw-column-id :id, :keys [details], :as column} active-raw-columns]
;; do a little bit of key renaming to match what's expected for a call to update/create-field
(let [column (-> (set/rename-keys column {:base_type :base-type
:is_pk :pk?
:id :raw-column-id})
(assoc :special-type (:special-type details)))]
(if-let [existing-field (get existing-fields raw-column-id)]
;; field already exists, so we UPDATE it
(field/update-field existing-field column)
;; looks like a new field, so we CREATE it
(field/create-field table-id (assoc column :raw-column-id raw-column-id)))))))
(defn retire-tables!
"Retire any `Table` who's `RawTable` has been deactivated.
This occurs when a database introspection reveals the table is no longer available."
[{database-id :id}]
{:pre [(integer? database-id)]}
;; retire tables (and their fields) as needed
(let [tables-to-remove (set (map :id (k/select table/Table
(k/fields :id)
(k/join raw-table/RawTable (= :raw_table.id :raw_table_id))
;; NOTE: something really wrong is happening with SQLKorma here, which requires us
;; to be explicit about :metabase_table.raw_table_id in the join condition
;; without this it seems to want to join against metabase_field !?
(k/join raw-table/RawTable (= :raw_table.id :metabase_table.raw_table_id))
(k/where {:db_id database-id
:active true
:raw_table.active false}))))]
......@@ -257,7 +119,47 @@
;; retire the fields of retired tables
(k/update field/Field
(k/where {:table_id [in tables-to-remove]})
(k/set-fields {:visibility_type "retired"})))
(k/set-fields {:visibility_type "retired"}))))
(defn update-data-models-for-table!
"Update the working `Table` and `Field` metadata for a given `Table` based on the latest raw schema information.
This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed."
[{raw-table-id :raw_table_id, :as existing-table}]
(when-let [{database-id :database_id, :as raw-tbl} (db/sel :one raw-table/RawTable :id raw-table-id)]
(try
(-> (table/update-table existing-table raw-tbl)
save-table-fields!)
;; TODO: handle setting any fk relationships
(when-let [table-fks (k/select raw-column/RawColumn
(k/fields [:id :source-column]
[:fk_target_column_id :target-column])
;; NOTE: something really wrong is happening with SQLKorma here, which requires us
;; to be explicit about :metabase_table.raw_table_id in the join condition
;; without this it seems to want to join against metabase_field !?
(k/join raw-table/RawTable (= :raw_table.id :raw_column.raw_table_id))
(k/where {:raw_table.database_id database-id
:raw_table.id raw-table-id})
(k/where (not= :raw_column.fk_target_column_id nil)))]
(save-fks! table-fks))
(catch Throwable t
(log/error (u/format-color 'red "Unexpected error syncing table") t)))))
(defn update-data-models-from-raw-tables!
"Update the working `Table` and `Field` metadata for *all* tables in a `Database` based on the latest raw schema information.
This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed."
[{database-id :id, :as database}]
{:pre [(integer? database-id)]}
;; quick sanity check that this is not a :dynamic-schema database
(when (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema)
(throw (IllegalStateException. "This function cannot be called on databases which are :dynamic-schema")))
;; retire any tables which were disabled
(retire-tables! database)
(let [raw-tables (raw-table/active-tables database-id)
existing-tables (into {} (for [{raw-table-id :raw_table_id, :as table} (db/sel :many table/Table, :db_id database-id, :active true)]
......@@ -266,19 +168,29 @@
;; NOTE: we make sure to skip the _metabase_metadata table here. it's not a normal table.
(doseq [{raw-table-id :id, :as raw-tbl} (filter #(not= "_metabase_metadata" (s/lower-case (:name %))) raw-tables)]
(try
(binding [*sync-dynamic* (driver/driver-supports? driver :dynamic-schema)]
(if-let [existing-table (get existing-tables raw-table-id)]
;; table already exists, update it
(update-table! existing-table raw-tbl)
;; must be a new table, insert it
(create-table! database-id raw-tbl)))
(if-let [existing-table (get existing-tables raw-table-id)]
;; table already exists, update it
(-> (table/update-table existing-table raw-tbl)
save-table-fields!)
;; must be a new table, insert it
(-> (table/create-table database-id (assoc raw-tbl :raw-table-id raw-table-id))
save-table-fields!))
(catch Throwable t
(log/error (u/format-color 'red "Unexpected error syncing table") t))))
;; handle setting any fk relationships
;; NOTE: this must be done after fully syncing the tables/fields because we need all tables/fields in place
(save-all-fks! database)
(when-let [db-fks (k/select raw-column/RawColumn
(k/fields [:id :source-column]
[:fk_target_column_id :target-column])
;; NOTE: something really wrong is happening with SQLKorma here, which requires us
;; to be explicit about :metabase_table.raw_table_id in the join condition
;; without this it seems to want to join against metabase_field !?
(k/join raw-table/RawTable (= :raw_table.id :raw_column.raw_table_id))
(k/where {:raw_table.database_id database-id})
(k/where (not= :raw_column.fk_target_column_id nil)))]
(save-fks! db-fks))
;; NOTE: if per chance there were multiple _metabase_metadata tables in different schemas, we just take the first
(when-let [_metabase_metadata (first (filter #(= (s/lower-case (:name %)) "_metabase_metadata") raw-tables))]
(sync-metabase-metadata-table! driver database _metabase_metadata))))
(sync-metabase-metadata-table! (driver/engine->driver (:engine database)) database _metabase_metadata))))
(ns metabase.sync-database.sync-dynamic
(:require [clojure.set :as set]
[clojure.string :as s]
[clojure.tools.logging :as log]
[schema.core :as schema]
[metabase.db :as db]
[metabase.driver :as driver]
[metabase.models.common :as common]
[metabase.models.field :as field]
[metabase.models.raw-table :as raw-table]
[metabase.models.table :as table]
[metabase.sync-database.sync :as sync]
[metabase.util :as u]))
......@@ -13,78 +14,99 @@
(declare save-nested-fields!)
(defn- update-field!
"Update an existing `Field` from the given FIELD-DEF. Then save any nested fields if needed."
[{:keys [id], :as existing-field} {field-name :name, :keys [base-type nested-fields], :as field-def}]
(let [special-type (or (:special_type existing-field)
(sync/infer-field-special-type field-def))]
;; if we have a different base-type or special-type, then update
(when (or (not= base-type (:base_type existing-field))
(not= special-type (:special_type existing-field)))
(db/upd field/Field id
:display_name (or (:display_name existing-field)
(common/name->human-readable-name field-name))
:base_type base-type
:special_type special-type)))
(when nested-fields
(save-nested-fields! existing-field nested-fields)))
(defn- create-field!
"Create a new `Field` from the given FIELD-DEF. Then save any nested fields if needed."
[table-id {field-name :name, :keys [base-type nested-fields parent-id], :as field-def}]
(when-let [new-field (db/ins field/Field
:table_id table-id
:name field-name
:display_name (common/name->human-readable-name field-name)
:base_type base-type
:special_type (sync/infer-field-special-type field-def)
:parent_id parent-id)]
(when nested-fields
(save-nested-fields! new-field nested-fields))))
;; TODO: we need to handle reactivation of a retired field
(defn- save-nested-fields!
"Save any nested `Fields` for a given parent `Field`.
All field-defs provided are assumed to be children of the given FIELD."
[{parent-id :id, table-id :table_id, :as parent-field} nested-field-defs]
;; NOTE: remember that we never retire any fields in dynamic-schema tables
(let [existing-field-name->field (into {} (for [{table-name :name, :as tbl} (db/sel :many field/Field, :visibility_type [not= "retired"], :parent_id parent-id)]
(let [existing-field-name->field (into {} (for [{table-name :name, :as tbl} (db/sel :many field/Field, :parent_id parent-id)]
{table-name tbl}))]
(u/prog1 (set/difference (set (map :name nested-field-defs)) (set (keys existing-field-name->field)))
(when (seq <>)
(log/debug (u/format-color 'blue "Found new nested fields for field '%s': %s" (:name parent-field) <>))))
(doseq [nested-field-def nested-field-defs]
(let [nested-field-def (assoc nested-field-def :parent-id parent-id)]
(let [{:keys [nested-fields], :as nested-field-def} (assoc nested-field-def :parent-id parent-id)]
;; NOTE: this recursively creates fields until we hit the end of the nesting
(if-let [existing-field (existing-field-name->field (:name nested-field-def))]
;; field already exists, so we UPDATE it
(update-field! existing-field nested-field-def)
(cond-> (field/update-field existing-field nested-field-def)
nested-fields (save-nested-fields! nested-fields))
;; looks like a new field, so we CREATE it
(create-field! table-id nested-field-def))))))
(cond-> (field/create-field table-id nested-field-def)
nested-fields (save-nested-fields! nested-fields)))))))
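To illustrate the recursion in `save-nested-fields!`, a hypothetical field-def of the kind a dynamic-schema driver's `describe-table` might produce for a document-style field; the names and base types are invented:
;; illustrative sketch, not part of the commit
{:name          "address"
 :base-type     :DictionaryField
 :nested-fields [{:name "city", :base-type :TextField}
                 {:name "zip",  :base-type :TextField}]}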
(defn save-table-fields!
"Save all `Fields` in the provided `Table`.
Since this is expected to be a table from a `:dynamic-schema` we actually do a `(describe-table)` here and use its
output as the basis for the synced fields.
(defn- save-table-fields!
"Save a collection of `Fields` for the given `Table`.
NOTE: we never retire/disable any fields in a dynamic schema database, so this process will only add/update `Fields`."
[{table-id :id, :as tbl}]
(let [database (table/database tbl)
driver (driver/engine->driver (:engine database))
table-def (u/prog1 (driver/describe-table driver database (select-keys tbl [:name :schema]))
(schema/validate driver/DescribeTable <>))
field-name->field (into {} (for [{field-name :name, :as fld} (db/sel :many field/Field, :table_id table-id, :parent_id nil)]
{field-name fld}))]
;; NOTE: with dynamic schemas we never disable fields automatically because we don't know when that's appropriate
[{table-id :id} field-defs]
{:pre [(integer? table-id)
(coll? field-defs)
(every? map? field-defs)]}
(let [field-name->field (into {} (for [{field-name :name, :as fld} (db/sel :many field/Field, :table_id table-id, :parent_id nil)]
{field-name fld}))]
;; NOTE: with dynamic schemas we never disable fields
;; create/update the fields
(doseq [{field-name :name, :as field-def} (:fields table-def)]
(doseq [{field-name :name, :keys [nested-fields], :as field-def} field-defs]
(if-let [existing-field (get field-name->field field-name)]
;; field already exists, so we UPDATE it
(update-field! existing-field field-def)
(cond-> (field/update-field existing-field field-def)
nested-fields (save-nested-fields! nested-fields))
;; looks like a new field, so we CREATE it
(create-field! table-id field-def)))))
(cond-> (field/create-field table-id field-def)
nested-fields (save-nested-fields! nested-fields))))))
(defn scan-table-and-update-data-model!
"Update the working `Table` and `Field` metadata for the given `Table`."
[driver database {raw-table-id :raw_table_id, :as existing-table}]
(when-let [raw-tbl (db/sel :one raw-table/RawTable :id raw-table-id)]
(try
(let [table-def (u/prog1 (driver/describe-table driver database (select-keys existing-table [:name :schema]))
(schema/validate driver/DescribeTable <>))]
(-> (table/update-table existing-table raw-tbl)
(save-table-fields! (:fields table-def))))
;; NOTE: dynamic schemas don't have FKs
(catch Throwable t
(log/error (u/format-color 'red "Unexpected error scanning table") t)))))
(defn scan-database-and-update-data-model!
"Update the working `Table` and `Field` metadata for *all* tables in the given `Database`."
[driver {database-id :id, :as database}]
{:pre [(integer? database-id)]}
;; quick sanity check that this is indeed a :dynamic-schema database
(when-not (driver/driver-supports? driver :dynamic-schema)
(throw (IllegalStateException. "This function cannot be called on databases which are not :dynamic-schema")))
;; retire any tables which are no longer with us
(sync/retire-tables! database)
(let [raw-tables (raw-table/active-tables database-id)
existing-tables (into {} (for [{raw-table-id :raw_table_id, :as table} (db/sel :many table/Table, :db_id database-id, :active true)]
{raw-table-id table}))]
;; create/update tables (and their fields)
;; NOTE: we make sure to skip the _metabase_metadata table here. it's not a normal table.
(doseq [{raw-table-id :id, :as raw-tbl} (filter #(not= "_metabase_metadata" (s/lower-case (:name %))) raw-tables)]
(try
(let [table-def (u/prog1 (driver/describe-table driver database (select-keys raw-tbl [:name :schema]))
(schema/validate driver/DescribeTable <>))]
(if-let [existing-table (get existing-tables raw-table-id)]
;; table already exists, update it
(->> (table/update-table existing-table raw-tbl)
(save-table-fields! (:fields table-def)))
;; must be a new table, insert it
(->> (table/create-table database-id (assoc raw-tbl :raw-table-id raw-table-id))
(save-table-fields! (:fields table-def)))))
(catch Throwable t
(log/error (u/format-color 'red "Unexpected error scanning table") t))))
;; NOTE: dynamic schemas don't have FKs
;; NOTE: if per chance there were multiple _metabase_metadata tables in different schemas, we just take the first
(when-let [_metabase_metadata (first (filter #(= (s/lower-case (:name %)) "_metabase_metadata") raw-tables))]
(sync/sync-metabase-metadata-table! driver database _metabase_metadata))))
......@@ -4,9 +4,9 @@
[triggers :as triggers])
[clojurewerkz.quartzite.schedule.cron :as cron]
(metabase [db :as db]
[driver :as driver]
[task :as task])
[metabase.models.database :refer [Database]]))
[metabase.models.database :refer [Database]]
[metabase.sync-database :as sync-database]))
(def ^:private ^:const sync-databases-job-key "metabase.task.sync-databases.job")
(def ^:private ^:const sync-databases-trigger-key "metabase.task.sync-databases.trigger")
......@@ -21,7 +21,7 @@
(for [database (db/sel :many Database :is_sample false)] ; skip Sample Dataset DB
(try
;; NOTE: this happens synchronously for now to avoid excessive load if there are lots of databases
(driver/sync-database! database)
(sync-database/sync-database! database)
(catch Throwable e
(log/error "Error syncing database: " (:id database) e))))))
......
(ns metabase.api.database-test
(:require [expectations :refer :all]
[korma.core :as k]
[metabase.db :refer :all]
[metabase.driver :as driver]
(metabase.models [database :refer [Database]]
......@@ -8,7 +9,8 @@
[metabase.test.data :refer :all]
(metabase.test.data [datasets :as datasets]
[users :refer :all])
[metabase.test.util :refer [match-$ random-name expect-eval-actual-first]]))
[metabase.test.util :refer [match-$ random-name expect-eval-actual-first]]
[metabase.util :as u]))
;; HELPER FNS
......@@ -56,6 +58,7 @@
:active $
:id $
:db_id $
:raw_table_id $
:created_at $}))
......@@ -168,36 +171,35 @@
;; GET /api/databases (include tables)
(let [db-name (str "A" (random-name))] ; make sure this name comes before "test-data"
(expect-eval-actual-first
(set (filter identity (conj (for [engine datasets/all-valid-engines]
(datasets/when-testing-engine engine
(let [database (get-or-create-test-data-db! (driver/engine->driver engine))]
(match-$ database
{:created_at $
:engine (name $engine)
:id $
:updated_at $
:name "test-data"
:is_sample false
:is_full_sync true
:organization_id nil
:description nil
:tables (->> (sel :many Table :db_id (:id database))
(mapv table-details)
(sort-by :name))
:features (mapv name (driver/features (driver/engine->driver engine)))}))))
;; (?) I don't remember why we have to do this for postgres but not any other of the bonus drivers
(match-$ (sel :one Database :name db-name)
{:created_at $
:engine "postgres"
:id $
:updated_at $
:name $
:is_sample false
:is_full_sync true
:organization_id nil
:description nil
:tables []
:features (mapv name (driver/features (driver/engine->driver :postgres)))}))))
(conj [(match-$ (sel :one Database :name db-name)
{:created_at $
:engine "postgres"
:id $
:updated_at $
:name $
:is_sample false
:is_full_sync true
:organization_id nil
:description nil
:tables []
:features (mapv name (driver/features (driver/engine->driver :postgres)))})]
(first (filter identity (for [engine datasets/all-valid-engines]
(datasets/when-testing-engine engine
(let [database (get-or-create-test-data-db! (driver/engine->driver engine))]
(match-$ database
{:created_at $
:engine (name $engine)
:id $
:updated_at $
:name "test-data"
:is_sample false
:is_full_sync true
:organization_id nil
:description nil
:tables (->> (sel :many Table :db_id (:id database))
(mapv table-details)
(sort-by :name))
:features (mapv name (driver/features (driver/engine->driver engine)))})))))))
(do
;; Delete all the randomly created Databases we've made so far
(cascade-delete Database :id [not-in (set (filter identity
......@@ -207,7 +209,7 @@
;; Add an extra DB so we have something to fetch besides the Test DB
(create-db db-name)
;; Now hit the endpoint
(set ((user->client :rasta) :get 200 "database" :include_tables true)))))
((user->client :rasta) :get 200 "database" :include_tables true))))
;; ## GET /api/meta/table/:id/query_metadata
;; TODO - add in example with Field :values
......@@ -223,60 +225,65 @@
:organization_id nil
:description nil
:features (mapv name (driver/features (driver/engine->driver :h2)))
:tables [(match-$ (Table (id :categories))
{:description nil
:entity_type nil
:visibility_type nil
:schema "PUBLIC"
:name "CATEGORIES"
:display_name "Categories"
:fields [(match-$ (Field (id :categories :id))
{:description nil
:table_id (id :categories)
:special_type "id"
:name "ID"
:display_name "Id"
:updated_at $
:active true
:id $
:field_type "info"
:position 0
:target nil
:preview_display true
:created_at $
:base_type "BigIntegerField"
:visibility_type "normal"
:fk_target_field_id $
:parent_id nil
:values []})
(match-$ (Field (id :categories :name))
{:description nil
:table_id (id :categories)
:special_type "name"
:name "NAME"
:display_name "Name"
:updated_at $
:active true
:id $
:field_type "info"
:position 0
:target nil
:preview_display true
:created_at $
:base_type "TextField"
:visibility_type "normal"
:fk_target_field_id $
:parent_id nil
:values []})]
:segments []
:metrics []
:rows 75
:updated_at $
:entity_name nil
:active true
:id (id :categories)
:db_id (id)
:created_at $})]})
:tables [(match-$ (Table (id :categories))
{:description nil
:entity_type nil
:visibility_type nil
:schema "PUBLIC"
:name "CATEGORIES"
:display_name "Categories"
:fields [(match-$ (Field (id :categories :id))
{:description nil
:table_id (id :categories)
:special_type "id"
:name "ID"
:display_name "Id"
:updated_at $
:active true
:id $
:raw_column_id $
:field_type "info"
:position 0
:target nil
:preview_display true
:created_at $
:last_analyzed $
:base_type "BigIntegerField"
:visibility_type "normal"
:fk_target_field_id $
:parent_id nil
:values []})
(match-$ (Field (id :categories :name))
{:description nil
:table_id (id :categories)
:special_type "name"
:name "NAME"
:display_name "Name"
:updated_at $
:active true
:id $
:raw_column_id $
:field_type "info"
:position 0
:target nil
:preview_display true
:created_at $
:last_analyzed $
:base_type "TextField"
:visibility_type "normal"
:fk_target_field_id $
:parent_id nil
:values []})]
:segments []
:metrics []
:rows 75
:updated_at $
:entity_name nil
:active true
:id (id :categories)
:raw_table_id $
:db_id (id)
:created_at $})]})
(let [resp ((user->client :rasta) :get 200 (format "database/%d/metadata" (id)))]
(assoc resp :tables (filter #(= "CATEGORIES" (:name %)) (:tables resp)))))
......@@ -288,11 +295,11 @@
(expect
(let [db-id (id)]
[(match-$ (Table (id :categories))
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CATEGORIES", :rows 75, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Categories"})
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CATEGORIES", :rows 75, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Categories", :raw_table_id $})
(match-$ (Table (id :checkins))
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CHECKINS", :rows 1000, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Checkins"})
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "CHECKINS", :rows 1000, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Checkins", :raw_table_id $})
(match-$ (Table (id :users))
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "USERS", :rows 15, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Users"})
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "USERS", :rows 15, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Users", :raw_table_id $})
(match-$ (Table (id :venues))
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "VENUES", :rows 100, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Venues"})])
{:description nil, :entity_type nil, :visibility_type nil, :schema "PUBLIC", :name "VENUES", :rows 100, :updated_at $, :entity_name nil, :active true, :id $, :db_id db-id, :created_at $, :display_name "Venues", :raw_table_id $})])
((user->client :rasta) :get 200 (format "database/%d/tables" (id))))
......@@ -30,6 +30,7 @@
(tu/match-$ (Field (id :users :name))
{:description nil
:table_id (id :users)
:raw_column_id $
:table (tu/match-$ (Table (id :users))
{:description nil
:entity_type nil
......@@ -44,11 +45,13 @@
:active true
:id (id :users)
:db_id (id)
:raw_table_id $
:created_at $})
:special_type "name"
:name "NAME"
:display_name "Name"
:updated_at $
:last_analyzed $
:active true
:id (id :users :name)
:field_type "info"
......