diff --git a/resources/migrations/000_migrations.yaml b/resources/migrations/000_migrations.yaml
index 13de99f8472463c96dccfd3f0b8d2f8ae56ac577..78f336bd117d1e85d68f8bc9157720de104cc142 100644
--- a/resources/migrations/000_migrations.yaml
+++ b/resources/migrations/000_migrations.yaml
@@ -3968,3 +3968,22 @@ databaseChangeLog:
         tableName: pulse
         columnName: name
         columnDataType: varchar(254)
+  - changeSet:
+      id: 70
+      author: camsaul
+      comment: 'Added 0.28.0'
+      changes:
+        - addColumn:
+            tableName: metabase_field
+            columns:
+              - column:
+                  name: database_type
+                  type: varchar(255)
+                  remarks: 'The actual type of this column in the database. e.g. VARCHAR or TEXT.'
+        # We want to enforce NOT NULL right away for all columns going forward so just put some sort of
+        # placeholder in place for existing columns.
+        - addNotNullConstraint:
+            tableName: metabase_field
+            columnName: database_type
+            columnDataType: varchar(255)
+            defaultNullValue: '?'
diff --git a/src/metabase/api/database.clj b/src/metabase/api/database.clj
index bd885a2eb6685145658338709b19d6c23e4ac145..fb4256593091d3bf69aedd8d98b31a09075b0179 100644
--- a/src/metabase/api/database.clj
+++ b/src/metabase/api/database.clj
@@ -40,7 +40,7 @@
     "value must be a valid database engine."))


-;;; ------------------------------------------------------------ GET /api/database ------------------------------------------------------------
+;;; ----------------------------------------------- GET /api/database ------------------------------------------------

 (defn- add-tables [dbs]
   (let [db-id->tables (group-by :db_id (filter mi/can-read? (db/select Table
@@ -258,7 +258,7 @@
         (log/warn "Error with autocomplete: " (.getMessage t)))))


-;;; ------------------------------------------------------------ GET /api/database/:id/fields ------------------------------------------------------------
+;;; ------------------------------------------ GET /api/database/:id/fields ------------------------------------------

 (api/defendpoint GET "/:id/fields"
   "Get a list of all `Fields` in `Database`."
@@ -277,7 +277,7 @@
                 :schema (:schema table)})))


-;;; ------------------------------------------------------------ GET /api/database/:id/idfields ------------------------------------------------------------
+;;; ----------------------------------------- GET /api/database/:id/idfields -----------------------------------------

 (api/defendpoint GET "/:id/idfields"
   "Get a list of all primary key `Fields` for `Database`."
@@ -287,7 +287,7 @@
             (hydrate :table)))))


-;;; ------------------------------------------------------------ POST /api/database ------------------------------------------------------------
+;;; ----------------------------------------------- POST /api/database -----------------------------------------------

 (defn- invalid-connection-response [field m]
   ;; work with the new {:field error-message} format but be backwards-compatible with the UI as it exists right now
@@ -324,11 +324,9 @@

 (s/defn ^:private test-connection-details :- su/Map
   "Try a making a connection to database ENGINE with DETAILS.
-   Tries twice: once with SSL, and a second time without if the first fails.
-   If either attempt is successful, returns the details used to successfully connect.
-   Otherwise returns a map with the connection error message. (This map will also
-   contain the key `:valid` = `false`, which you can use to distinguish an error from
-   valid details.)"
+  Tries twice: once with SSL, and a second time without if the first fails. If either attempt is successful, returns
+  the details used to successfully connect. Otherwise returns a map with the connection error message. (This map
+  will also contain the key `:valid` = `false`, which you can use to distinguish an error from valid details.)"
   [engine :- DBEngineString, details :- su/Map]
   (let [details (if (supports-ssl? engine)
                   (assoc details :ssl true)
@@ -398,7 +396,8 @@
   (let [details-or-error (test-connection-details engine details)]
     {:valid (not (false? (:valid details-or-error)))}))

-;;; ------------------------------------------------------------ POST /api/database/sample_dataset ------------------------------------------------------------
+
+;;; --------------------------------------- POST /api/database/sample_dataset ----------------------------------------

 (api/defendpoint POST "/sample_dataset"
   "Add the sample dataset as a new `Database`."
@@ -408,7 +407,7 @@
     (Database :is_sample true))


-;;; ------------------------------------------------------------ PUT /api/database/:id ------------------------------------------------------------
+;;; --------------------------------------------- PUT /api/database/:id ----------------------------------------------

 (api/defendpoint PUT "/:id"
   "Update a `Database`."
@@ -455,7 +454,7 @@
           (add-expanded-schedules db)))))))


-;;; ------------------------------------------------------------ DELETE /api/database/:id ------------------------------------------------------------
+;;; -------------------------------------------- DELETE /api/database/:id --------------------------------------------

 (api/defendpoint DELETE "/:id"
   "Delete a `Database`."
@@ -467,8 +466,7 @@
   api/generic-204-no-content)


-;;; ------------------------------------------------------------ POST /api/database/:id/sync ------------------------------------------------------------
-
+;;; ------------------------------------------ POST /api/database/:id/sync -------------------------------------------

 ;; TODO - Shouldn't we just check for superuser status instead of write checking?
 ;; NOTE Atte: This becomes maybe obsolete
diff --git a/src/metabase/db.clj b/src/metabase/db.clj
index 464d6b0ff8bd2983aa7368e96f684b2efbac60b1..6bd144b66919ea1371d5e232ea6c505d6ef52e97 100644
--- a/src/metabase/db.clj
+++ b/src/metabase/db.clj
@@ -21,9 +21,9 @@
            liquibase.Liquibase
            liquibase.resource.ClassLoaderResourceAccessor))

-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                               DB FILE & CONNECTION DETAILS                                              |
-;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                           DB FILE & CONNECTION DETAILS                                          |
+;;; +----------------------------------------------------------------------------------------------------------------+

 (def db-file
   "Path to our H2 DB file from env var or app config."
@@ -42,7 +42,8 @@
                  [(System/getProperty "user.dir") "/" db-file-name options]))))))

 (defn- parse-connection-string
-  "Parse a DB connection URI like `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory` and return a broken-out map."
+  "Parse a DB connection URI like `postgres://cam@localhost.com:5432/cams_cool_db?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory`
+  and return a broken-out map."
   [uri]
   (when-let [[_ protocol user pass host port db query] (re-matches #"^([^:/@]+)://(?:([^:/@]+)(?::([^:@]+))?@)?([^:@]+)(?::(\d+))?/([^/?]+)(?:\?(.*))?$" uri)]
     (merge {:type (case (keyword protocol)
@@ -101,9 +102,9 @@
     :postgres (dbspec/postgres (assoc db-details :db (:dbname db-details))))))

-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                         MIGRATE!                                                        |
-;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     MIGRATE!                                                    |
+;;; +----------------------------------------------------------------------------------------------------------------+

 (def ^:private ^:const ^String changelog-file "liquibase.yaml")

@@ -117,9 +118,10 @@

 (defn- migrations-lines
   "Return a sequnce of DDL statements that should be used to perform migrations for LIQUIBASE.

-   MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run one statement at a time;
-   Liquibase puts each DDL statement on its own line automatically so just split by lines and filter out blank / comment lines. Even though this
-   is not neccesary for H2 or Postgres go ahead and do it anyway because it keeps the code simple and doesn't make a significant performance difference."
+  MySQL gets snippy if we try to run the entire DB migration as one single string; it seems to only like it if we run
+  one statement at a time; Liquibase puts each DDL statement on its own line automatically so just split by lines and
+  filter out blank / comment lines. Even though this is not necessary for H2 or Postgres go ahead and do it anyway
+  because it keeps the code simple and doesn't make a significant performance difference."
   [^Liquibase liquibase]
   (for [line (s/split-lines (migrations-sql liquibase))
         :when (not (or (s/blank? line)
@@ -129,8 +131,9 @@
 (defn- has-unrun-migrations?
   "Does LIQUIBASE have migration change sets that haven't been run yet?

-   It's a good idea to Check to make sure there's actually something to do before running `(migrate :up)` because `migrations-sql` will
-   always contain SQL to create and release migration locks, which is both slightly dangerous and a waste of time when we won't be using them."
+  It's a good idea to check to make sure there's actually something to do before running `(migrate :up)` because
+  `migrations-sql` will always contain SQL to create and release migration locks, which is both slightly dangerous
+  and a waste of time when we won't be using them."
   ^Boolean [^Liquibase liquibase]
   (boolean (seq (.listUnrunChangeSets liquibase nil))))

@@ -146,14 +149,16 @@
   (u/auto-retry 5
     (when (has-migration-lock? liquibase)
       (Thread/sleep 2000)
-      (throw (Exception. "Database has migration lock; cannot run migrations. You can force-release these locks by running `java -jar metabase.jar migrate release-locks`.")))))
+      (throw (Exception. (str "Database has migration lock; cannot run migrations. You can force-release these locks "
+                              "by running `java -jar metabase.jar migrate release-locks`."))))))

 (defn- migrate-up-if-needed!
   "Run any unrun LIQUIBASE migrations, if needed.

   This creates SQL for the migrations to be performed, then executes each DDL statement.

-   Running `.update` directly doesn't seem to work as we'd expect; it ends up commiting the changes made and they can't be rolled back at
-   the end of the transaction block. Converting the migration to SQL string and running that via `jdbc/execute!` seems to do the trick."
+  Running `.update` directly doesn't seem to work as we'd expect; it ends up committing the changes made and they
+  can't be rolled back at the end of the transaction block. Converting the migration to SQL string and running that
+  via `jdbc/execute!` seems to do the trick."
   [conn, ^Liquibase liquibase]
   (log/info "Checking if Database has unrun migrations...")
   (when (has-unrun-migrations? liquibase)
@@ -169,10 +174,12 @@
      1.  This doesn't check to make sure the DB locks are cleared
      2.  Any DDL statements that fail are ignored

-   It can be used to fix situations where the database got into a weird state, as was common before the fixes made in #3295.
+  It can be used to fix situations where the database got into a weird state, as was common before the fixes made in
+  #3295.

-   Each DDL statement is ran inside a nested transaction; that way if the nested transaction fails we can roll it back without rolling back the entirety of changes
-   that were made. (If a single statement in a transaction fails you can't do anything futher until you clear the error state by doing something like calling `.rollback`.)"
+  Each DDL statement is run inside a nested transaction; that way if the nested transaction fails we can roll it back
+  without rolling back the entirety of changes that were made. (If a single statement in a transaction fails you
+  can't do anything further until you clear the error state by doing something like calling `.rollback`.)"
   [conn, ^Liquibase liquibase]
   (when (has-unrun-migrations? liquibase)
     (doseq [line (migrations-lines liquibase)]
@@ -185,7 +192,8 @@
             (log/error (u/format-color 'red "[ERROR] %s" (.getMessage e)))))))))

 (def ^{:arglists '([])} ^DatabaseFactory database-factory
-  "Return an instance of the Liquibase `DatabaseFactory`. This is done on a background thread at launch because otherwise it adds 2 seconds to startup time."
+  "Return an instance of the Liquibase `DatabaseFactory`. This is done on a background thread at launch because
+  otherwise it adds 2 seconds to startup time."
   (partial deref (future (DatabaseFactory/getInstance))))

 (defn- conn->liquibase
@@ -198,13 +206,13 @@
      (Liquibase. changelog-file (ClassLoaderResourceAccessor.) database))))

 (defn consolidate-liquibase-changesets
-  "Consolidate all previous DB migrations so they came from single file.
-   Previous migrations where stored in many small files which added seconds
-   per file to the startup time because liquibase was checking the jar
-   signature for each file. This function is required to correct the liquibase
-   tables to reflect that these migrations where moved to a single file.
+  "Consolidate all previous DB migrations so they come from a single file.

-   see https://github.com/metabase/metabase/issues/3715"
+  Previously migrations were stored in many small files which added seconds per file to the startup time because
+  liquibase was checking the jar signature for each file. This function is required to correct the liquibase tables to
+  reflect that these migrations were moved to a single file.
+
+  see https://github.com/metabase/metabase/issues/3715"
   [conn]
   (let [liquibases-table-name (if (#{:h2 :mysql} (db-type))
                                 "DATABASECHANGELOG"
@@ -216,17 +224,19 @@
     (jdbc/execute! conn [query "migrations/000_migrations.yaml"]))))

 (defn migrate!
- "Migrate the database (this can also be ran via command line like `java -jar metabase.jar migrate up` or `lein run migrate up`): - - * `:up` - Migrate up - * `:force` - Force migrate up, ignoring locks and any DDL statements that fail. - * `:down-one` - Rollback a single migration - * `:print` - Just print the SQL for running the migrations, don't actually run them. - * `:release-locks` - Manually release migration locks left by an earlier failed migration. - (This shouldn't be necessary now that we run migrations inside a transaction, but is available just in case). - - Note that this only performs *schema migrations*, not data migrations. Data migrations are handled separately by `metabase.db.migrations/run-all!`. - (`setup-db!`, below, calls both this function and `run-all!`)." + "Migrate the database (this can also be ran via command line like `java -jar metabase.jar migrate up` or `lein run + migrate up`): + + * `:up` - Migrate up + * `:force` - Force migrate up, ignoring locks and any DDL statements that fail. + * `:down-one` - Rollback a single migration + * `:print` - Just print the SQL for running the migrations, don't actually run them. + * `:release-locks` - Manually release migration locks left by an earlier failed migration. + (This shouldn't be necessary now that we run migrations inside a transaction, but is + available just in case). + + Note that this only performs *schema migrations*, not data migrations. Data migrations are handled separately by + `metabase.db.migrations/run-all!`. (`setup-db!`, below, calls both this function and `run-all!`)." ([] (migrate! :up)) ([direction] @@ -254,14 +264,15 @@ :done ;; If for any reason any part of the migrations fail then rollback all changes (catch Throwable e - ;; This should already be happening as a result of `db-set-rollback-only!` but running it twice won't hurt so better safe than sorry + ;; This should already be happening as a result of `db-set-rollback-only!` but running it twice won't hurt so + ;; better safe than sorry (.rollback (jdbc/get-connection conn)) (throw e)))))) -;;; +------------------------------------------------------------------------------------------------------------------------+ -;;; | CONNECTION POOLS & TRANSACTION STUFF | -;;; +------------------------------------------------------------------------------------------------------------------------+ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | CONNECTION POOLS & TRANSACTION STUFF | +;;; +----------------------------------------------------------------------------------------------------------------+ (defn connection-pool "Create a C3P0 connection pool for the given database SPEC." @@ -283,8 +294,10 @@ (.setTestConnectionOnCheckout false) (.setPreferredTestQuery nil) (.setProperties (u/prog1 (Properties.) - (doseq [[k v] (dissoc spec :classname :subprotocol :subname :naming :delimiters :alias-delimiter - :excess-timeout :minimum-pool-size :idle-connection-test-period)] + (doseq [[k v] (dissoc spec :classname :subprotocol :subname + :naming :delimiters :alias-delimiter + :excess-timeout :minimum-pool-size + :idle-connection-test-period)] (.setProperty <> (name k) (str v))))))}) (defn- create-connection-pool! [spec] @@ -295,10 +308,9 @@ (db/set-default-db-connection! 
     (connection-pool spec)))


-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                         DB SETUP                                                        |
-;;; +------------------------------------------------------------------------------------------------------------------------+
-
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                     DB SETUP                                                    |
+;;; +----------------------------------------------------------------------------------------------------------------+

 (def ^:private setup-db-has-been-called?
   (atom false))
@@ -309,19 +321,21 @@
   @setup-db-has-been-called?)

 (def ^:dynamic *allow-potentailly-unsafe-connections*
-  "We want to make *every* database connection made by the drivers safe -- read-only, only connect if DB file exists, etc.
-   At the same time, we'd like to be able to use driver functionality like `can-connect-with-details?` to check whether we can
-   connect to the Metabase database, in which case we'd like to allow connections to databases that don't exist.
+  "We want to make *every* database connection made by the drivers safe -- read-only, only connect if DB file exists,
+  etc. At the same time, we'd like to be able to use driver functionality like `can-connect-with-details?` to check
+  whether we can connect to the Metabase database, in which case we'd like to allow connections to databases that
+  don't exist.

-   So we need some way to distinguish the Metabase database from other databases. We could add a key to the details map
-   specifying that it's the Metabase DB, but what if some shady user added that key to another database?
+  So we need some way to distinguish the Metabase database from other databases. We could add a key to the details
+  map specifying that it's the Metabase DB, but what if some shady user added that key to another database?

-   We could check if a database details map matched `db-connection-details` above, but what if a shady user went Meta-Metabase
-   and added the Metabase DB to Metabase itself? Then when they used it they'd have potentially unsafe access.
+  We could check if a database details map matched `db-connection-details` above, but what if a shady user went
+  Meta-Metabase and added the Metabase DB to Metabase itself? Then when they used it they'd have potentially unsafe
+  access.

-   So this is where dynamic variables come to the rescue. We'll make this one `true` when we use `can-connect?` for the
-   Metabase DB, in which case we'll allow connection to non-existent H2 (etc.) files, and leave it `false` happily and
-   forever after, making all other connnections \"safe\"."
+  So this is where dynamic variables come to the rescue. We'll make this one `true` when we use `can-connect?` for the
+  Metabase DB, in which case we'll allow connection to non-existent H2 (etc.) files, and leave it `false` happily and
+  forever after, making all other connections \"safe\"."
   false)

 (defn- verify-db-connection
@@ -340,8 +354,9 @@
 (def ^:dynamic ^Boolean *disable-data-migrations*
   "Should we skip running data migrations when setting up the DB? (Default is `false`).

-   There are certain places where we don't want to do this; for example, none of the migrations should be ran when Metabase is launched via `load-from-h2`.
-   That's because they will end up doing things like creating duplicate entries for the \"magic\" groups and permissions entries. "
+  There are certain places where we don't want to do this; for example, none of the migrations should be run when
+  Metabase is launched via `load-from-h2`. That's because they will end up doing things like creating duplicate
+  entries for the \"magic\" groups and permissions entries."
   false)

 (defn- print-migrations-and-quit!
diff --git a/src/metabase/db/migrations.clj b/src/metabase/db/migrations.clj
index ec688001f41f67b964197e370e22bc076fa9564b..7eb11fd619a1a198bdabbb4961e0f5d099c8731b 100644
--- a/src/metabase/db/migrations.clj
+++ b/src/metabase/db/migrations.clj
@@ -1,8 +1,8 @@
 (ns metabase.db.migrations
   "Clojure-land data migration definitions and fns for running them.
-   These migrations are all ran once when Metabase is first launched, except when transferring data from an existing H2 database.
-   When data is transferred from an H2 database, migrations will already have been run against that data; thus, all of these migrations
-   need to be repeatable, e.g.:
+  These migrations are all run once when Metabase is first launched, except when transferring data from an existing
+  H2 database. When data is transferred from an H2 database, migrations will already have been run against that data;
+  thus, all of these migrations need to be repeatable, e.g.:

      CREATE TABLE IF NOT EXISTS ... -- Good
      CREATE TABLE ...               -- Bad"
@@ -54,7 +54,8 @@
 (def ^:private data-migrations (atom []))

 (defmacro ^:private defmigration
-  "Define a new data migration. This is just a simple wrapper around `defn-` that adds the resulting var to that `data-migrations` atom."
+  "Define a new data migration. This is just a simple wrapper around `defn-` that adds the resulting var to that
+  `data-migrations` atom."
   [migration-name & body]
   `(do (defn- ~migration-name [] ~@body)
        (swap! data-migrations conj #'~migration-name)))
@@ -69,9 +70,9 @@
   (log/info "Finished running data migrations."))


-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                        MIGRATIONS                                                       |
-;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                    MIGRATIONS                                                   |
+;;; +----------------------------------------------------------------------------------------------------------------+

 ;; Upgrade for the `Card` model when `:database_id`, `:table_id`, and `:query_type` were added and needed populating.
 ;;
@@ -150,9 +151,9 @@
       :visibility_type "normal")))


-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                      PERMISSIONS v1                                                     |
-;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                  PERMISSIONS v1                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+

 ;; Add users to default permissions groups. This will cause the groups to be created if needed as well.
 (defmigration ^{:author "camsaul", :added "0.20.0"} add-users-to-default-permissions-groups
@@ -179,7 +180,8 @@
                 :group_id (:id (perm-group/admin))
                 :object   "/"))))

-;; add existing databases to default permissions groups. default and metabot groups have entries for each individual DB
+;; add existing databases to default permissions groups. default and metabot groups have entries for each individual
+;; DB
 (defmigration ^{:author "camsaul", :added "0.20.0"} add-databases-to-magic-permissions-groups
   (let [db-ids (db/select-ids Database)]
     (doseq [{group-id :id} [(perm-group/all-users)
@@ -191,9 +193,9 @@
          :group_id group-id)))))


-;;; +------------------------------------------------------------------------------------------------------------------------+
-;;; |                                                     NEW TYPE SYSTEM                                                     |
-;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                 NEW TYPE SYSTEM                                                 |
+;;; +----------------------------------------------------------------------------------------------------------------+

 (def ^:private ^:const old-special-type->new-type
   {"avatar"                 "type/AvatarURL"
@@ -240,9 +242,9 @@
   (doseq [[_ t] old-base-type->new-type]
     (assert (isa? (keyword t) :type/*))))

-;; migrate all of the old base + special types to the new ones.
-;; This also takes care of any types that are already correct other than the fact that they're missing :type/ in the front.
-;; This was a bug that existed for a bit in 0.20.0-SNAPSHOT but has since been corrected
+;; migrate all of the old base + special types to the new ones. This also takes care of any types that are already
+;; correct other than the fact that they're missing :type/ in the front. This was a bug that existed for a bit in
+;; 0.20.0-SNAPSHOT but has since been corrected
 (defmigration ^{:author "camsaul", :added "0.20.0"} migrate-field-types
   (doseq [[old-type new-type] old-special-type->new-type]
     ;; migrate things like :timestamp_milliseconds -> :type/UNIXTimestampMilliseconds
@@ -259,24 +261,28 @@
     (db/update-where! 'Field {:base_type (name (keyword new-type))}
       :base_type new-type)))

-;; if there were invalid field types in the database anywhere fix those so the new stricter validation logic doesn't blow up
+;; if there were invalid field types in the database anywhere fix those so the new stricter validation logic doesn't
+;; blow up
 (defmigration ^{:author "camsaul", :added "0.20.0"} fix-invalid-field-types
   (db/update-where! 'Field {:base_type [:not-like "type/%"]}
     :base_type "type/*")
   (db/update-where! 'Field {:special_type [:not-like "type/%"]}
     :special_type nil))

-;; Copy the value of the old setting `-site-url` to the new `site-url` if applicable.
-;; (`site-url` used to be stored internally as `-site-url`; this was confusing, see #4188 for details)
-;; This has the side effect of making sure the `site-url` has no trailing slashes (as part of the magic setter fn; this was fixed as part of #4123)
+;; Copy the value of the old setting `-site-url` to the new `site-url` if applicable. (`site-url` used to be stored
+;; internally as `-site-url`; this was confusing, see #4188 for details) This has the side effect of making sure the
+;; `site-url` has no trailing slashes (as part of the magic setter fn; this was fixed as part of #4123)
 (defmigration ^{:author "camsaul", :added "0.23.0"} copy-site-url-setting-and-remove-trailing-slashes
   (when-let [site-url (db/select-one-field :value Setting :key "-site-url")]
     (public-settings/site-url site-url)))


-;;; ------------------------------------------------------------ Migrating QueryExecutions ------------------------------------------------------------
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                            Migrating QueryExecutions                                            |
+;;; +----------------------------------------------------------------------------------------------------------------+

-;; We're copying over data from the legacy `query_queryexecution` table to the new `query_execution` table; see #4522 and #4531 for details
+;; We're copying over data from the legacy `query_queryexecution` table to the new `query_execution` table; see #4522
+;; and #4531 for details

 ;; model definition for the old table to facilitate the data copying process
 (models/defmodel ^:private ^:deprecated LegacyQueryExecution :query_queryexecution)
@@ -301,8 +307,8 @@

 ;; Migrate entries from the old query execution table to the new one. This might take a few minutes
 (defmigration ^{:author "camsaul", :added "0.23.0"} migrate-query-executions
-  ;; migrate the most recent 100,000 entries
-  ;; make sure the DB doesn't get snippy by trying to insert too many records at once. Divide the INSERT statements into chunks of 1,000
+  ;; migrate the most recent 100,000 entries. Make sure the DB doesn't get snippy by trying to insert too many records
+  ;; at once. Divide the INSERT statements into chunks of 1,000
   (binding [query-execution/*validate-context* false]
     (doseq [chunk (partition-all 1000 (db/select LegacyQueryExecution {:limit 100000, :order-by [[:id :desc]]}))]
       (db/insert-many! QueryExecution
diff --git a/src/metabase/driver/bigquery.clj b/src/metabase/driver/bigquery.clj
index 98fecef91d1bc316c823488c1d89eadd0c757255..745f71308b8c83164c3b081e3f6fb1c3e6f34300 100644
--- a/src/metabase/driver/bigquery.clj
+++ b/src/metabase/driver/bigquery.clj
@@ -99,8 +99,9 @@

 (defn- table-schema->metabase-field-info [^TableSchema schema]
   (for [^TableFieldSchema field (.getFields schema)]
-    {:name      (.getName field)
-     :base-type (bigquery-type->base-type (.getType field))}))
+    {:name          (.getName field)
+     :database-type (.getType field)
+     :base-type     (bigquery-type->base-type (.getType field))}))

 (defn- describe-table [database {table-name :name}]
   {:schema nil
diff --git a/src/metabase/driver/druid.clj b/src/metabase/driver/druid.clj
index 8323e00bb935e482e816b81ea9d0ee6f021581fc..12bcf95339174a00655efb0e5851c43e633ae35d 100644
--- a/src/metabase/driver/druid.clj
+++ b/src/metabase/driver/druid.clj
@@ -73,10 +73,10 @@
 (defn- describe-table-field [druid-field-type field-name]
   ;; all dimensions are Strings, and all metrics as JS Numbers, I think (?)
   ;; string-encoded booleans + dates are treated as strings (!)
-  {:name      field-name
-   :base-type (if (= :metric druid-field-type)
-                :type/Float
-                :type/Text)})
+  (assoc (case druid-field-type
+           :metric    {:database-type "metric", :base-type :type/Float}
+           :dimension {:database-type "dimension", :base-type :type/Text})
+    :name field-name))

 (defn- describe-table [database table]
   (ssh/with-ssh-tunnel [details-with-tunnel (:details database)]
@@ -85,9 +85,10 @@
                 :name   (:name table)
                 :fields (set (concat
                               ;; every Druid table is an event stream w/ a timestamp field
-                              [{:name      "timestamp"
-                                :base-type :type/DateTime
-                                :pk?       true}]
+                              [{:name          "timestamp"
+                                :database-type "timestamp"
+                                :base-type     :type/DateTime
+                                :pk?           true}]
                               (map (partial describe-table-field :dimension) dimensions)
                               (map (partial describe-table-field :metric) metrics)))})))
diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj
index 9b5071abd4cf1f65e339077afacc9e39fdfee3b1..1cc011eca286c650e645ed14d82679185e90b558 100644
--- a/src/metabase/driver/generic_sql.clj
+++ b/src/metabase/driver/generic_sql.clj
@@ -325,20 +325,29 @@
             {:name   (:table_name  table)
              :schema (:table_schem table)})))

-(defn- describe-table-fields
-  [^DatabaseMetaData metadata, driver, {:keys [schema name]}]
-  (set (for [{:keys [column_name type_name]} (jdbc/result-set-seq (.getColumns metadata nil schema name nil))
-             :let [calculated-special-type (column->special-type driver column_name (keyword type_name))]]
-         (merge {:name      column_name
-                 :custom    {:column-type type_name}
-                 :base-type (or (column->base-type driver (keyword type_name))
-                                (do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :type/*."
-                                                      type_name))
-                                    :type/*))}
-                (when calculated-special-type
-                  (assert (isa? calculated-special-type :type/*)
-                    (str "Invalid type: " calculated-special-type))
-                  {:special-type calculated-special-type})))))
+(defn- database-type->base-type
+  "Given a `database-type` (e.g. `VARCHAR`) return the mapped Metabase type (e.g. `:type/Text`)."
+  [driver database-type]
+  (or (column->base-type driver (keyword database-type))
+      (do (log/warn (format "Don't know how to map column type '%s' to a Field base_type, falling back to :type/*."
+                            database-type))
+          :type/*)))
+
+(defn- calculated-special-type
+  "Get an appropriate special type for a column with `column-name` of type `database-type`."
+  [driver column-name database-type]
+  (when-let [special-type (column->special-type driver column-name (keyword database-type))]
+    (assert (isa? special-type :type/*)
+      (str "Invalid type: " special-type))
+    special-type))
+
+(defn- describe-table-fields [^DatabaseMetaData metadata, driver, {schema :schema, table-name :name}]
+  (set (for [{database-type :type_name, column-name :column_name} (jdbc/result-set-seq (.getColumns metadata nil schema table-name nil))]
+         (merge {:name          column-name
+                 :database-type (name database-type)
+                 :base-type     (database-type->base-type driver database-type)}
+                (when-let [special-type (calculated-special-type driver column-name database-type)]
+                  {:special-type special-type})))))

 (defn- add-table-pks
   [^DatabaseMetaData metadata, table]
diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj
index fa09f1d6eca37f9b0959c7bdfec77d94c2adce2b..28b51c0a9966d4d3318bf47e71d1c7d6b70d0188 100644
--- a/src/metabase/driver/mongo.clj
+++ b/src/metabase/driver/mongo.clj
@@ -13,7 +13,9 @@
              [database :refer [Database]]
              [field :as field]]
             [metabase.sync.interface :as si]
-            [metabase.util.ssh :as ssh]
+            [metabase.util
+             [schema :as su]
+             [ssh :as ssh]]
             [monger
              [collection :as mc]
              [command :as cmd]
@@ -104,22 +106,28 @@
          (find-nested-fields field-value nested-fields)
          nested-fields)))))

+(s/defn ^:private ^:always-validate most-common-object-type :- Class
+  [field-types :- [(s/pair Class "Class" s/Int "Int")]]
+  (->> field-types
+       (sort-by second)
+       last
+       first))
+
 (defn- describe-table-field [field-kw field-info]
-  ;; TODO: indicate preview-display status based on :len
-  (cond-> {:name      (name field-kw)
-           :base-type (->> (vec (:types field-info))
-                           (sort-by second)
-                           last
-                           first
-                           driver/class->base-type)}
-    (= :_id field-kw)           (assoc :pk? true)
-    (:special-types field-info) (assoc :special-type (->> (vec (:special-types field-info))
-                                                          (filter #(not (nil? (first %))))
-                                                          (sort-by second)
-                                                          last
-                                                          first))
-    (:nested-fields field-info) (assoc :nested-fields (set (for [field (keys (:nested-fields field-info))]
-                                                             (describe-table-field field (field (:nested-fields field-info))))))))
+  ;; :types
+  (let [most-common-object-type (most-common-object-type (vec (:types field-info)))]
+    ;; TODO: indicate preview-display status based on :len
+    (cond-> {:name          (name field-kw)
+             :database-type (.getName most-common-object-type)
+             :base-type     (driver/class->base-type most-common-object-type)}
+      (= :_id field-kw)           (assoc :pk? true)
+      (:special-types field-info) (assoc :special-type (->> (vec (:special-types field-info))
+                                                            (filter #(not (nil? (first %))))
+                                                            (sort-by second)
+                                                            last
+                                                            first))
+      (:nested-fields field-info) (assoc :nested-fields (set (for [field (keys (:nested-fields field-info))]
+                                                               (describe-table-field field (field (:nested-fields field-info)))))))))

 (defn- describe-database [database]
   (with-mongo-connection [^com.mongodb.DB conn database]
diff --git a/src/metabase/driver/presto.clj b/src/metabase/driver/presto.clj
index 16a78ba8cf73187fea3d15a032c630c4f7bcce01..f3e99d97a333bdc9002a897a33839515e3f92e2e 100644
--- a/src/metabase/driver/presto.clj
+++ b/src/metabase/driver/presto.clj
@@ -163,7 +163,9 @@
     {:schema schema
      :name   table-name
      :fields (set (for [[name type] rows]
-                    {:name name, :base-type (presto-type->base-type type)}))}))
+                    {:name          name
+                     :database-type type
+                     :base-type     (presto-type->base-type type)}))}))

 (defprotocol ^:private IPrepareValue
   (^:private prepare-value [this]))
diff --git a/src/metabase/sync/interface.clj b/src/metabase/sync/interface.clj
index 492b2b6941296cc1bac0d4b4d9b72ddc921ef13f..f1a9e9065c274bbcd4b0a1138be6c3b8a3ee6cb1 100644
--- a/src/metabase/sync/interface.clj
+++ b/src/metabase/sync/interface.clj
@@ -24,6 +24,7 @@
 (def TableMetadataField
   "Schema for a given Field as provided in `describe-table`."
   {:name                            su/NonBlankString
+   :database-type                   su/NonBlankString
    :base-type                       su/FieldType
    (s/optional-key :special-type)   (s/maybe su/FieldType)
    (s/optional-key :pk?)            s/Bool
diff --git a/src/metabase/sync/sync_metadata/fields.clj b/src/metabase/sync/sync_metadata/fields.clj
index 0ba6e55f279a6ce0d868d0a24308f40f07e161c6..f0edc4e45400a83dd0753497284d002a7cf9cec1 100644
--- a/src/metabase/sync/sync_metadata/fields.clj
+++ b/src/metabase/sync/sync_metadata/fields.clj
@@ -1,9 +1,9 @@
 (ns metabase.sync.sync-metadata.fields
   "Logic for updating Metabase Field models from metadata fetched from a physical DB.
-   The basic idea here is to look at the metadata we get from calling `describe-table` on a connected database,
-   then construct an identical set of metadata from what we have about that Table in the Metabase DB. Then we
-   iterate over both sets of Metadata and perform whatever steps are needed to make sure the things in the DB
-   match the things that came back from `describe-table`."
+  The basic idea here is to look at the metadata we get from calling `describe-table` on a connected database, then
+  construct an identical set of metadata from what we have about that Table in the Metabase DB. Then we iterate over
+  both sets of Metadata and perform whatever steps are needed to make sure the things in the DB match the things that
+  came back from `describe-table`."
   (:require [clojure.string :as str]
             [clojure.tools.logging :as log]
             [medley.core :as m]
@@ -24,8 +24,8 @@

 (def ^:private TableMetadataFieldWithID
   "Schema for `TableMetadataField` with an included ID of the corresponding Metabase Field object.
-   `our-metadata` is always returned in this format. (The ID is needed in certain places so we know
-   which Fields to retire, and the parent ID of any nested-fields.)"
+  `our-metadata` is always returned in this format. (The ID is needed in certain places so we know which Fields to
+  retire, and the parent ID of any nested-fields.)"
   (assoc i/TableMetadataField
     :id                             su/IntGreaterThanZero
     (s/optional-key :nested-fields) #{(s/recursive #'TableMetadataFieldWithID)}))
@@ -51,7 +51,8 @@
     :active false))

 (s/defn ^:private ->metabase-field! :- i/FieldInstance
-  "Return an active Metabase Field instance that matches NEW-FIELD-METADATA. This object will be created or reactivated as a side effect of calling this function."
+  "Return an active Metabase Field instance that matches NEW-FIELD-METADATA. This object will be created or
+  reactivated as a side effect of calling this function."
   [table :- i/TableInstance, new-field-metadata :- i/TableMetadataField, parent-id :- ParentID]
   (if-let [matching-inactive-field (matching-inactive-field table new-field-metadata parent-id)]
     ;; if the field already exists but was just marked inactive then reäctivate it
@@ -60,15 +61,16 @@
       ;; now return the Field in question
       (Field (u/get-id matching-inactive-field)))
     ;; otherwise insert a new field
-    (let [{field-name :name, :keys [base-type special-type pk? raw-column-id]} new-field-metadata]
+    (let [{field-name :name, :keys [database-type base-type special-type pk? raw-column-id]} new-field-metadata]
       (db/insert! Field
-        :table_id     (u/get-id table)
-        :name         field-name
-        :display_name (humanization/name->human-readable-name field-name)
-        :base_type    base-type
-        :special_type (or special-type
-                          (when pk? :type/PK))
-        :parent_id    parent-id))))
+        :table_id      (u/get-id table)
+        :name          field-name
+        :display_name  (humanization/name->human-readable-name field-name)
+        :database_type database-type
+        :base_type     base-type
+        :special_type  (or special-type
+                           (when pk? :type/PK))
+        :parent_id     parent-id))))


 (s/defn ^:private create-or-reactivate-field!
@@ -174,15 +176,16 @@
 (s/defn ^:private parent-id->fields :- {ParentID #{TableMetadataFieldWithID}}
   "Build a map of the Metabase Fields we have for TABLE, keyed by their parent id (usually `nil`)."
   [table :- i/TableInstance]
-  (->> (for [field (db/select [Field :name :base_type :special_type :parent_id :id]
+  (->> (for [field (db/select [Field :name :database_type :base_type :special_type :parent_id :id]
                      :table_id (u/get-id table)
                      :active   true)]
-         {:parent-id    (:parent_id field)
-          :id           (:id field)
-          :name         (:name field)
-          :base-type    (:base_type field)
-          :special-type (:special_type field)
-          :pk?          (isa? (:special_type field) :type/PK)})
+         {:parent-id     (:parent_id field)
+          :id            (:id field)
+          :name          (:name field)
+          :database-type (:database_type field)
+          :base-type     (:base_type field)
+          :special-type  (:special_type field)
+          :pk?           (isa? (:special_type field) :type/PK)})
       ;; make a map of parent-id -> set of
       (group-by :parent-id)
       ;; remove the parent ID because the Metadata from `describe-table` won't have it. Save the results as a set
@@ -194,7 +197,8 @@
   "Return information we have about Fields for a TABLE currently in the application database in (almost) exactly the
   same `TableMetadataField` format returned by `describe-table`."
   [table :- i/TableInstance]
-  ;; Fetch all the Fields for this TABLE. Then group them by their parent ID, which we'll use to construct our metadata in the correct format
+  ;; Fetch all the Fields for this TABLE. Then group them by their parent ID, which we'll use to construct our
+  ;; metadata in the correct format
   (let [parent-id->fields (parent-id->fields table)]
     ;; get all the top-level fields, then call `add-nested-fields` to recursively add the fields
    (set (for [field (get parent-id->fields nil)]
diff --git a/src/metabase/types.clj b/src/metabase/types.clj
index 6c7c8d450319ff1b18acb2b774c85d39e92be8d6..815d465cae63c8442415897953a95696ac0e3c1e 100644
--- a/src/metabase/types.clj
+++ b/src/metabase/types.clj
@@ -61,7 +61,8 @@
 (derive :type/Boolean :type/*)
 (derive :type/Enum :type/*)

-;;; Text-Like Types: Things that should be displayed as text for most purposes but that *shouldn't* support advanced filter options like starts with / contains
+;;; Text-Like Types: Things that should be displayed as text for most purposes but that *shouldn't* support advanced
+;;; filter options like starts with / contains
 (derive :type/TextLike :type/*)
 (derive :type/IPAddress :type/TextLike)
@@ -76,7 +77,8 @@
 (derive :type/ZipCode :type/Address)


-;;; Legacy Special Types. These will hopefully be going away in the future when we add columns like `:is_pk` and `:cardinality`
+;;; Legacy Special Types. These will hopefully be going away in the future when we add columns like `:is_pk` and
+;;; `:cardinality`

 (derive :type/Special :type/*)

@@ -91,11 +93,11 @@
 (derive :type/Name :type/Category)


-;;; ------------------------------------------------------------ Util Fns ------------------------------------------------------------
+;;; ---------------------------------------------------- Util Fns ----------------------------------------------------

 (defn types->parents
-  "Return a map of various types to their parent types.
-   This is intended for export to the frontend as part of `MetabaseBootstrap` so it can build its own implementation of `isa?`."
+  "Return a map of various types to their parent types. This is intended for export to the frontend as part of
+  `MetabaseBootstrap` so it can build its own implementation of `isa?`."
   []
   (into {} (for [t (descendants :type/*)]
              {t (parents t)})))
diff --git a/src/metabase/util.clj b/src/metabase/util.clj
index 03e82dfb2981935e1b1f97b71ce1b193745d24c7..6c19c683e2962060e4eff255397126143c512d15 100644
--- a/src/metabase/util.clj
+++ b/src/metabase/util.clj
@@ -27,12 +27,13 @@
            org.joda.time.DateTime
            org.joda.time.format.DateTimeFormatter))

-;; This is the very first log message that will get printed.
-;; It's here because this is one of the very first namespaces that gets loaded, and the first that has access to the logger
-;; It shows up a solid 10-15 seconds before the "Starting Metabase in STANDALONE mode" message because so many other namespaces need to get loaded
+;; This is the very first log message that will get printed. It's here because this is one of the very first
+;; namespaces that gets loaded, and the first that has access to the logger. It shows up a solid 10-15 seconds before
+;; the "Starting Metabase in STANDALONE mode" message because so many other namespaces need to get loaded
 (log/info (trs "Loading Metabase..."))

-;; Set the default width for pprinting to 200 instead of 72. The default width is too narrow and wastes a lot of space for pprinting huge things like expanded queries
+;; Set the default width for pprinting to 200 instead of 72. The default width is too narrow and wastes a lot of space
+;; for pprinting huge things like expanded queries
 (intern 'clojure.pprint '*print-right-margin* 200)

 (declare pprint-to-str)
@@ -368,7 +369,8 @@

 ;; TODO - rename to `url?`
 (defn is-url?
-  "Is STRING a valid HTTP/HTTPS URL? (This only handles `localhost` and domains like `metabase.com`; URLs containing IP addresses will return `false`.)"
+  "Is STRING a valid HTTP/HTTPS URL? (This only handles `localhost` and domains like `metabase.com`; URLs containing
+  IP addresses will return `false`.)"
   ^Boolean [^String s]
   (boolean (when (seq s)
              (when-let [^java.net.URL url (ignore-exceptions (java.net.URL. s))]
@@ -445,7 +447,8 @@
     (apply f (concat args bound-args))))

 (defmacro pdoseq
-  "(Almost) just like `doseq` but runs in parallel. Doesn't support advanced binding forms like `:let` or `:when` and only supports a single binding </3"
+  "(Almost) just like `doseq` but runs in parallel. Doesn't support advanced binding forms like `:let` or `:when` and
+  only supports a single binding </3"
   {:style/indent 1}
   [[binding collection] & body]
   `(dorun (pmap (fn [~binding]
@@ -464,7 +467,8 @@
       (seq more) (recur (inc i) more))))

 (defmacro prog1
-  "Execute FIRST-FORM, then any other expressions in BODY, presumably for side-effects; return the result of FIRST-FORM.
+  "Execute FIRST-FORM, then any other expressions in BODY, presumably for side-effects; return the result of
+  FIRST-FORM.

      (def numbers (atom []))

@@ -640,8 +644,8 @@

 (defn- check-protocol-impl-method-map
-  "Check that the methods expected for PROTOCOL are all implemented by METHOD-MAP, and that no extra methods are provided.
-   Used internally by `strict-extend`."
+  "Check that the methods expected for PROTOCOL are all implemented by METHOD-MAP, and that no extra methods are
+  provided. Used internally by `strict-extend`."
   [protocol method-map]
   (let [[missing-methods extra-methods] (data/diff (set (keys (:method-map protocol))) (set (keys method-map)))]
     (when missing-methods
@@ -650,10 +654,12 @@
       (throw (Exception. (format "Methods implemented that are not in %s: %s " (:var protocol) extra-methods))))))

 (defn strict-extend
-  "A strict version of `extend` that throws an exception if any methods declared in the protocol are missing or any methods not
-   declared in the protocol are provided.
-   Since this has better compile-time error-checking, prefer `strict-extend` to regular `extend` in all situations, and to
-   `extend-protocol`/ `extend-type` going forward." ; TODO - maybe implement strict-extend-protocol and strict-extend-type ?
+  "A strict version of `extend` that throws an exception if any methods declared in the protocol are missing or any
+  methods not declared in the protocol are provided.
+
+  Since this has better compile-time error-checking, prefer `strict-extend` to regular `extend` in all situations, and
+  to `extend-protocol`/ `extend-type` going forward."
+  ;; TODO - maybe implement strict-extend-protocol and strict-extend-type ?
   {:style/indent 1}
   [atype protocol method-map & more]
   (check-protocol-impl-method-map protocol method-map)
@@ -666,11 +672,12 @@
   ^String [^String s]
   (when (seq s)
     (s/replace
-     ;; First, "decompose" the characters. e.g. replace 'LATIN CAPITAL LETTER A WITH ACUTE' with 'LATIN CAPITAL LETTER A' + 'COMBINING ACUTE ACCENT'
-     ;; See http://docs.oracle.com/javase/8/docs/api/java/text/Normalizer.html
+     ;; First, "decompose" the characters. e.g. replace 'LATIN CAPITAL LETTER A WITH ACUTE' with 'LATIN CAPITAL LETTER
+     ;; A' + 'COMBINING ACUTE ACCENT' See http://docs.oracle.com/javase/8/docs/api/java/text/Normalizer.html
     (Normalizer/normalize s Normalizer$Form/NFD)
-     ;; next, remove the combining diacritical marks -- this SO answer explains what's going on here best: http://stackoverflow.com/a/5697575/1198455
-     ;; The closest thing to a relevant JavaDoc I could find was http://docs.oracle.com/javase/7/docs/api/java/lang/Character.UnicodeBlock.html#COMBINING_DIACRITICAL_MARKS
+     ;; next, remove the combining diacritical marks -- this SO answer explains what's going on here best:
+     ;; http://stackoverflow.com/a/5697575/1198455 The closest thing to a relevant JavaDoc I could find was
+     ;; http://docs.oracle.com/javase/7/docs/api/java/lang/Character.UnicodeBlock.html#COMBINING_DIACRITICAL_MARKS
     #"\p{Block=CombiningDiacriticalMarks}+"
     "")))

@@ -726,8 +733,9 @@
 (defn key-by
   "Convert a sequential COLL to a map of `(f item)` -> `item`.
-   This is similar to `group-by`, but the resultant map's values are single items from COLL rather than sequences of items.
-   (Because only a single item is kept for each value of `f`, items producing duplicate values will be discarded).
+  This is similar to `group-by`, but the resultant map's values are single items from COLL rather than sequences of
+  items. (Because only a single item is kept for each value of `f`, items producing duplicate values will be
+  discarded).

      (key-by :id [{:id 1, :name :a} {:id 2, :name :b}]) -> {1 {:id 1, :name :a}, 2 {:id 2, :name :b}}"
   {:style/indent 1}
@@ -868,12 +876,9 @@
   (->DateTimeFormatter "yyyy-MM-dd HH:mm:ss.SSS"))

 (def ^:private ordered-date-parsers
-  "When using clj-time.format/parse without a formatter, it tries all
-   default formatters, but not ordered by how likely the date
-   formatters will succeed. This leads to very slow parsing as many
-   attempts fail before the right one is found. Using this retains that
-   flexibility but improves performance by trying the most likely ones
-   first"
+  "When using clj-time.format/parse without a formatter, it tries all default formatters, but not ordered by how
+  likely the date formatters will succeed. This leads to very slow parsing as many attempts fail before the right one
+  is found. Using this retains that flexibility but improves performance by trying the most likely ones first"
   (let [most-likely-default-formatters [:mysql :date-hour-minute-second :date-time :date
                                         :basic-date-time :basic-date-time-no-ms
                                         :date-time :date-time-no-ms]]
@@ -882,9 +887,8 @@
          (vals (apply dissoc time/formatters most-likely-default-formatters)))))

 (defn str->date-time
-  "Like clj-time.format/parse but uses an ordered list of parsers to
-   be faster. Returns the parsed date or nil if it was unable to be
-   parsed."
+  "Like clj-time.format/parse but uses an ordered list of parsers to be faster. Returns the parsed date or nil if it
+  was unable to be parsed."
   ([^String date-str]
    (str->date-time date-str nil))

   ([^String date-str ^TimeZone tz]
diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj
index 73a878d6b9f4859d20910b8c6660d665649094a9..83c41777d46a5ce19a20752ce7a00e033f47d9d5 100644
--- a/test/metabase/api/database_test.clj
+++ b/test/metabase/api/database_test.clj
@@ -305,6 +305,7 @@
                             :special_type    "type/PK"
                             :name            "ID"
                             :display_name    "ID"
+                            :database_type   "BIGINT"
                             :base_type       "type/BigInteger"
                             :visibility_type "normal")
                      (assoc (field-details (Field (id :categories :name)))
@@ -312,6 +313,7 @@
                             :special_type    "type/Name"
                             :name            "NAME"
                             :display_name    "Name"
+                            :database_type   "VARCHAR"
                             :base_type       "type/Text"
                             :visibility_type "normal")]
    :segments []
diff --git a/test/metabase/api/field_test.clj b/test/metabase/api/field_test.clj
index 492da27ed088da8b9c592fe416194fca3697205e..a8587328de8bac8aaf86eb95840bc9c146b5cd61 100644
--- a/test/metabase/api/field_test.clj
+++ b/test/metabase/api/field_test.clj
@@ -78,6 +78,7 @@
            :position           0
            :preview_display    true
            :created_at         $
+           :database_type      "VARCHAR"
            :base_type          "type/Text"
            :fk_target_field_id nil
            :parent_id          nil})
diff --git a/test/metabase/api/table_test.clj b/test/metabase/api/table_test.clj
index 594b7ddf16290e202a69462b2de6f2bcb1d0cb80..8f9bfffc0902e8b40d5c20ab404846881c411563 100644
--- a/test/metabase/api/table_test.clj
+++ b/test/metabase/api/table_test.clj
@@ -129,8 +129,8 @@
   (->> ((user->client :rasta) :get 200 "table")
        (filter #(= (:db_id %) (data/id))) ; prevent stray tables from affecting unit test results
        (map #(dissoc %
-                     :raw_table_id :db :created_at :updated_at :schema :entity_name :description :entity_type :visibility_type
-                     :caveats :points_of_interest :show_in_getting_started :db_id :active))
+                     :raw_table_id :db :created_at :updated_at :schema :entity_name :description :entity_type
+                     :visibility_type :caveats :points_of_interest :show_in_getting_started :db_id :active))
        set))


@@ -173,18 +173,20 @@
     :display_name "Categories"
     :fields       [(assoc (field-details (Field (data/id :categories :id)))
                      :table_id     (data/id :categories)
-                     :special_type "type/PK"
-                     :name         "ID"
-                     :display_name "ID"
-                     :base_type    "type/BigInteger")
+                     :special_type  "type/PK"
+                     :name          "ID"
+                     :display_name  "ID"
+                     :database_type "BIGINT"
+                     :base_type     "type/BigInteger")
                    (assoc (field-details (Field (data/id :categories :name)))
-                     :table_id                 (data/id :categories)
-                     :special_type             "type/Name"
-                     :name                     "NAME"
-                     :display_name             "Name"
-                     :base_type                "type/Text"
-                     :values                   data/venue-categories
-                     :dimension_options        []
+                     :table_id                 (data/id :categories)
+                     :special_type             "type/Name"
+                     :name                     "NAME"
+                     :display_name             "Name"
+                     :database_type            "VARCHAR"
+                     :base_type                "type/Text"
+                     :values                   data/venue-categories
+                     :dimension_options        []
                      :default_dimension_option nil)]
     :rows         75
     :updated_at   $
@@ -195,8 +197,8 @@


 (def ^:private user-last-login-date-strs
-  "In an effort to be really annoying, the date strings returned by the API are different on Circle than they are locally.
-   Generate strings like '2014-01-01' at runtime so we get matching values."
+  "In an effort to be really annoying, the date strings returned by the API are different on Circle than they are
+  locally. Generate strings like '2014-01-01' at runtime so we get matching values."
(let [format-inst (fn [^java.util.Date inst] (format "%d-%02d-%02d" (+ (.getYear inst) 1900) @@ -212,7 +214,8 @@ (defs/field-values defs/test-data-map "users" "name")) ;;; GET api/table/:id/query_metadata?include_sensitive_fields -;;; Make sure that getting the User table *does* include info about the password field, but not actual values themselves +;; Make sure that getting the User table *does* include info about the password field, but not actual values +;; themselves (expect (merge (query-metadata-defaults) (match-$ (Table (data/id :users)) @@ -224,32 +227,35 @@ :table_id (data/id :users) :name "ID" :display_name "ID" + :database_type "BIGINT" :base_type "type/BigInteger" :visibility_type "normal") (assoc (field-details (Field (data/id :users :last_login))) - :table_id (data/id :users) - :name "LAST_LOGIN" - :display_name "Last Login" - :base_type "type/DateTime" - :visibility_type "normal" + :table_id (data/id :users) + :name "LAST_LOGIN" + :display_name "Last Login" + :database_type "TIMESTAMP" + :base_type "type/DateTime" + :visibility_type "normal" :dimension_options (var-get #'table-api/datetime-dimension-indexes) - :default_dimension_option (var-get #'table-api/date-default-index) - ) + :default_dimension_option (var-get #'table-api/date-default-index)) (assoc (field-details (Field (data/id :users :name))) - :special_type "type/Name" - :table_id (data/id :users) - :name "NAME" - :display_name "Name" - :base_type "type/Text" - :visibility_type "normal" - :values (map vector (sort user-full-names)) - :dimension_options [] + :special_type "type/Name" + :table_id (data/id :users) + :name "NAME" + :display_name "Name" + :database_type "VARCHAR" + :base_type "type/Text" + :visibility_type "normal" + :values (map vector (sort user-full-names)) + :dimension_options [] :default_dimension_option nil) (assoc (field-details (Field :table_id (data/id :users), :name "PASSWORD")) :special_type "type/Category" :table_id (data/id :users) :name "PASSWORD" :display_name "Password" + :database_type "VARCHAR" :base_type "type/Text" :visibility_type "sensitive")] :rows 15 @@ -268,39 +274,42 @@ :name "USERS" :display_name "Users" :fields [(assoc (field-details (Field (data/id :users :id))) - :table_id (data/id :users) - :special_type "type/PK" - :name "ID" - :display_name "ID" - :base_type "type/BigInteger") + :table_id (data/id :users) + :special_type "type/PK" + :name "ID" + :display_name "ID" + :database_type "BIGINT" + :base_type "type/BigInteger") (assoc (field-details (Field (data/id :users :last_login))) :table_id (data/id :users) :name "LAST_LOGIN" :display_name "Last Login" + :database_type "TIMESTAMP" :base_type "type/DateTime" :dimension_options (var-get #'table-api/datetime-dimension-indexes) :default_dimension_option (var-get #'table-api/date-default-index)) (assoc (field-details (Field (data/id :users :name))) - :table_id (data/id :users) - :special_type "type/Name" - :name "NAME" - :display_name "Name" - :base_type "type/Text" - :values [["Broen Olujimi"] - ["Conchúr Tihomir"] - ["Dwight Gresham"] - ["Felipinho Asklepios"] - ["Frans Hevel"] - ["Kaneonuskatew Eiran"] - ["Kfir Caj"] - ["Nils Gotam"] - ["Plato Yeshua"] - ["Quentin Sören"] - ["Rüstem Hebel"] - ["Shad Ferdynand"] - ["Simcha Yan"] - ["Spiros Teofil"] - ["Szymon Theutrich"]])] + :table_id (data/id :users) + :special_type "type/Name" + :name "NAME" + :display_name "Name" + :database_type "VARCHAR" + :base_type "type/Text" + :values [["Broen Olujimi"] + ["Conchúr Tihomir"] + ["Dwight Gresham"] + ["Felipinho Asklepios"] + ["Frans 
Hevel"] + ["Kaneonuskatew Eiran"] + ["Kfir Caj"] + ["Nils Gotam"] + ["Plato Yeshua"] + ["Quentin Sören"] + ["Rüstem Hebel"] + ["Shad Ferdynand"] + ["Simcha Yan"] + ["Spiros Teofil"] + ["Szymon Theutrich"]])] :rows 15 :updated_at $ :id (data/id :users) @@ -383,38 +392,40 @@ :relationship "Mt1" :origin (-> (fk-field-details checkins-user-field) (dissoc :target :dimensions :values) - (assoc :table_id (data/id :checkins) - :name "USER_ID" - :display_name "User ID" - :base_type "type/Integer" - :special_type "type/FK" - :table (merge (dissoc (table-defaults) :segments :field_values :metrics) - (match-$ (Table (data/id :checkins)) - {:schema "PUBLIC" - :name "CHECKINS" - :display_name "Checkins" - :rows 1000 - :updated_at $ - :id $ - :raw_table_id $ - :created_at $})))) + (assoc :table_id (data/id :checkins) + :name "USER_ID" + :display_name "User ID" + :database_type "INTEGER" + :base_type "type/Integer" + :special_type "type/FK" + :table (merge (dissoc (table-defaults) :segments :field_values :metrics) + (match-$ (Table (data/id :checkins)) + {:schema "PUBLIC" + :name "CHECKINS" + :display_name "Checkins" + :rows 1000 + :updated_at $ + :id $ + :raw_table_id $ + :created_at $})))) :destination (-> (fk-field-details users-id-field) (dissoc :target :dimensions :values) - (assoc :table_id (data/id :users) - :name "ID" - :display_name "ID" - :base_type "type/BigInteger" - :special_type "type/PK" - :table (merge (dissoc (table-defaults) :db :segments :field_values :metrics) - (match-$ (Table (data/id :users)) - {:schema "PUBLIC" - :name "USERS" - :display_name "Users" - :rows 15 - :updated_at $ - :id $ - :raw_table_id $ - :created_at $}))))}]) + (assoc :table_id (data/id :users) + :name "ID" + :display_name "ID" + :database_type "BIGINT" + :base_type "type/BigInteger" + :special_type "type/PK" + :table (merge (dissoc (table-defaults) :db :segments :field_values :metrics) + (match-$ (Table (data/id :users)) + {:schema "PUBLIC" + :name "USERS" + :display_name "Users" + :rows 15 + :updated_at $ + :id $ + :raw_table_id $ + :created_at $}))))}]) ((user->client :rasta) :get 200 (format "table/%d/fks" (data/id :users)))) ;; Make sure metadata for 'virtual' tables comes back as expected from GET /api/table/:id/query_metadata @@ -462,9 +473,8 @@ dim)))))) (defn- category-id-special-type - "Field values will only be returned when the field's special type is - set to type/Category. This function will change that for - category_id, then invoke `F` and roll it back afterwards" + "Field values will only be returned when the field's special type is set to type/Category. 
This function will change + that for category_id, then invoke `F` and roll it back afterwards" [special-type f] (let [original-special-type (:special_type (Field (data/id :venues :category_id)))] (try diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj index 95a9d69a2674e95cbafaa85a8a2f08bf9e0c6eee..301f7b1eef792c1695e0e5ab3e5caf451c074d5e 100644 --- a/test/metabase/driver/generic_sql_test.clj +++ b/test/metabase/driver/generic_sql_test.clj @@ -36,25 +36,25 @@ (expect {:name "VENUES" :schema "PUBLIC" - :fields #{{:name "NAME", - :custom {:column-type "VARCHAR"} - :base-type :type/Text} - {:name "LATITUDE" - :custom {:column-type "DOUBLE"} - :base-type :type/Float} - {:name "LONGITUDE" - :custom {:column-type "DOUBLE"} - :base-type :type/Float} - {:name "PRICE" - :custom {:column-type "INTEGER"} - :base-type :type/Integer} - {:name "CATEGORY_ID" - :custom {:column-type "INTEGER"} - :base-type :type/Integer} - {:name "ID" - :custom {:column-type "BIGINT"} - :base-type :type/BigInteger - :pk? true}}} + :fields #{{:name "NAME", + :database-type "VARCHAR" + :base-type :type/Text} + {:name "LATITUDE" + :database-type "DOUBLE" + :base-type :type/Float} + {:name "LONGITUDE" + :database-type "DOUBLE" + :base-type :type/Float} + {:name "PRICE" + :database-type "INTEGER" + :base-type :type/Integer} + {:name "CATEGORY_ID" + :database-type "INTEGER" + :base-type :type/Integer} + {:name "ID" + :database-type "BIGINT" + :base-type :type/BigInteger + :pk? true}}} (driver/describe-table (H2Driver.) (db) @venues-table)) ;; DESCRIBE-TABLE-FKS diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj index 30608e09d9c40dc46380f95c333588ff102c4911..2365f5950683febe2b8607cf7cdb2de1e25513c6 100644 --- a/test/metabase/driver/mongo_test.clj +++ b/test/metabase/driver/mongo_test.clj @@ -100,19 +100,25 @@ (datasets/expect-with-engine :mongo {:schema nil :name "venues" - :fields #{{:name "name" - :base-type :type/Text} - {:name "latitude" - :base-type :type/Float} - {:name "longitude" - :base-type :type/Float} - {:name "price" - :base-type :type/Integer} - {:name "category_id" - :base-type :type/Integer} - {:name "_id" - :base-type :type/Integer - :pk? true}}} + :fields #{{:name "name" + :database-type "java.lang.String" + :base-type :type/Text} + {:name "latitude" + :database-type "java.lang.Double" + :base-type :type/Float} + {:name "longitude" + :database-type "java.lang.Double" + :base-type :type/Float} + {:name "price" + :database-type "java.lang.Long" + :base-type :type/Integer} + {:name "category_id" + :database-type "java.lang.Long" + :base-type :type/Integer} + {:name "_id" + :database-type "java.lang.Long" + :base-type :type/Integer + :pk? true}}} (driver/describe-table (MongoDriver.) 
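;; Every field map returned by `describe-table` now carries the raw, driver-reported type under
;; `:database-type`. SQL drivers report a type name like "VARCHAR" or "BIGINT"; Mongo reports the
;; Java class of the sampled value. A representative entry from the expectation above:
;;
;;   {:name "latitude", :database-type "java.lang.Double", :base-type :type/Float}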
(data/db) (Table (data/id :venues)))) diff --git a/test/metabase/permissions_test.clj b/test/metabase/permissions_test.clj index dfab5d9df3196e4eadaee57cbdfa7f3892654ea8..188429bfe4df13037a09eb20eae6ee0881db9684 100644 --- a/test/metabase/permissions_test.clj +++ b/test/metabase/permissions_test.clj @@ -31,7 +31,7 @@ ;; lucky, member of All Users, Ops -;;; ------------------------------------------------------------ Ops Group ------------------------------------------------------------ +;;; -------------------------------------------------- Ops Group ---------------------------------------------------- ;; ops group is a group with only one member: lucky (def ^:dynamic *ops-group*) @@ -48,7 +48,7 @@ (f))))) -;;; ------------------------------------------------------------ DBs, Tables, & Fields ------------------------------------------------------------ +;;; --------------------------------------------- DBs, Tables, & Fields --------------------------------------------- (def db-details (delay (db/select-one [Database :details :engine] :id (data/id)))) @@ -77,8 +77,15 @@ (doseq [table-name ["venues" "users" "checkins"]] (db/insert! Table :db_id (u/get-id db), :active true, :name table-name)) ;; do the same for Fields - (doseq [field [{:table_id (u/get-id (table db :venues)), :name "price", :base_type :type/Integer, :special_type :type/Category} - {:table_id (u/get-id (table db :users)), :name "last_login", :base_type :type/DateTime}]] + (doseq [field [{:table_id (u/get-id (table db :venues)), + :name "price" + :database_type "INT" + :base_type :type/Integer + :special_type :type/Category} + {:table_id (u/get-id (table db :users)) + :name "last_login" + :database_type "TIMESTAMP" + :base_type :type/DateTime}]] (db/insert! Field field)) ;; ok ! (f db)))) @@ -109,7 +116,7 @@ (f))))) -;;; ------------------------------------------------------------ Cards ------------------------------------------------------------ +;;; ----------------------------------------------------- Cards ----------------------------------------------------- (defn- count-card [db table-name card-name] (let [table (table db table-name)] @@ -174,8 +181,7 @@ *card:db2-sql-count-of-users* db2-sql-count-of-users] (f))))) - -;;; ------------------------------------------------------------ Dashboards ------------------------------------------------------------ +;;; --------------------------------------------------- Dashboards --------------------------------------------------- (def ^:dynamic *dash:db1-all*) (def ^:dynamic *dash:db2-all*) @@ -218,7 +224,7 @@ (f))))) -;;; ------------------------------------------------------------ Pulses ------------------------------------------------------------ +;;; ----------------------------------------------------- Pulses ----------------------------------------------------- (def ^:dynamic *pulse:all*) (def ^:dynamic *pulse:db1-all*) @@ -297,7 +303,7 @@ (f))))) -;;; ------------------------------------------------------------ Metrics ------------------------------------------------------------ +;;; ---------------------------------------------------- Metrics ----------------------------------------------------- (def ^:dynamic *metric:db1-venues-count*) (def ^:dynamic *metric:db2-venues-count*) @@ -326,7 +332,8 @@ *metric:db2-users-count* db2-users-count] (f))))) -;;; ------------------------------------------------------------ Segments ------------------------------------------------------------ + +;;; ---------------------------------------------------- Segments 
---------------------------------------------------- (def ^:dynamic *segment:db1-expensive-venues*) (def ^:dynamic *segment:db2-expensive-venues*) @@ -365,8 +372,8 @@ *segment:db2-todays-users* db2-todays-users] (f))))) -;;; ------------------------------------------------------------ with everything! ------------------------------------------------------------ +;;; ------------------------------------------------ with everything! ------------------------------------------------ (defn -do-with-test-data [f] (((comp with-ops-group @@ -389,11 +396,11 @@ ~actual))) -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ -;;; | QUERY BUILDER | -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | QUERY BUILDER | +;;; +----------------------------------------------------------------------------------------------------------------+ -;;; ------------------------------------------------------------ GET /api/database?include_tables=true (Visible DBs + Tables) ------------------------------------------------------------ +;;; -------------------------- GET /api/database?include_tables=true (Visible DBs + Tables) -------------------------- (defn- GET-database [username] (vec (for [db ((test-users/user->client username) :get 200 "database", :include_tables true) @@ -418,7 +425,7 @@ (GET-database :lucky)) -;;; ------------------------------------------------------------ GET /api/table/:id/query_metadata ------------------------------------------------------------ +;;; --------------------------------------- GET /api/table/:id/query_metadata ---------------------------------------- (defn- GET-table-query-metadata [username db table-name] (not= ((test-users/user->client username) :get (format "table/%d/query_metadata" (u/get-id (table db table-name)))) @@ -449,7 +456,7 @@ (expect-with-test-data true (GET-table-query-metadata :lucky *db2* :venues)) -;;; ------------------------------------------------------------ POST /api/dataset (SQL query) ------------------------------------------------------------ +;;; ----------------------------------------- POST /api/dataset (SQL query) ------------------------------------------ (defn- sql-query [username db] (let [results ((test-users/user->client username) :post "dataset" @@ -466,17 +473,18 @@ (expect-with-test-data [[100]] (sql-query :rasta *db1*)) (expect-with-test-data [[100]] (sql-query :lucky *db1*)) -;; Only Admin should be able to ask SQL questions against DB 2. Error message is slightly different for Rasta & Lucky because Rasta has no permissions whatsoever for DB 2 while Lucky has partial perms +;; Only Admin should be able to ask SQL questions against DB 2. Error message is slightly different for Rasta & Lucky +;; because Rasta has no permissions whatsoever for DB 2 while Lucky has partial perms (expect-with-test-data [[100]] (sql-query :crowberto *db2*)) (expect-with-test-data "You don't have permissions to do that." (sql-query :rasta *db2*)) (expect-with-test-data #"You do not have read permissions for /db/\d+/native/\." 
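;; A sketch of why the two messages above differ (paths illustrative): Rasta has no grant at all for
;; DB 2, so the API fails fast with the generic "You don't have permissions to do that.", whereas
;; Lucky holds partial (schema-level) perms but nothing under e.g. "/db/2/native/", so the native
;; read check fails with the more specific message matched here.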
(sql-query :lucky *db2*)) -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ -;;; | SAVED QUESTIONS | -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | SAVED QUESTIONS | +;;; +----------------------------------------------------------------------------------------------------------------+ -;;; ------------------------------------------------------------ GET /api/card (Visible Cards) ------------------------------------------------------------ +;;; ----------------------------------------- GET /api/card (Visible Cards) ------------------------------------------ (defn- GET-card [username] (vec (for [card ((test-users/user->client username) :get 200 "card") @@ -514,7 +522,7 @@ (GET-card :lucky)) -;;; ------------------------------------------------------------ GET /api/card/:id ------------------------------------------------------------ +;;; ----------------------------------------------- GET /api/card/:id ------------------------------------------------ ;; just return true/false based on whether they were allowed to see the card (defn- GET-card-id [username card] @@ -552,11 +560,11 @@ (expect-with-test-data true (GET-card-id :lucky *card:db2-sql-count-of-users*)) -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ -;;; | DASHBOARDS | -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | DASHBOARDS | +;;; +----------------------------------------------------------------------------------------------------------------+ -;;; ------------------------------------------------------------ GET /api/dashboard (Visible Dashboards) ------------------------------------------------------------ +;;; ------------------------------------ GET /api/dashboard (Visible Dashboards) ------------------------------------- (defn- GET-dashboard [username] (vec (for [dashboard ((test-users/user->client username) :get 200 "dashboard") @@ -584,7 +592,7 @@ (GET-dashboard :lucky)) -;;; ------------------------------------------------------------ GET /api/dashboard/:id ------------------------------------------------------------ +;;; --------------------------------------------- GET /api/dashboard/:id --------------------------------------------- (defn- GET-dashboard-id [username dashboard] (not= ((test-users/user->client username) :get (str "dashboard/" (u/get-id dashboard))) @@ -603,11 +611,11 @@ (expect-with-test-data false (GET-dashboard-id :lucky *dash:db2-public*)) -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ -;;; | PULSES | -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ +;;; 
+----------------------------------------------------------------------------------------------------------------+ +;;; | PULSES | +;;; +----------------------------------------------------------------------------------------------------------------+ -;;; ------------------------------------------------------------ GET /api/pulse ------------------------------------------------------------ +;;; ------------------------------------------------- GET /api/pulse ------------------------------------------------- (defn- GET-pulse [username] (vec (for [pulse ((test-users/user->client username) :get 200 "pulse") @@ -637,7 +645,7 @@ (GET-pulse :lucky)) -;;; ------------------------------------------------------------ GET /api/pulse/:id ------------------------------------------------------------ +;;; ----------------------------------------------- GET /api/pulse/:id ----------------------------------------------- (defn- GET-pulse-id [username pulse] (not= ((test-users/user->client username) :get (str "pulse/" (u/get-id pulse))) @@ -665,11 +673,11 @@ (expect-with-test-data false (GET-pulse-id :lucky *pulse:db2-restricted*)) -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ -;;; | DATA REFERENCE | -;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | DATA REFERENCE | +;;; +----------------------------------------------------------------------------------------------------------------+ -;;; ------------------------------------------------------------ GET /api/metric (Visible Metrics) ------------------------------------------------------------ +;;; --------------------------------------- GET /api/metric (Visible Metrics) ---------------------------------------- (defn- GET-metric [username] (vec (for [metric ((test-users/user->client username) :get 200 "metric") @@ -695,7 +703,7 @@ (GET-metric :lucky)) -;;; ------------------------------------------------------------ GET /api/segment (Visible Segments) ------------------------------------------------------------ +;;; -------------------------------------- GET /api/segment (Visible Segments) --------------------------------------- (defn- GET-segment [username] (vec (for [segment ((test-users/user->client username) :get 200 "segment") @@ -721,7 +729,7 @@ (GET-segment :lucky)) -;;; ------------------------------------------------------------ GET /api/database/:id/metadata (Visible Tables) ------------------------------------------------------------ +;;; -------------------------------- GET /api/database/:id/metadata (Visible Tables) --------------------------------- (defn- GET-database-id-metadata [username db] (let [db ((test-users/user->client username) :get (format "database/%d/metadata" (u/get-id db)))] diff --git a/test/metabase/sync/sync_metadata/metabase_metadata_test.clj b/test/metabase/sync/sync_metadata/metabase_metadata_test.clj index 033159c5d35ad86de7cd9b8cc33f05a4dcb85ded..aa8c465f06c3db2faa256043e3348368647fc723 100644 --- a/test/metabase/sync/sync_metadata/metabase_metadata_test.clj +++ b/test/metabase/sync/sync_metadata/metabase_metadata_test.clj @@ -37,9 +37,10 @@ :name "movies" :active true)] (db/insert! 
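;; `database_type` is NOT NULL going forward, so every direct `Field` insert in the test suite now
;; has to supply it alongside `:base_type`; omitting it here would fail the constraint.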
Field - :base_type :type/Boolean - :table_id (u/get-id table) - :name "filming") + :database_type "BOOL" + :base_type :type/Boolean + :table_id (u/get-id table) + :name "filming") ;; here we go [(get-table-and-fields-descriptions table) (do diff --git a/test/metabase/sync_database/sync_dynamic_test.clj b/test/metabase/sync_database/sync_dynamic_test.clj index ff8743a5dc6ff7f7532bfacf533f1ef48eed0406..404dcdd3c8e18023c9410a1aae94d8b5a6cf4e83 100644 --- a/test/metabase/sync_database/sync_dynamic_test.clj +++ b/test/metabase/sync_database/sync_dynamic_test.clj @@ -26,7 +26,7 @@ (update :fields (fn [fields] (for [field fields] (u/select-non-nil-keys field [:table_id :name :fk_target_field_id :parent_id :base_type - :special_type]))))))) + :special_type :database_type]))))))) (defn- get-tables [database-or-id] (->> (hydrate (db/select Table, :db_id (u/get-id database-or-id), {:order-by [:id]}) :fields) @@ -40,9 +40,13 @@ (remove-nonsense (get-tables db)))) -;;; ------------------------------------------------------------ Tests for sync-metadata ------------------------------------------------------------ +;;; +----------------------------------------------------------------------------------------------------------------+ +;;; | TESTS FOR SYNC METADATA | +;;; +----------------------------------------------------------------------------------------------------------------+ -;; TODO - At some point these tests should be moved into a `sync-metadata-test` or `sync-metadata.fields-test` namespace + +;; TODO - At some point these tests should be moved into a `sync-metadata-test` or `sync-metadata.fields-test` +;; namespace ;; make sure nested fields get resynced correctly if their parent field didn't change (expect @@ -105,11 +109,12 @@ toucan-field-id (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan")) details-field-id (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id)) gender-field-id (u/get-id (db/insert! Field - :name "gender" - :base_type "type/Text" - :table_id transactions-table-id - :parent_id details-field-id - :active true))] + :name "gender" + :database_type "VARCHAR" + :base_type "type/Text" + :table_id transactions-table-id + :parent_id details-field-id + :active true))] ;; now sync again. (sync-metadata/sync-db-metadata! db) @@ -127,17 +132,19 @@ toucan-field-id (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan")) details-field-id (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id)) food-likes-field-id (u/get-id (db/insert! Field - :name "food-likes" - :base_type "type/Dictionary" - :table_id transactions-table-id - :parent_id details-field-id - :active true)) - blueberries-field-id (u/get-id (db/insert! Field - :name "blueberries" - :base_type "type/Boolean" - :table_id transactions-table-id - :parent_id food-likes-field-id - :active true))] + :name "food-likes" + :database_type "OBJECT" + :base_type "type/Dictionary" + :table_id transactions-table-id + :parent_id details-field-id + :active true)) + blueberries-field-id (u/get-id (db/insert! Field + :name "blueberries" + :database_type "BOOLEAN" + :base_type "type/Boolean" + :table_id transactions-table-id + :parent_id food-likes-field-id + :active true))] ;; now sync again. (sync-metadata/sync-db-metadata! 
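;; Nested fields are ordinary Field rows linked through `:parent_id`, so the "toucan.details.gender"
;; hierarchy built above is three chained rows; a sketch (ids hypothetical):
;;
;;   {:id 10, :name "toucan",  :parent_id nil, :database_type "OBJECT"}
;;   {:id 11, :name "details", :parent_id 10,  :database_type "OBJECT"}
;;   {:id 12, :name "gender",  :parent_id 11,  :database_type "VARCHAR"}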
db) diff --git a/test/metabase/sync_database_test.clj b/test/metabase/sync_database_test.clj index ee789b90d7b1052e6b991c46c044b21370f8d6ab..19ba81dd3a2c83e22356f96e593baa0f5f2cd9b2 100644 --- a/test/metabase/sync_database_test.clj +++ b/test/metabase/sync_database_test.clj @@ -22,21 +22,26 @@ (def ^:private ^:const sync-test-tables - {"movie" {:name "movie" + {"movie" {:name "movie" :schema "default" - :fields #{{:name "id" - :base-type :type/Integer} - {:name "title" - :base-type :type/Text} - {:name "studio" - :base-type :type/Text}}} - "studio" {:name "studio" + :fields #{{:name "id" + :database-type "SERIAL" + :base-type :type/Integer} + {:name "title" + :database-type "VARCHAR" + :base-type :type/Text} + {:name "studio" + :database-type "VARCHAR" + :base-type :type/Text}}} + "studio" {:name "studio" :schema nil - :fields #{{:name "studio" - :base-type :type/Text - :special-type :type/PK} - {:name "name" - :base-type :type/Text}}}}) + :fields #{{:name "studio" + :database-type "VARCHAR" + :base-type :type/Text + :special-type :type/PK} + {:name "name" + :database-type "VARCHAR" + :base-type :type/Text}}}}) ;; TODO - I'm 90% sure we could just reüse the "MovieDB" instead of having this subset of it used here @@ -126,34 +131,39 @@ :name "movie" :display_name "Movie" :fields [(merge field-defaults - {:special_type :type/PK - :name "id" - :display_name "ID" - :base_type :type/Integer}) + {:special_type :type/PK + :name "id" + :display_name "ID" + :database_type "SERIAL" + :base_type :type/Integer}) (merge field-defaults {:special_type :type/FK :name "studio" :display_name "Studio" + :database_type "VARCHAR" :base_type :type/Text :fk_target_field_id true}) (merge field-defaults - {:special_type nil - :name "title" - :display_name "Title" - :base_type :type/Text})]}) + {:special_type nil + :name "title" + :display_name "Title" + :database_type "VARCHAR" + :base_type :type/Text})]}) (merge table-defaults {:name "studio" :display_name "Studio" :fields [(merge field-defaults - {:special_type :type/Name - :name "name" - :display_name "Name" - :base_type :type/Text}) + {:special_type :type/Name + :name "name" + :display_name "Name" + :database_type "VARCHAR" + :base_type :type/Text}) (merge field-defaults - {:special_type :type/PK - :name "studio" - :display_name "Studio" - :base_type :type/Text})]})] + {:special_type :type/PK + :name "studio" + :display_name "Studio" + :database_type "VARCHAR" + :base_type :type/Text})]})] (tt/with-temp Database [db {:engine :sync-test}] (sync-database! 
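;; Note the key convention in this test: the driver-facing `sync-test-tables` maps use kebab-case
;; `:database-type`, while the synced Field rows asserted on use the snake_case column, e.g.
;;
;;   {:name "title", :database-type "VARCHAR"}   ; as emitted by describe-table
;;   {:name "title", :database_type "VARCHAR"}   ; as stored on the Field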
db) ;; we are purposely running the sync twice to test for possible logic issues which only manifest @@ -170,20 +180,23 @@ :name "movie" :display_name "Movie" :fields [(merge field-defaults - {:special_type :type/PK - :name "id" - :display_name "ID" - :base_type :type/Integer}) + {:special_type :type/PK + :name "id" + :display_name "ID" + :database_type "SERIAL" + :base_type :type/Integer}) (merge field-defaults - {:special_type nil - :name "studio" - :display_name "Studio" - :base_type :type/Text}) + {:special_type nil + :name "studio" + :display_name "Studio" + :database_type "VARCHAR" + :base_type :type/Text}) (merge field-defaults - {:special_type nil - :name "title" - :display_name "Title" - :base_type :type/Text})]}) + {:special_type nil + :name "title" + :display_name "Title" + :database_type "VARCHAR" + :base_type :type/Text})]}) (tt/with-temp* [Database [db {:engine :sync-test}] Table [table {:name "movie" :schema "default" @@ -214,7 +227,8 @@ ;; only one sync should be going on at a time (expect - ;; describe-database gets called twice during a single sync process, once for syncing tables and a second time for syncing the _metabase_metadata table + ;; describe-database gets called twice during a single sync process, once for syncing tables and a second time for + ;; syncing the _metabase_metadata table 2 (tt/with-temp* [Database [db {:engine :concurrent-sync-test}]] (reset! calls-to-describe-database 0) @@ -225,7 +239,8 @@ (Thread/sleep 200) ;; Start another in the background. Nothing should happen here because the first is already running (future (sync-database! db)))] - ;; Start another in the foreground. Again, nothing should happen here because the original should still be running + ;; Start another in the foreground. Again, nothing should happen here because the original should still be + ;; running (sync-database! db) ;; make sure both of the futures have finished (deref f1) @@ -234,8 +249,8 @@ @calls-to-describe-database))) -;; Test that we will remove field-values when they aren't appropriate. -;; Calling `sync-database!` below should cause them to get removed since the Field doesn't have an appropriate special type +;; Test that we will remove field-values when they aren't appropriate. Calling `sync-database!` below should cause +;; them to get removed since the Field doesn't have an appropriate special type (expect [[1 2 3] nil] @@ -295,7 +310,8 @@ {:special_type :type/FK, :fk_target_field_id true}] (let [field-id (id :checkins :user_id) get-special-type-and-fk-exists? (fn [] - (into {} (-> (db/select-one [Field :special_type :fk_target_field_id], :id field-id) + (into {} (-> (db/select-one [Field :special_type :fk_target_field_id], + :id field-id) (update :fk_target_field_id #(db/exists? Field :id %)))))] [ ;; FK should exist to start with (get-special-type-and-fk-exists?) @@ -364,7 +380,8 @@ ;; field values should exist... (assert (= (count (db/select-one-field :values FieldValues :field_id field-id)) 100)) - ;; ok, now insert enough rows to push the field past the `low-cardinality-threshold` and sync again, there should be no more field values + ;; ok, now insert enough rows to push the field past the `low-cardinality-threshold` and sync again, + ;; there should be no more field values (exec! [(insert-range-sql (range 100 (+ 100 field-values/low-cardinality-threshold)))]) (sync-database! db) (db/exists? 
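;; A sketch of the rule being exercised: FieldValues are kept only while the field stays at or below
;; `field-values/low-cardinality-threshold` distinct values; pushing it past the threshold means the
;; next sync deletes them, which is what the surrounding `db/exists?` check verifies.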
FieldValues :field_id field-id))))))))
diff --git a/test/metabase/test/data/interface.clj b/test/metabase/test/data/interface.clj
index 5a3c89f006f6307de200df501208fb9cb16e19f4..7a0a37db8e81a23cb592d34ca3d737e24965843c 100644
--- a/test/metabase/test/data/interface.clj
+++ b/test/metabase/test/data/interface.clj
@@ -39,19 +39,21 @@
 (defn db-qualified-table-name
   "Return a combined table name qualified with the name of its database, suitable for use as an identifier.
-   Provided for drivers where testing wackiness makes it hard to actually create separate Databases, such as Oracle, where this is disallowed on RDS.
-   (Since Oracle can't create seperate DBs, we just create various tables in the same DB; thus their names must be qualified to differentiate them effectively.)"
+   Provided for drivers where testing wackiness makes it hard to actually create separate Databases, such as Oracle,
+   where this is disallowed on RDS. (Since Oracle can't create separate DBs, we just create various tables in the same
+   DB; thus their names must be qualified to differentiate them effectively.)"
   ^String [^String database-name, ^String table-name]
   {:pre [(string? database-name) (string? table-name)]}
   ;; take up to last 30 characters because databases like Oracle have limits on the lengths of identifiers
   (apply str (take-last 30 (str/replace (str/lower-case (str database-name \_ table-name)) #"-" "_"))))

 (defn single-db-qualified-name-components
-  "Implementation of `qualified-name-components` for drivers like Oracle and Redshift that must use a single existing DB for testing.
-   This implementation simulates separate databases by doing two things:
+  "Implementation of `qualified-name-components` for drivers like Oracle and Redshift that must use a single existing
+   DB for testing. This implementation simulates separate databases by doing two things:

    1. Using a \"session schema\" to make sure each test run is isolated from other test runs
-   2. Embedding the name of the database into table names, e.g. to differentiate \"test_data_categories\" and \"tupac_sightings_categories\".
+   2. Embedding the name of the database into table names, e.g. to differentiate \"test_data_categories\" and
+      \"tupac_sightings_categories\".

    To use this implementation, partially bind this function with a SESSION-SCHEMA:
@@ -64,8 +66,8 @@
 (defprotocol IMetabaseInstance
   (metabase-instance [this context]
     "Return the Metabase object associated with this definition, if applicable. CONTEXT should be the parent
-     object (the actual instance, *not* the definition) of the Metabase object to return (e.g., a pass a `Table` to a `FieldDefintion`). For a `DatabaseDefinition`,
-     pass the engine keyword."))
+     object (the actual instance, *not* the definition) of the Metabase object to return (e.g., pass a `Table` to a
+     `FieldDefinition`). For a `DatabaseDefinition`, pass the engine keyword."))

 (extend-protocol IMetabaseInstance
   FieldDefinition
@@ -74,7 +76,8 @@
   TableDefinition
   (metabase-instance [this database]
-    ;; Look first for an exact table-name match; otherwise allow DB-qualified table names for drivers that need them like Oracle
+    ;; Look first for an exact table-name match; otherwise allow DB-qualified table names for drivers that need them
+    ;; like Oracle
     (or (Table :db_id (:id database), :%lower.name (str/lower-case (:table-name this)))
        (Table :db_id (:id database), :%lower.name (db-qualified-table-name (:name database) (:table-name this)))))
@@ -95,10 +98,11 @@
     "Return the engine keyword associated with this database, e.g. `:h2` or `:mongo`.")

   (database->connection-details [this, ^Keyword context, ^DatabaseDefinition database-definition]
-    "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database` details map)
-     CONTEXT is one of:
+    "Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database`
+     details map). CONTEXT is one of:

-     *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for creating/destroying databases)
+     *  `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for
+        creating/destroying databases)
      *  `:db` - Return details for connecting specifically to the DB.")

   (create-db! [this, ^DatabaseDefinition database-definition]
@@ -112,20 +116,23 @@
   (expected-base-type->actual [this base-type]
     "*OPTIONAL*. Return the base type type that is actually used to store `Fields` of BASE-TYPE.
-     The default implementation of this method is an identity fn. This is provided so DBs that don't support a given BASE-TYPE used in the test data
-     can specifiy what type we should expect in the results instead.
-     For example, Oracle has no `INTEGER` data types, so `:type/Integer` test values are instead stored as `NUMBER`, which we map to `:type/Decimal`.")
+     The default implementation of this method is an identity fn. This is provided so DBs that don't support a given
+     BASE-TYPE used in the test data can specify what type we should expect in the results instead. For example,
+     Oracle has no `INTEGER` data types, so `:type/Integer` test values are instead stored as `NUMBER`, which we map
+     to `:type/Decimal`.")

   (format-name ^String [this, ^String table-or-field-name]
     "*OPTIONAL* Transform a lowercase string `Table` or `Field` name in a way appropriate for this dataset (e.g.,
     `h2` would want to upcase these names; `mongo` would want to use `\"_id\"` in place of `\"id\"`.")

   (has-questionable-timezone-support? ^Boolean [this]
-    "*OPTIONAL*. Does this driver have \"questionable\" timezone support? (i.e., does it group things by UTC instead of the `US/Pacific` when we're testing?)
+    "*OPTIONAL*. Does this driver have \"questionable\" timezone support? (i.e., does it group things by UTC instead
+     of the `US/Pacific` when we're testing?).
     Defaults to `(not (contains? (metabase.driver/features this) :set-timezone)`")

   (id-field-type ^clojure.lang.Keyword [this]
-    "*OPTIONAL* Return the `base_type` of the `id` `Field` (e.g. `:type/Integer` or `:type/BigInteger`). Defaults to `:type/Integer`."))
+    "*OPTIONAL* Return the `base_type` of the `id` `Field` (e.g. `:type/Integer` or `:type/BigInteger`). Defaults to
+     `:type/Integer`."))

 (def IDriverTestExtensionsDefaultsMixin
   "Default implementations for the `IDriverTestExtensions` methods marked *OPTIONAL*."
@@ -226,7 +233,8 @@
           (str \_ (flatten-field-name fk-dest-name))))))

 (defn flatten-dbdef
-  "Create a flattened version of DBDEF by following resolving all FKs and flattening all rows into the table with TABLE-NAME."
+  "Create a flattened version of DBDEF by resolving all FKs and flattening all rows into the table with
+   TABLE-NAME."
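;; Worked example for `db-qualified-table-name` defined earlier in this file (values illustrative):
;;
;;   (db-qualified-table-name "test-data" "categories")
;;   ;; => "test_data_categories"   ; lower-cased, hyphens -> underscores, last 30 chars kept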
[^DatabaseDefinition dbdef, ^String table-name] (create-database-definition (:database-name dbdef) [table-name @@ -247,8 +255,8 @@ default))) (defn- to-system-env-var-str - "Converts the clojure environment variable form (a keyword) to a - stringified version that will be specified at the system level + "Converts the clojure environment variable form (a keyword) to a stringified version that will be specified at the + system level i.e. :foo-bar -> FOO_BAR" [env-var-kwd] @@ -258,7 +266,7 @@ str/upper-case)) (defn db-test-env-var-or-throw - "Same as `db-test-env-var` but will throw an exception if the variable is nil" + "Same as `db-test-env-var` but will throw an exception if the variable is `nil`." ([engine env-var] (db-test-env-var-or-throw engine env-var nil)) ([engine env-var default] diff --git a/test/metabase/test/mock/toucanery.clj b/test/metabase/test/mock/toucanery.clj index 24038e8086bd96f69b940ea6fd82473c016c181b..9af412daa3b2e1ba10e837cc73a2dc562cb949e5 100644 --- a/test/metabase/test/mock/toucanery.clj +++ b/test/metabase/test/mock/toucanery.clj @@ -11,33 +11,45 @@ :schema nil :fields #{{:name "id" :pk? true + :database-type "SERIAL" :base-type :type/Integer} {:name "ts" + :database-type "BIGINT" :base-type :type/BigInteger :special-type :type/UNIXTimestampMilliseconds} {:name "toucan" + :database-type "OBJECT" :base-type :type/Dictionary :nested-fields #{{:name "name" + :database-type "VARCHAR" :base-type :type/Text} {:name "details" + :database-type "OBJECT" :base-type :type/Dictionary - :nested-fields #{{:name "age" - :base-type :type/Integer} - {:name "weight" - :special-type :type/Category - :base-type :type/Decimal}}}}} + :nested-fields #{{:name "age" + :database-type "INT" + :base-type :type/Integer} + {:name "weight" + :database-type "DECIMAL" + :special-type :type/Category + :base-type :type/Decimal}}}}} {:name "buyer" + :database-type "OBJECT" :base-type :type/Dictionary - :nested-fields #{{:name "name" - :base-type :type/Text} - {:name "cc" - :base-type :type/Text}}}}} - "employees" {:name "employees" - :schema nil - :fields #{{:name "id" - :base-type :type/Integer} - {:name "name" - :base-type :type/Text}}}}) + :nested-fields #{{:name "name" + :database-type "VARCHAR" + :base-type :type/Text} + {:name "cc" + :database-type "VARCHAR" + :base-type :type/Text}}}}} + "employees" {:name "employees" + :schema nil + :fields #{{:name "id" + :database-type "SERIAL" + :base-type :type/Integer} + {:name "name" + :database-type "VARCHAR" + :base-type :type/Text}}}}) (defn- describe-database [_ {:keys [exclude-tables]}] @@ -75,67 +87,79 @@ [(merge mock-util/table-defaults {:name "employees" :fields [(merge mock-util/field-defaults - {:name "id" - :display_name "ID" - :base_type :type/Integer - :special_type :type/PK}) + {:name "id" + :display_name "ID" + :database_type "SERIAL" + :base_type :type/Integer + :special_type :type/PK}) (merge mock-util/field-defaults - {:name "name" - :display_name "Name" - :base_type :type/Text - :special_type :type/Name})] + {:name "name" + :display_name "Name" + :database_type "VARCHAR" + :base_type :type/Text + :special_type :type/Name})] :display_name "Employees"}) (merge mock-util/table-defaults {:name "transactions" :fields [(merge mock-util/field-defaults - {:name "age" - :display_name "Age" - :base_type :type/Integer - :parent_id true}) + {:name "age" + :display_name "Age" + :database_type "INT" + :base_type :type/Integer + :parent_id true}) (merge mock-util/field-defaults - {:name "buyer" - :display_name "Buyer" - :base_type 
:type/Dictionary}) + {:name "buyer" + :display_name "Buyer" + :database_type "OBJECT" + :base_type :type/Dictionary}) (merge mock-util/field-defaults - {:name "cc" - :display_name "Cc" - :base_type :type/Text - :parent_id true}) + {:name "cc" + :display_name "Cc" + :database_type "VARCHAR" + :base_type :type/Text + :parent_id true}) (merge mock-util/field-defaults - {:name "details" - :display_name "Details" - :base_type :type/Dictionary - :parent_id true}) + {:name "details" + :display_name "Details" + :database_type "OBJECT" + :base_type :type/Dictionary + :parent_id true}) (merge mock-util/field-defaults - {:name "id" - :display_name "ID" - :base_type :type/Integer - :special_type :type/PK}) + {:name "id" + :display_name "ID" + :database_type "SERIAL" + :base_type :type/Integer + :special_type :type/PK}) (merge mock-util/field-defaults - {:name "name" - :display_name "Name" - :base_type :type/Text - :parent_id true - :special_type :type/Name}) + {:name "name" + :display_name "Name" + :database_type "VARCHAR" + :base_type :type/Text + :parent_id true + :special_type :type/Name}) (merge mock-util/field-defaults - {:name "name" - :display_name "Name" - :base_type :type/Text - :parent_id true - :special_type :type/Name}) + {:name "name" + :display_name "Name" + :database_type "VARCHAR" + :base_type :type/Text + :parent_id true + :special_type :type/Name}) (merge mock-util/field-defaults - {:name "toucan" - :display_name "Toucan" - :base_type :type/Dictionary}) + {:name "toucan" + :display_name "Toucan" + :database_type "OBJECT" + :base_type :type/Dictionary}) (merge mock-util/field-defaults - {:name "ts" - :display_name "Ts" - :base_type :type/BigInteger - :special_type :type/UNIXTimestampMilliseconds}) + {:name "ts" + :display_name "Ts" + :database_type "BIGINT" + :base_type :type/BigInteger + :special_type :type/UNIXTimestampMilliseconds}) (merge mock-util/field-defaults - {:name "weight" - :display_name "Weight" - :base_type :type/Decimal - :parent_id true - :special_type :type/Category})] + {:name "weight" + :display_name "Weight" + :database_type "DECIMAL" + :base_type :type/Decimal + :parent_id true + :special_type :type/Category})] :display_name "Transactions"})]) diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj index 3943407ea0f15416cda01e0d5c4e6f0da20b2f5e..6b9cfb7a12f49ea99848124c1abe56d11886c9dd 100644 --- a/test/metabase/test/util.clj +++ b/test/metabase/test/util.clj @@ -153,10 +153,11 @@ (u/strict-extend (class Field) test/WithTempDefaults - {:with-temp-defaults (fn [_] {:base_type :type/Text - :name (random-name) - :position 1 - :table_id (data/id :checkins)})}) + {:with-temp-defaults (fn [_] {:database_type "VARCHAR" + :base_type :type/Text + :name (random-name) + :position 1 + :table_id (data/id :checkins)})}) (u/strict-extend (class Metric) test/WithTempDefaults
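;; With "VARCHAR" as the with-temp default, tests that create a bare temp Field no longer need to
;; spell out a `database_type` themselves; a usage sketch:
;;
;;   (tt/with-temp Field [field]
;;     (:database_type field))   ; => "VARCHAR"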