Skip to content
Snippets Groups Projects
Unverified Commit abe23118 authored by Cam Saül's avatar Cam Saül
Browse files

Minor cleanup :shower:

parent b8710a61
No related branches found
No related tags found
No related merge requests found
......@@ -28,8 +28,8 @@
;;; CONFIG
(def app
"The primary entry point to the HTTP server"
(def ^:private app
"The primary entry point to the Ring HTTP server."
(-> routes/routes
mb-middleware/log-api-call
mb-middleware/add-security-headers ; Add HTTP headers to API responses to prevent them from being cached
......@@ -80,7 +80,7 @@
(log/info (u/format-color 'green "Please use the following url to setup your Metabase installation:\n\n%s\n\n"
setup-url))))
(defn destroy
(defn- destroy!
"General application shutdown function which should be called once at application shutdown."
[]
(log/info "Metabase Shutting Down ...")
......@@ -95,7 +95,7 @@
(reset! metabase-initialization-progress 0.1)
;; First of all, lets register a shutdown hook that will tidy things up for us on app exit
(.addShutdownHook (Runtime/getRuntime) (Thread. ^Runnable destroy))
(.addShutdownHook (Runtime/getRuntime) (Thread. ^Runnable destroy!))
(reset! metabase-initialization-progress 0.3)
;; Load up all of our Database drivers, which are used for app db work
......
......@@ -252,9 +252,11 @@
0)))
(defn- url-percentage
  "Return the fraction of URL-COUNT out of TOTAL-COUNT as a `Double` between 0.0 and 1.0.
   Returns 0.0 when TOTAL-COUNT is `nil` or non-positive, or when URL-COUNT is `nil`."
  [url-count total-count]
  (double (if (and total-count (pos? total-count) url-count)
            ;; make sure to coerce to Double before dividing because if it's a BigDecimal division
            ;; can fail for non-terminating floating-point numbers
            (/ (double url-count)
               (double total-count))
            0.0)))
;; TODO - Full table scan!?! Maybe just fetch first N non-nil values and do in Clojure-land instead
(defn slow-field-percent-urls
......
......@@ -39,4 +39,4 @@
;; ### FIELD-DISTINCT-VALUES
(datasets/expect-with-engines metadata-queries-test-engines
[1 2 3 4 5 6 7 8 9 10 11 12 13 14 15]
(field-distinct-values (Field (id :checkins :user_id))))
(map int (field-distinct-values (Field (id :checkins :user_id)))))
......@@ -37,25 +37,25 @@
(expect
{:name "VENUES"
:schema "PUBLIC"
:fields #{{:name "NAME",
:custom {:column-type "VARCHAR"},
:fields #{{:name "NAME",
:custom {:column-type "VARCHAR"}
:base-type :TextField}
{:name "LATITUDE",
:custom {:column-type "DOUBLE"},
{:name "LATITUDE"
:custom {:column-type "DOUBLE"}
:base-type :FloatField}
{:name "LONGITUDE",
:custom {:column-type "DOUBLE"},
{:name "LONGITUDE"
:custom {:column-type "DOUBLE"}
:base-type :FloatField}
{:name "PRICE",
:custom {:column-type "INTEGER"},
{:name "PRICE"
:custom {:column-type "INTEGER"}
:base-type :IntegerField}
{:name "CATEGORY_ID",
:custom {:column-type "INTEGER"},
{:name "CATEGORY_ID"
:custom {:column-type "INTEGER"}
:base-type :IntegerField}
{:name "ID",
:custom {:column-type "BIGINT"},
:base-type :BigIntegerField,
:pk? true}}}
{:name "ID"
:custom {:column-type "BIGINT"}
:base-type :BigIntegerField
:pk? true}}}
(driver/describe-table (H2Driver.) (db) @venues-table))
;; DESCRIBE-TABLE-FKS
......@@ -109,7 +109,11 @@
(for [row (take 5 (sort-by :id (table-rows-seq datasets/*driver*
(db/select-one 'Database :id (id))
(db/select-one 'RawTable :id (db/select-one-field :raw_table_id 'Table, :id (id :venues))))))]
(dissoc row :latitude :longitude))) ; different DBs use different precisions for these
;; different DBs use different precisions for these
(-> (dissoc row :latitude :longitude)
(update :price int)
(update :category_id int)
(update :id int))))
;;; FIELD-PERCENT-URLS
(datasets/expect-with-engines @generic-sql-engines
......
(ns metabase.http-client
  "HTTP client for making API calls against the Metabase API. For test/REPL purposes."
  (:require [clojure.string :as s]
            [clojure.tools.logging :as log]
            [cheshire.core :as json]
            [clj-http.client :as client]
            [metabase.config :as config]
            [metabase.util :as u]))
(declare authenticate auto-deserialize-dates build-url -client)
;; ## API CLIENT
;;; build-url
;; NOTE: the Jetty port is read from config once, at namespace load time; rebind *url-prefix*
;; (it is dynamic) to target a server running elsewhere.
(def ^:dynamic *url-prefix*
  "Prefix to automatically prepend to the URL of calls made with `client`."
  (str "http://localhost:" (config/config-str :mb-jetty-port) "/api/"))
(defn- build-url
  "Combine `*url-prefix*`, URL, and an optional map of query params (URL-PARAM-KWARGS)
   into a complete URL string. Keyword keys/values are converted via `name`."
  [url url-param-kwargs]
  {:pre [(string? url)
         (or (nil? url-param-kwargs)
             (map? url-param-kwargs))]}
  (let [->str        #(if (keyword? %) (name %) %)
        query-string (when (seq url-param-kwargs)
                       (->> url-param-kwargs
                            (map (fn [[k v]]
                                   (str (->str k) \= (->str v))))
                            (s/join \&)
                            (str "?")))]
    (str *url-prefix* url query-string)))
;;; parse-response
;; Response-map keys whose values are known to be dates; `auto-deserialize-dates` converts
;; the values of these keys to `java.sql.Timestamp`s.
(def ^:private ^:const auto-deserialize-dates-keys
  #{:created_at :updated_at :last_login :date_joined :started_at :finished_at :last_analyzed})
(defn- auto-deserialize-dates
  "Recursively walk RESPONSE and convert the values of any keys known to correspond to dates
   (see `auto-deserialize-dates-keys`) to `java.sql.Timestamp`s."
  [response]
  (cond
    (sequential? response)
    (map auto-deserialize-dates response)

    (map? response)
    (into {} (for [[k v] response]
               [k (cond
                    (contains? auto-deserialize-dates-keys k) (u/->Timestamp v)
                    (coll? v)                                 (auto-deserialize-dates v)
                    :else                                     v)]))

    :else
    response))
(defn- parse-response
  "Deserialize the JSON response BODY (keywordizing keys and converting date values);
   if BODY isn't valid JSON, return it as-is, or `nil` when it is blank."
  [body]
  (try
    (-> body
        (json/parse-string keyword)
        auto-deserialize-dates)
    (catch Throwable _
      (if (s/blank? body)
        nil
        body))))
;;; authentication
(declare client)
(defn authenticate
  "Log in with CREDENTIALS (a map containing `:email` and `:password`) and return the new
   session's ID; on failure, log the error and return `nil`."
  [{:keys [email password] :as credentials}]
  {:pre [(string? email) (string? password)]}
  (try
    (let [session (client :post 200 "session" credentials)]
      (:id session))
    (catch Throwable e
      (log/error "Failed to authenticate with email:" email "and password:" password ":" (.getMessage e)))))
;;; client
(defn- build-request-map
  "Build the clj-http request map: JSON `accept` header, an `X-METABASE-SESSION` header
   (authenticating first when CREDENTIALS is a map of credentials rather than a session ID),
   plus a JSON-encoded body when HTTP-BODY is non-empty."
  [credentials http-body]
  (let [session-id  (when credentials
                      (if (map? credentials)
                        (authenticate credentials)
                        credentials))
        request-map {:accept  :json
                     :headers {"X-METABASE-SESSION" session-id}}]
    (if (seq http-body)
      (assoc request-map
             :content-type :json
             :body (json/generate-string http-body))
      request-map)))
(defn- check-status-code
  "If an EXPECTED-STATUS-CODE was passed to the client, check that the actual status code matches, or throw an exception.
   The response BODY is logged (JSON-parsed when possible) before throwing."
  [method-name url body expected-status-code actual-status-code]
  (when (and expected-status-code
             (not= actual-status-code expected-status-code))
    (let [parsed-body (try
                        (json/parse-string body keyword)
                        (catch Throwable _
                          body))
          message     (format "%s %s expected a status code of %d, got %d." method-name url expected-status-code actual-status-code)]
      (log/error (u/pprint-to-str 'red parsed-body))
      (throw (ex-info message {:status-code actual-status-code})))))
(defn- method->request-fn
  "Return the clj-http function that performs requests for METHOD
   (one of `:get`, `:post`, `:put`, or `:delete`)."
  [method]
  (or (get {:get    client/get
            :post   client/post
            :put    client/put
            :delete client/delete}
           method)
      (throw (IllegalArgumentException. (str "No matching clause: " method)))))
(defn- -client
  "Internal implementation of `client`: build the request, perform the HTTP call, check the
   status code against EXPECTED-STATUS (when non-nil), and return the parsed response body."
  [credentials method expected-status url http-body url-param-kwargs]
  ;; Since the params for this function can get a little complicated make sure we validate them
  {:pre [(or (u/maybe? map? credentials)
             (string? credentials))
         (contains? #{:get :post :put :delete} method)
         (u/maybe? integer? expected-status)
         (string? url)
         (u/maybe? map? http-body)
         (u/maybe? map? url-param-kwargs)]}
  (let [request-map (build-request-map credentials http-body)
        request-fn  (method->request-fn method)
        url         (build-url url url-param-kwargs)
        method-name (s/upper-case (name method))
        ;; Now perform the HTTP request. clj-http throws ExceptionInfo for non-2xx responses;
        ;; recover the actual response map from the exception's data so we can inspect it below.
        {:keys [status body]} (try (request-fn url request-map)
                                   (catch clojure.lang.ExceptionInfo e
                                     (log/debug method-name url)
                                     (:object (ex-data e))))]
    (log/debug method-name url status)
    (check-status-code method-name url body expected-status status)
    (parse-response body)))
(defn client
"Perform an API call and return the response (for test purposes).
The first arg after URL will be passed as a JSON-encoded body if it is a map.
......@@ -40,101 +147,3 @@
[expected-status [url & args]] (u/optional integer? args)
[body [& {:as url-param-kwargs}]] (u/optional map? args)]
(-client credentials method expected-status url body url-param-kwargs)))
;; ## INTERNAL FUNCTIONS
(defn- -client [credentials method expected-status url http-body url-param-kwargs]
;; Since the params for this function can get a little complicated make sure we validate them
{:pre [(or (nil? credentials)
(map? credentials)
(string? credentials))
(contains? #{:get :post :put :delete} method)
(or (nil? expected-status)
(integer? expected-status))
(string? url)
(or (nil? http-body)
(map? http-body))
(or (nil? url-param-kwargs)
(map? url-param-kwargs))]}
(let [request-map (cond-> {:accept :json
:headers {"X-METABASE-SESSION" (when credentials (if (map? credentials) (authenticate credentials)
credentials))}}
(seq http-body) (assoc
:content-type :json
:body (json/generate-string http-body)))
request-fn (case method
:get client/get
:post client/post
:put client/put
:delete client/delete)
url (build-url url url-param-kwargs)
method-name (.toUpperCase ^String (name method))
;; Now perform the HTTP request
{:keys [status body]} (try (request-fn url request-map)
(catch clojure.lang.ExceptionInfo e
(log/debug method-name url)
(:object (ex-data e))))]
;; -check the status code if EXPECTED-STATUS was passed
(log/debug method-name url status)
(when expected-status
(when-not (= status expected-status)
(let [message (format "%s %s expected a status code of %d, got %d." method-name url expected-status status)
body (try (-> (json/parse-string body)
clojure.walk/keywordize-keys)
(catch Throwable _ body))]
(log/error (u/pprint-to-str 'red body))
(throw (ex-info message {:status-code status})))))
;; Deserialize the JSON response or return as-is if that fails
(try (-> body
json/parse-string
clojure.walk/keywordize-keys
auto-deserialize-dates)
(catch Throwable _
(if (clojure.string/blank? body) nil
body)))))
(defn authenticate [{:keys [email password] :as credentials}]
{:pre [(string? email) (string? password)]}
(try
(:id (client :post 200 "session" credentials))
(catch Throwable e
(log/error "Failed to authenticate with email:" email "and password:" password ":" (.getMessage e)))))
(defn- build-url [url url-param-kwargs]
{:pre [(string? url)
(or (nil? url-param-kwargs)
(map? url-param-kwargs))]}
(str *url-prefix* url (when-not (empty? url-param-kwargs)
(str "?" (->> url-param-kwargs
(map (fn [[k v]]
[(if (keyword? k) (name k) k)
(if (keyword? v) (name v) v)]))
(map (partial interpose "="))
(map (partial apply str))
(interpose "&")
(apply str))))))
;; ## AUTO-DESERIALIZATION
(def ^:private ^:const auto-deserialize-dates-keys
#{:created_at :updated_at :last_login :date_joined :started_at :finished_at :last_analyzed})
(defn- auto-deserialize-dates
"Automatically recurse over RESPONSE and look for keys that are known to correspond to dates.
Parse their values and convert to `java.sql.Timestamps`."
[response]
(cond (sequential? response) (map auto-deserialize-dates response)
(map? response) (->> response
(map (fn [[k v]]
{k (cond
(contains? auto-deserialize-dates-keys k) (u/->Timestamp v)
(coll? v) (auto-deserialize-dates v)
:else v)}))
(into {}))
:else response))
......@@ -12,7 +12,7 @@
[metabase.query-processor :as qp]
(metabase.query-processor [expand :as ql]
[parameters :refer :all])
[metabase.query-processor-test :refer [non-timeseries-engines first-row]]
[metabase.query-processor-test :refer [non-timeseries-engines first-row format-rows-by]]
[metabase.test.data :as data]
(metabase.test.data [datasets :as datasets]
[users :refer :all])
......@@ -139,38 +139,44 @@
;; check that date ranges work correctly
(datasets/expect-with-engines params-test-engines
[29]
(first-row (qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "date"
:target ["dimension" ["field-id" (data/id :checkins :date)]]
:value "2015-04-01~2015-05-01"}]})))
(first-row
(format-rows-by [int]
(qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "date"
:target ["dimension" ["field-id" (data/id :checkins :date)]]
:value "2015-04-01~2015-05-01"}]}))))
;; check that IDs work correctly (passed in as numbers)
(datasets/expect-with-engines params-test-engines
[1]
(first-row (qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "number"
:target ["dimension" ["field-id" (data/id :checkins :id)]]
:value 100}]})))
(first-row
(format-rows-by [int]
(qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "number"
:target ["dimension" ["field-id" (data/id :checkins :id)]]
:value 100}]}))))
;; check that IDs work correctly (passed in as strings, as the frontend is wont to do; should get converted)
(datasets/expect-with-engines params-test-engines
[1]
(first-row (qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "number"
:target ["dimension" ["field-id" (data/id :checkins :id)]]
:value "100"}]})))
(first-row
(format-rows-by [int]
(qp/process-query {:database (data/id)
:type :query
:query (data/query checkins
(ql/aggregation (ql/count)))
:parameters [{:hash "abc123"
:name "foo"
:type "number"
:target ["dimension" ["field-id" (data/id :checkins :id)]]
:value "100"}]}))))
......@@ -7,10 +7,12 @@
[metabase.models.table :as table]
[metabase.query-processor :as qp]
[metabase.query-processor.sql-parameters :refer :all]
[metabase.query-processor-test :refer [engines-that-support first-row]]
[metabase.query-processor-test :refer [engines-that-support first-row format-rows-by]]
[metabase.test.data :as data]
[metabase.test.data.datasets :as datasets]
[metabase.test.util :as tu]))
[metabase.test.data.generic-sql :as generic-sql]
[metabase.test.util :as tu]
[metabase.test.data.generic-sql :as generic]))
;;; ------------------------------------------------------------ simple substitution -- {{x}} ------------------------------------------------------------
......@@ -327,8 +329,14 @@
;;; ------------------------------------------------------------ "REAL" END-TO-END-TESTS ------------------------------------------------------------
;; Quote IDENTIFIER (a table or column name) using the quoting style of the driver currently under test.
(defn- quote-name [identifier]
  (generic-sql/quote-name datasets/*driver* identifier))
(defn- checkins-identifier
  "Return the quoted (and schema-qualified, when the Table has a non-empty schema)
   SQL identifier for the test-data `checkins` Table."
  []
  (let [{table-name :name, schema :schema} (db/select-one ['Table :name :schema], :id (data/id :checkins))]
    (str (when (seq schema)
           (str (quote-name schema) \.))
         (quote-name table-name))))
;; as with the MBQL parameters tests redshift and crate fail for unknown reasons; disable their tests for now
(def ^:private ^:const sql-parameters-engines
......@@ -337,20 +345,22 @@
(datasets/expect-with-engines sql-parameters-engines
[29]
(first-row
(qp/process-query
{:database (data/id)
:type :native
:native {:query (format "SELECT COUNT(*) FROM %s WHERE {{checkin_date}};" (checkins-identifier))
:template_tags {:checkin_date {:name "checkin_date", :display_name "Checkin Date", :type "dimension", :dimension ["field-id" (data/id :checkins :date)]}}}
:parameters [{:type "date/range", :target ["dimension" ["template-tag" "checkin_date"]], :value "2015-04-01~2015-05-01"}]})))
(format-rows-by [int]
(qp/process-query
{:database (data/id)
:type :native
:native {:query (format "SELECT COUNT(*) FROM %s WHERE {{checkin_date}}" (checkins-identifier))
:template_tags {:checkin_date {:name "checkin_date", :display_name "Checkin Date", :type "dimension", :dimension ["field-id" (data/id :checkins :date)]}}}
:parameters [{:type "date/range", :target ["dimension" ["template-tag" "checkin_date"]], :value "2015-04-01~2015-05-01"}]}))))
;; no parameter -- should give us a query with "WHERE 1 = 1"
(datasets/expect-with-engines sql-parameters-engines
[1000]
(first-row
(qp/process-query
{:database (data/id)
:type :native
:native {:query (format "SELECT COUNT(*) FROM %s WHERE {{checkin_date}};" (checkins-identifier))
:template_tags {:checkin_date {:name "checkin_date", :display_name "Checkin Date", :type "dimension", :dimension ["field-id" (data/id :checkins :date)]}}}
:parameters []})))
(format-rows-by [int]
(qp/process-query
{:database (data/id)
:type :native
:native {:query (format "SELECT COUNT(*) FROM %s WHERE {{checkin_date}}" (checkins-identifier))
:template_tags {:checkin_date {:name "checkin_date", :display_name "Checkin Date", :type "dimension", :dimension ["field-id" (data/id :checkins :date)]}}}
:parameters []}))))
......@@ -480,9 +480,11 @@
(defn- ->bool
  "Restore sanity to DB-specific boolean results: SQLite returns 0/1 (and some drivers return
   the BigDecimals 0M/1M) for false/true, and Redshift returns nil/true. Normalize all of
   these to real booleans; any other value is returned unchanged."
  [x]
  (condp = x
    0   false
    0M  false
    1   true
    1M  true
    nil false
    x))
;;; filter = true
(expect-with-non-timeseries-dbs
......@@ -525,13 +527,13 @@
;;; FILTER -- "BETWEEN" with dates
(qp-expect-with-all-engines
{:rows [[29]]
:columns ["count"]
:cols [(aggregate-col :count)]
:native_form true}
{:rows [[29]]
:columns ["count"]
:cols [(aggregate-col :count)]
:native_form true}
(->> (run-query checkins
(ql/aggregation (ql/count))
(ql/filter (ql/between $date "2015-04-01" "2015-05-01")))
(ql/aggregation (ql/count))
(ql/filter (ql/between $date "2015-04-01" "2015-05-01")))
booleanize-native-form
(format-rows-by [int])))
......@@ -557,17 +559,20 @@
;;; FILTER - `is-null` & `not-null` on datetime columns
(expect-with-non-timeseries-dbs
[1000]
(first-row (run-query checkins
(ql/aggregation (ql/count))
(ql/filter (ql/not-null $date)))))
(first-row
(format-rows-by [int]
(run-query checkins
(ql/aggregation (ql/count))
(ql/filter (ql/not-null $date))))))
(expect-with-non-timeseries-dbs
;; Some DBs like Mongo don't return any results at all in this case, and there's no easy workaround
true
(let [result (first-row (run-query checkins
(ql/aggregation (ql/count))
(ql/filter (ql/is-null $date))))]
;; Some DBs like Mongo don't return any results at all in this case, and there's no easy workaround
(or (= result [0])
(= result [0M])
(nil? result))))
......@@ -1397,14 +1402,18 @@
;;; ------------------------------------------------------------ BUCKETING ------------------------------------------------------------
(defn- ->int-if-number
  "Coerce X to an Integer when it is a number; return any other value untouched."
  [x]
  (cond-> x
    (number? x) int))
(defn- sad-toucan-incidents-with-bucketing
  "Run a count-by-timestamp query against the sad-toucan-incidents dataset, bucketing the
   timestamp breakout by UNIT, and return the first 10 rows with counts coerced to ints
   (and timestamp values coerced to ints as well when the driver returns them as numbers)."
  [unit]
  (->> (with-db (get-or-create-database! defs/sad-toucan-incidents)
         (run-query incidents
           (ql/aggregation (ql/count))
           (ql/breakout (ql/datetime-field $timestamp unit))
           (ql/limit 10)))
       rows (format-rows-by [->int-if-number int])))
(expect-with-non-timeseries-dbs
(cond
......@@ -1759,109 +1768,149 @@
;; TODO - maybe it makes sense to have a separate namespace to test the Query eXpander so we don't need to run all these extra queries?
;;; =
(expect-with-non-timeseries-dbs [99] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/= $id 1))))))
(expect-with-non-timeseries-dbs [99] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/= $id 1)))))))
;;; !=
(expect-with-non-timeseries-dbs [1] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/!= $id 1))))))
(expect-with-non-timeseries-dbs [1] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/!= $id 1)))))))
;;; <
(expect-with-non-timeseries-dbs [61] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/< $id 40))))))
(expect-with-non-timeseries-dbs [61] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/< $id 40)))))))
;;; >
(expect-with-non-timeseries-dbs [40] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/> $id 40))))))
(expect-with-non-timeseries-dbs [40] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/> $id 40)))))))
;;; <=
(expect-with-non-timeseries-dbs [60] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/<= $id 40))))))
(expect-with-non-timeseries-dbs [60] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/<= $id 40)))))))
;;; >=
(expect-with-non-timeseries-dbs [39] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/>= $id 40))))))
(expect-with-non-timeseries-dbs [39] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/>= $id 40)))))))
;;; is-null
(expect-with-non-timeseries-dbs [100] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/is-null $id))))))
(expect-with-non-timeseries-dbs [100] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/is-null $id)))))))
;;; between
(expect-with-non-timeseries-dbs [89] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/between $id 30 40))))))
(expect-with-non-timeseries-dbs [89] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/between $id 30 40)))))))
;;; inside
(expect-with-non-timeseries-dbs [39] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/inside $latitude $longitude 40 -120 30 -110))))))
(expect-with-non-timeseries-dbs [39] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/inside $latitude $longitude 40 -120 30 -110)))))))
;;; starts-with
(expect-with-non-timeseries-dbs [80] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/starts-with $name "T"))))))
(expect-with-non-timeseries-dbs [80] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/starts-with $name "T")))))))
;;; contains
(expect-with-non-timeseries-dbs [97] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/contains $name "BBQ"))))))
(expect-with-non-timeseries-dbs [97] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/contains $name "BBQ")))))))
;;; does-not-contain
;; This should literally be the exact same query as the one above by the time it leaves the Query eXpander, so this is more of a QX test than anything else
(expect-with-non-timeseries-dbs [97] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/does-not-contain $name "BBQ")))))
(expect-with-non-timeseries-dbs [97] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/does-not-contain $name "BBQ"))))))
;;; ends-with
(expect-with-non-timeseries-dbs [87] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/ends-with $name "a"))))))
(expect-with-non-timeseries-dbs [87] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/ends-with $name "a")))))))
;;; and
(expect-with-non-timeseries-dbs [98] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/and (ql/> $id 32)
(ql/contains $name "BBQ")))))))
(expect-with-non-timeseries-dbs [98] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/and (ql/> $id 32)
(ql/contains $name "BBQ"))))))))
;;; or
(expect-with-non-timeseries-dbs [31] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/or (ql/> $id 32)
(ql/contains $name "BBQ")))))))
(expect-with-non-timeseries-dbs [31] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/or (ql/> $id 32)
(ql/contains $name "BBQ"))))))))
;;; nested and/or
(expect-with-non-timeseries-dbs [96] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/or (ql/and (ql/> $id 32)
(ql/< $id 35))
(ql/contains $name "BBQ")))))))
(expect-with-non-timeseries-dbs [96] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/or (ql/and (ql/> $id 32)
(ql/< $id 35))
(ql/contains $name "BBQ"))))))))
;;; nested not
(expect-with-non-timeseries-dbs [3] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/not (ql/contains $name "BBQ")))))))
(expect-with-non-timeseries-dbs [3] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/not (ql/not (ql/contains $name "BBQ"))))))))
;;; not nested inside and/or
(expect-with-non-timeseries-dbs [1] (first-row (run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/and (ql/not (ql/> $id 32))
(ql/contains $name "BBQ"))))))
(expect-with-non-timeseries-dbs [1] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/count))
(ql/filter (ql/and (ql/not (ql/> $id 32))
(ql/contains $name "BBQ")))))))
;;; +----------------------------------------------------------------------------------------------------------------------+
;;; | MIN & MAX |
;;; +----------------------------------------------------------------------------------------------------------------------+
(expect-with-non-timeseries-dbs [1] (first-row (run-query venues
(ql/aggregation (ql/min $price)))))
(expect-with-non-timeseries-dbs [1] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/min $price))))))
(expect-with-non-timeseries-dbs [4] (first-row (run-query venues
(ql/aggregation (ql/max $price)))))
(expect-with-non-timeseries-dbs [4] (first-row
(format-rows-by [int]
(run-query venues
(ql/aggregation (ql/max $price))))))
(expect-with-non-timeseries-dbs
[[1 34.0071] [2 33.7701] [3 10.0646] [4 33.983]]
......@@ -1986,7 +2035,8 @@
["Peter Pelican" 5]
["Ronald Raven" 1]]
(dataset avian-singles
(rows (run-query messages
(ql/aggregation (ql/count))
(ql/breakout $sender_id->users.name)
(ql/filter (ql/= $reciever_id->users.name "Rasta Toucan"))))))
(format-rows-by [str int]
(rows (run-query messages
(ql/aggregation (ql/count))
(ql/breakout $sender_id->users.name)
(ql/filter (ql/= $reciever_id->users.name "Rasta Toucan")))))))
......@@ -13,7 +13,6 @@
(def ^:const all-valid-engines (set (keys (driver/available-drivers))))
;; # Logic for determining which datasets to test against
;; By default, we'll test against only the :h2 (H2) dataset; otherwise, you can specify which
......
......@@ -140,8 +140,8 @@
(str \" nm \"))
(defn- quote+combine-names
  "Quote each name in NAMES using DRIVER's quoting style and join the results with dots,
   e.g. [\"schema\" \"table\"] -> \"\\\"schema\\\".\\\"table\\\"\" (exact quoting is driver-specific)."
  [driver names]
  (s/join \. (for [n names]
               (name (hx/qualify-and-escape-dots (quote-name driver n))))))
(defn- default-qualify+quote-name
([driver db-name]
......@@ -307,7 +307,7 @@
;; Add the SQL for creating each Table
(doseq [tabledef table-definitions]
(swap! statements conj (drop-table-if-exists-sql driver dbdef tabledef)
(create-table-sql driver dbdef tabledef)))
(create-table-sql driver dbdef tabledef)))
;; Add the SQL for adding FK constraints
(doseq [{:keys [field-definitions], :as tabledef} table-definitions]
......@@ -316,7 +316,7 @@
(swap! statements conj (add-fk-sql driver dbdef tabledef fielddef)))))
;; exec the combined statement
(execute-sql! driver :db dbdef (apply str (interpose ";\n" (map hx/unescape-dots @statements)))))
(execute-sql! driver :db dbdef (s/join ";\n" (map hx/unescape-dots @statements))))
;; Now load the data for each Table
(doseq [tabledef table-definitions]
......
......@@ -8,7 +8,8 @@
[driver :as driver])
(metabase.models [database :refer [Database]]
[field :refer [Field] :as field]
[table :refer [Table]]))
[table :refer [Table]])
[metabase.util :as u])
(:import clojure.lang.Keyword))
(defrecord FieldDefinition [^String field-name
......@@ -62,27 +63,28 @@
(defprotocol IDatasetLoader
"Methods for creating, deleting, and populating *physical* DBMS databases, tables, and fields.
Methods marked *OPTIONAL* have default implementations in `IDatasetLoaderDefaultsMixin`."
(engine [this]
(engine ^clojure.lang.Keyword [this]
"Return the engine keyword associated with this database, e.g. `:h2` or `:mongo`.")
(database->connection-details [this ^Keyword context, ^DatabaseDefinition database-definition]
(database->connection-details [this, ^Keyword context, ^DatabaseDefinition database-definition]
"Return the connection details map that should be used to connect to this database (i.e. a Metabase `Database` details map)
CONTEXT is one of:
* `:server` - Return details for making the connection in a way that isn't DB-specific (e.g., for creating/destroying databases)
* `:db` - Return details for connecting specifically to the DB.")
(create-db! [this ^DatabaseDefinition database-definition]
(create-db! [this, ^DatabaseDefinition database-definition]
"Create a new database from DATABASE-DEFINITION, including adding tables, fields, and foreign key constraints,
and add the appropriate data. This method should drop existing databases with the same name if applicable.
(This refers to creating the actual *DBMS* database itself, *not* a Metabase `Database` object.)")
(destroy-db! [this ^DatabaseDefinition database-definition]
(destroy-db! [this, ^DatabaseDefinition database-definition]
"Destroy database, if any, associated with DATABASE-DEFINITION.
This refers to destroying a *DBMS* database -- removing an H2 file, dropping a Postgres database, etc.
This does not need to remove corresponding Metabase definitions -- this is handled by `DatasetLoader`.")
(default-schema [this]
;; TODO - this would be more useful if DATABASE-DEFINITION was a parameter
(default-schema ^String [this]
"*OPTIONAL* Return the default schema name that tables for this DB should be expected to have.")
(expected-base-type->actual [this base-type]
......@@ -91,22 +93,21 @@
can specify what type we should expect in the results instead.
For example, Oracle has `INTEGER` data types, so `:IntegerField` test values are instead stored as `NUMBER`, which we map to `:DecimalField`.")
(format-name [this table-or-field-name]
(format-name ^String [this, ^String table-or-field-name]
"*OPTIONAL* Transform a lowercase string `Table` or `Field` name in a way appropriate for this dataset
(e.g., `h2` would want to upcase these names; `mongo` would want to use `\"_id\"` in place of `\"id\"`.")
(has-questionable-timezone-support? [this]
(has-questionable-timezone-support? ^Boolean [this]
"*OPTIONAL*. Does this driver have \"questionable\" timezone support? (i.e., does it group things by UTC instead of the `US/Pacific` when we're testing?)
Defaults to `(not (contains? (metabase.driver/features this) :set-timezone))`
(id-field-type [this]
(id-field-type ^clojure.lang.Keyword [this]
"*OPTIONAL* Return the `base_type` of the `id` `Field` (e.g. `:IntegerField` or `:BigIntegerField`). Defaults to `:IntegerField`."))
(def IDatasetLoaderDefaultsMixin
  "Default implementations for the *OPTIONAL* methods of `IDatasetLoader`."
  {:expected-base-type->actual         (u/drop-first-arg identity)
   :default-schema                     (constantly nil)
   :format-name                        (u/drop-first-arg identity)
   :has-questionable-timezone-support? (fn [driver]
                                         (not (contains? (driver/features driver) :set-timezone)))
   :id-field-type                      (constantly :IntegerField)})
......@@ -209,8 +210,8 @@
field-name
(let [[_ fk-table fk-dest-name] field-name]
(-> fk-table
(clojure.string/replace #"ies$" "y")
(clojure.string/replace #"s$" "")
(s/replace #"ies$" "y")
(s/replace #"s$" "")
(str \_ (flatten-field-name fk-dest-name))))))
(defn flatten-dbdef
......
......@@ -7,8 +7,7 @@
[expectations :refer :all]
(metabase [core :as core]
[db :as db]
[driver :as driver]
[util :as u])
[driver :as driver])
(metabase.models [setting :as setting]
[table :refer [Table]])
[metabase.test.data :as data]
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment