diff --git a/src/metabase/driver/mongo/query_processor.clj b/src/metabase/driver/mongo/query_processor.clj index 7a76f7e48901088c29eb42201ae93403f34af20c..b626fbb10c0c38c316155cdb524c6110cb9d7f6f 100644 --- a/src/metabase/driver/mongo/query_processor.clj +++ b/src/metabase/driver/mongo/query_processor.clj @@ -21,17 +21,17 @@ [metabase.util :as u]) (:import (com.mongodb CommandResult DB) - (clojure.lang PersistentArrayMap) - (org.bson.types ObjectId) + clojure.lang.PersistentArrayMap + java.util.Calendar + org.bson.types.ObjectId (metabase.driver.query_processor.interface DateTimeField DateTimeValue Field OrderByAggregateField + RelativeDateTimeValue Value))) -(declare apply-clause - eval-raw-command - process-structured +(declare process-and-run-native process-and-run-structured) @@ -39,26 +39,25 @@ (def ^:dynamic ^:private *query* nil) +(defn- log-monger-form [form] + (when-not qp/*disable-qp-logging* + (log/debug (u/format-color 'green "\nMONGO AGGREGATION PIPELINE:\n%s\n" + (->> form + (walk/postwalk #(if (symbol? %) (symbol (name %)) %)) ; strip namespace qualifiers from Monger form + u/pprint-to-str) "\n")))) + (defn process-and-run "Process and run a MongoDB QUERY." [{query-type :type, :as query}] - (binding [*query* query] - (case (keyword query-type) - :query (let [generated-query (process-structured (:query query))] - (when-not qp/*disable-qp-logging* - (log/debug (u/format-color 'green "\nMONGER FORM:\n%s\n" - (->> generated-query - (walk/postwalk #(if (symbol? %) (symbol (name %)) %)) ; strip namespace qualifiers from Monger form - u/pprint-to-str) "\n"))) ; so it's easier to read - (eval generated-query)) - :native (let [results (eval-raw-command (:query (:native query)))] - (if (sequential? results) results - [results]))))) + {:pre [query-type]} + (case (keyword query-type) + :query (process-and-run-structured query) + :native (process-and-run-native query))) ;; # NATIVE QUERY PROCESSOR -(defn eval-raw-command +(defn- eval-raw-command "Evaluate raw MongoDB javascript code. This must be ran insided the body of a `with-mongo-connection`. (with-mongo-connection [_ \"mongodb://localhost/test\"] @@ -72,29 +71,21 @@ (let [{result "retval"} (PersistentArrayMap/create (.toMap result))] result))) +(defn- process-and-run-native [query] + (let [results (eval-raw-command (:query (:native query)))] + (if (sequential? results) results + [results]))) -;; # STRUCTURED QUERY PROCESSOR - -;; ## AGGREGATION IMPLEMENTATIONS -(def ^:dynamic *collection-name* - "String name of the collection (i.e., `Table`) that we're currently querying against." - nil) -(def ^:dynamic *constraints* - "Monger clauses generated from query dict `filter` clauses; bound dynamically so we can insert these as appropriate for various types of aggregations." - nil) +;;; # STRUCTURED QUERY PROCESSOR -(defn aggregate - "Generate a Monger `aggregate` form." - [& forms] - `(mc/aggregate ^DB *mongo-connection* ~*collection-name* [~@(when *constraints* - [{$match *constraints*}]) - ~@(filter identity forms)])) +;;; ## FORMATTING -;; Return qualified string name of FIELD, e.g. `venue` or `venue.address`. -(defmulti field->name (fn - (^String [this] (class this)) - (^String [this separator] (class this)))) +(defmulti field->name + "Return qualified string name of FIELD, e.g. `venue` or `venue.address`." 
+ (fn + (^String [this] (class this)) + (^String [this separator] (class this)))) (defmethod field->name Field ([this] @@ -111,7 +102,6 @@ :avg "avg" :count "count" :distinct "count" - :stddev "stddev" :sum "sum")))) (defmethod field->name DateTimeField @@ -120,198 +110,7 @@ ([this separator] (field->name (:field this) separator))) -(defn- field->$str - "Given a FIELD, return a `$`-qualified field name for use in a Mongo aggregate query, e.g. `\"$user_id\"`." - [field] - (format "$%s" (field->name field))) - -(defn- aggregation:rows [] - `(doall (with-collection ^DB *mongo-connection* ~*collection-name* - ~@(when *constraints* [`(find ~*constraints*)]) - ~@(mapcat apply-clause (dissoc (:query *query*) :filter))))) - -(defn- aggregation:count - ([] - `[{:count (mc/count ^DB *mongo-connection* ~*collection-name* - ~*constraints*)}]) - ([field] - `[{:count (mc/count ^DB *mongo-connection* ~*collection-name* - (merge ~*constraints* - {~(field->name field) {$exists true}}))}])) - -(defn- aggregation:avg [field] - (aggregate {$group {"_id" nil - "avg" {$avg (field->$str field)}}} - {$project {"_id" false, "avg" true}})) - -(defn- aggregation:distinct [field] - ;; Unfortunately trying to do a MongoDB distinct aggregation runs out of memory if there are more than a few thousand values - ;; because Monger currently doesn't expose any way to enable allowDiskUse in aggregations - ;; (see https://groups.google.com/forum/#!searchin/clojure-mongodb/$2BallowDiskUse/clojure-mongodb/3qT34rZSFwQ/tYCxj5coo8gJ). - ;; - ;; We also can't effectively limit the number of values considered in the aggregation meaning simple things like determining categories - ;; in sync (which only needs to know if distinct count is < 40, meaning it can theoretically stop as soon as it sees the 40th value) - ;; will still barf on large columns. - ;; - ;; It's faster and better-behaved to just implement this logic in Clojure-land for the time being. - ;; Since it's lazy we can handle large data sets (I've ran this successfully over 500,000+ document collections w/o issue). - [{:count (let [values (transient (set [])) - limit (:limit (:query *query*)) - keep-taking? (if limit (fn [_] - (< (count values) limit)) - (constantly true)) - field-id (or (:field-id field) ; Field - (:field-id (:field field)))] ; DateTimeField - (->> (i/field-values-lazy-seq @(ns-resolve 'metabase.driver.mongo 'driver) (sel :one field/Field :id field-id)) ; resolve driver at runtime to avoid circular deps - (filter identity) - (map hash) - (map #(conj! values %)) - (take-while keep-taking?) - dorun) - (count values))}]) - -(defn- aggregation:sum [field] - (aggregate {$group {"_id" nil ; TODO - I don't think this works for _id - "sum" {$sum (field->$str field)}}} - {$project {"_id" false, "sum" true}})) - -(defn- match-aggregation [{:keys [aggregation-type field]}] - (if-not field - ;; aggregations with no Field - (case aggregation-type - :rows (aggregation:rows) - :count (aggregation:count)) - ;; aggregations with a field - ((case aggregation-type - :avg aggregation:avg - :count aggregation:count - :distinct aggregation:distinct - :sum aggregation:sum) ; TODO -- stddev isn't implemented for mongo - field))) - - -;; ## BREAKOUT -;; This is similar to the aggregation stuff but has to be implemented separately since Mongo doesn't really have -;; GROUP BY functionality the same way SQL does. 
-;; This is annoying, since it effectively duplicates logic we have in the aggregation definitions above and the -;; clause definitions below, but the query we need to generate is different enough that I haven't found a cleaner -;; way of doing this yet. -(defn- breakout-aggregation->field-name+expression - "Match AGGREGATION clause of a structured query that contains a `breakout` clause, and return - a pair containing `[field-name aggregation-expression]`, which are used to generate the Mongo aggregate query." - [{:keys [aggregation-type field]}] - ;; AFAIK these are the only aggregation types that make sense in combination with a breakout clause or are we missing something? - ;; At any rate these seem to be the most common use cases, so we can add more here if and when they're needed. - (if-not field - (case aggregation-type - :rows nil - :count ["count" {$sum 1}]) - (case aggregation-type - :avg ["avg" {$avg (field->$str field)}] - :sum ["sum" {$sum (field->$str field)}]))) - -;;; BREAKOUT FIELD NAME ESCAPING FOR $GROUP -;; We're not allowed to use field names that contain a period in the Mongo aggregation $group stage. -;; Not OK: -;; {"$group" {"source.username" {"$first" {"$source.username"}, "_id" "$source.username"}}, ...} -;; -;; For *nested* Fields, we'll replace the '.' with '___', and restore the original names afterward. -;; Escaped: -;; {"$group" {"source___username" {"$first" {"$source.username"}, "_id" "$source.username"}}, ...} - -(defn ag-unescape-nested-field-names - "Restore the original, unescaped nested Field names in the keys of RESULTS. - E.g. `:source___service` becomes `:source.service`" - [results] - ;; Build a map of escaped key -> unescaped key by looking at the keys in the first result - ;; e.g. {:source___username :source.username} - (let [replacements (into {} (for [k (keys (first results))] - (let [k-str (name k) - unescaped (s/replace k-str #"___" ".")] - (when-not (= k-str unescaped) - {k (keyword unescaped)}))))] - ;; If the map is non-empty then map set/rename-keys over the results with it - (if-not (seq replacements) - results - (for [row results] - (set/rename-keys row replacements))))) - -(defn- do-breakout - "Generate a Monger query from a structured QUERY dictionary that contains a `breakout` clause. - Since the Monger query we generate looks very different from ones we generate when no `breakout` clause - is present, this is essentialy a separate implementation :/" - [{aggregation :aggregation, breakout-fields :breakout, order-by :order-by, limit :limit, :as query}] - (let [;; Shadow the top-level definition of field->name with one that will use "___" as the separator instead of "." 
- field->escaped-name (u/rpartial field->name "___") - [ag-field ag-clause] (breakout-aggregation->field-name+expression aggregation) - fields (map field->escaped-name breakout-fields) - $fields (map field->$str breakout-fields) - fields->$fields (zipmap fields $fields)] - `(ag-unescape-nested-field-names - ~(aggregate {$group (merge {"_id" (if (= (count fields) 1) (first $fields) - fields->$fields)} - (when (and ag-field ag-clause) - {ag-field ag-clause}) - (into {} (for [[field $field] fields->$fields] - (when-not (= field "_id") - {field {$first $field}}))))} - {$sort (->> order-by - (mapcat (fn [{:keys [field direction]}] - [(field->escaped-name field) (case direction - :ascending 1 - :descending -1)])) - (apply sorted-map))} - {$project (merge {"_id" false} - (when ag-field - {ag-field true}) - (zipmap fields (repeat true)))} - (when limit - {$limit limit}))))) - -;; ## PROCESS-STRUCTURED - -(defn process-structured - "Process a structured MongoDB QUERY. - This establishes some bindings, then: - - * queries that contain `breakout` clauses are handled by `do-breakout` - * other queries are handled by `match-aggregation`, which hands off to the - appropriate fn defined by a `defaggregation`." - [{:keys [source-table aggregation breakout] :as query}] - (binding [*collection-name* (:name source-table) - *constraints* (when-let [filter-clause (:filter query)] - (apply-clause [:filter filter-clause]))] - (if (seq breakout) (do-breakout query) - (match-aggregation aggregation)))) - - -;; ## CLAUSE APPLICATION 2.0 - -(def ^:private clauses - "Used by `defclause` to store the clause definitions generated by it." - (atom '())) - -(defmacro ^:private defclause - "Generate a new clause definition that will be called inside of a `match` statement - whenever CLAUSE matches MATCH-BINDING. - - In general, these should emit a vector of forms to be included in the generated Monger query; - however, `filter` is handled a little differently (see below)." - [clause match-binding & body] - `(swap! clauses concat '[[~clause ~match-binding] (try - ~@body - (catch Throwable e# - (log/error (color/red ~(format "Failed to process '%s' clause:" (name clause)) - (.getMessage e#)))))])) - -;; ### CLAUSE DEFINITIONS - -;; ### fields -(defclause :fields fields - `[(fields ~(mapv field->name fields))]) - - -;; ### filter +;;; ## FILTERING (defmulti format-value class) @@ -324,6 +123,22 @@ (defmethod format-value DateTimeValue [{^java.sql.Timestamp value :value}] (java.util.Date. 
(.getTime value)))
+;; TODO - this doesn't work 100%
+;; in filters we're not applying bucketing for things like "="
+(defmethod format-value RelativeDateTimeValue [{:keys [amount unit field], :as r}]
+  (let [cal               (Calendar/getInstance)
+        [unit multiplier] (case (or unit :day)
+                            :minute  [Calendar/MINUTE 1]
+                            :hour    [Calendar/HOUR 1]
+                            :day     [Calendar/DATE 1]
+                            :week    [Calendar/DATE 7]
+                            :month   [Calendar/MONTH 1]
+                            :quarter [Calendar/MONTH 3]
+                            :year    [Calendar/YEAR 1])]
+    (.set cal unit (+ (.get cal unit)
+                      (* amount multiplier)))
+    (.getTime cal)))
+
 (defn- parse-filter-subclause [{:keys [filter-type field value] :as filter}]
   (let [field (when field (field->name field))
         value (when value (format-value value))]
@@ -339,7 +154,9 @@
       :contains    {field (re-pattern value)}
       :starts-with {field (re-pattern (str \^ value))}
       :ends-with   {field (re-pattern (str value \$))}
       :=           {field value}
       :!=          {field {$ne value}}
       :<           {field {$lt value}}
       :>           {field {$gt value}}
@@ -353,43 +170,221 @@
     :else (parse-filter-subclause clause)))
 
-(defclause :filter filter-clause
-  (parse-filter-clause filter-clause))
-
-
-;; ### limit
-
-(defclause :limit value
-  `[(limit ~value)])
-
-;; ### order_by
-(defclause :order-by subclauses
-  (let [sort-options (mapcat (fn [{:keys [field direction]}]
-                               [(field->name field) (case direction
-                                                      :ascending 1
-                                                      :descending -1)])
-                             subclauses)]
-    (when (seq sort-options)
-      `[(sort (array-map ~@sort-options))])))
-
-;; ### page
-(defclause :page page-clause
-  (let [{page-num :page items-per-page :items} page-clause
-        num-to-skip (* (dec page-num) items-per-page)]
-    `[(skip ~num-to-skip)
-      (limit ~items-per-page)]))
-
+;;; ## CLAUSE APPLICATION
+
+(defmulti field->$ class)
+
+(defmethod field->$ Field [this]
+  (str \$ (field->name this)))
+
+(defn- current-timezone-offset
+  "MongoDB doesn't really do timezone support, so we will *totally* fake it.
+   When dealing with a `DateTimeField`, we'll add the current timezone offset to its value
+   and apply the appropriate suffix to the strings we generate
+   so grouping happens for the current timezone, instead of UTC.
+   TODO - we should upgrade this to handle arbitrary timezones, e.g. ones passed in via the Query Dict, the way the SQL drivers do."
+  []
+  (let [ms      (.getOffset (java.util.TimeZone/getDefault) (System/currentTimeMillis))
+        seconds (/ ms 1000)
+        minutes (/ seconds 60)
+        hours   (/ minutes 60)
+        minutes (mod minutes 60)]
+    {:ms  ms
+     :str (format "%s%02d:%02d"
+                  (if (< hours 0) "-" "+")
+                  (Math/abs ^Integer hours)
+                  minutes)}))
+
+(defmethod field->$ DateTimeField [{unit :unit, {:keys [special-type], :as ^Field field} :field}]
+  (let [tz-offset    (current-timezone-offset)
+        $field       (field->$ field)
+        $field       {$add [(:ms tz-offset)
+                            (cond
+                              (= special-type :timestamp_milliseconds)
+                              {$add [(java.util.Date. 0) $field]}
+
+                              (= special-type :timestamp_seconds)
+                              {$add [(java.util.Date. 
0) {$multiply [$field 1000]}]} + + :else $field)]} + date->string (fn [format-str] + {:___date {:$dateToString {:format (str format-str (:str tz-offset)) + :date $field}}})] + (case unit + :default $field + :minute (date->string "%Y-%m-%dT%H:%M:00") + :minute-of-hour {$minute $field} + :hour (date->string "%Y-%m-%dT%H:00:00") + :hour-of-day {$hour $field} + :day (date->string "%Y-%m-%dT00:00:00") + :day-of-week {$dayOfWeek $field} + :day-of-month {$dayOfMonth $field} + :day-of-year {$dayOfYear $field} + :week nil + :week-of-year {$week $field} + :month (date->string "%Y-%m") + :month-of-year {$month $field} + :quarter nil + :quarter-of-year nil #_{$divide [{$add [{$month $field} 2]} + 3]} + :year {$year $field}))) + +(defn- handle-order-by [{:keys [order-by]} pipeline] + (if-not (seq order-by) + pipeline + (conj pipeline + {$sort (into (array-map) (for [{:keys [field direction]} order-by] + {(field->name field) (case direction + :ascending 1 + :descending -1)}))}))) + +(defn- handle-filter [{filter-clause :filter} pipeline] + (if-not filter-clause + pipeline + (conj pipeline + {$match (parse-filter-clause filter-clause)}))) + +(defn- handle-fields [{:keys [fields]} pipeline] + (if-not (seq fields) + pipeline + (conj pipeline + {$project (into (array-map) (for [field fields] + {(field->name field) (field->$ field)}))}))) + +(defn- handle-limit [{:keys [limit]} pipeline] + (if-not limit + pipeline + (conj pipeline {$limit limit}))) + +(def ^:private ^:const ag-type->field-name + {:avg "avg" + :count "count" + :distinct "count" + :sum "sum"}) + +(defn- ag-type->group-by-clause [{:keys [aggregation-type field]}] + (if-not field + (case aggregation-type + :count {$sum 1}) + (case aggregation-type + :avg {$avg (field->$ field)} + :count {$sum {$cond {:if (field->$ field) + :then 1 + :else 0}}} + :distinct {$addToSet (field->$ field)} + :sum {$sum (field->$ field)}))) + +(defn- handle-breakout+aggregation [{breakout-fields :breakout, {ag-type :aggregation-type, :as aggregation} :aggregation} pipeline] + (if (or (not ag-type) + (= ag-type :rows)) + pipeline + (let [ag-field-name (ag-type->field-name ag-type)] + (vec (concat pipeline + (filter identity + [(when (seq breakout-fields) + {$project {"_id" "$_id" + "___group" (into {} (for [field breakout-fields] ; create a totally sweet made-up column called __group + {(field->name field "___") (field->$ field)}))}}) ; to store the fields we'd like to group by + {$group {"_id" (when (seq breakout-fields) + "$___group") + ag-field-name (ag-type->group-by-clause aggregation)}} + {$sort {"_id" 1}} + {$project (merge {"_id" false + ag-field-name (if (= ag-type :distinct) + {$size "$count"} ; HACK + true)} + (into {} (for [field breakout-fields] + {(field->name field "___") (format "$_id.%s" (field->name field "___"))})))}])))))) + +(defn- handle-page [{{page-num :page items-per-page :items, :as page-clause} :page} pipeline] + (if-not page-clause + pipeline + (conj pipeline + {$skip (* items-per-page (dec page-num))} + {$limit items-per-page}))) + + +;;; # process + run + +(defn- process-structured [query] + (->> [] + (handle-filter query) + (handle-breakout+aggregation query) + (handle-order-by query) + (handle-fields query) + (handle-limit query) + (handle-page query))) -;; ### APPLY-CLAUSE +;;; BREAKOUT FIELD NAME ESCAPING FOR $GROUP +;; We're not allowed to use field names that contain a period in the Mongo aggregation $group stage. 
+;; Not OK: +;; {"$group" {"source.username" {"$first" {"$source.username"}, "_id" "$source.username"}}, ...} +;; +;; For *nested* Fields, we'll replace the '.' with '___', and restore the original names afterward. +;; Escaped: +;; {"$group" {"source___username" {"$first" {"$source.username"}, "_id" "$source.username"}}, ...} +(defn- unescape-nested-field-names + "Restore the original, unescaped nested Field names in the keys of RESULTS. + E.g. `:source___service` becomes `:source.service`" + [results] + ;; Build a map of escaped key -> unescaped key by looking at the keys in the first result + ;; e.g. {:source___username :source.username} + (let [replacements (into {} (for [k (keys (first results))] + (let [k-str (name k) + unescaped (s/replace k-str #"___" ".")] + (when-not (= k-str unescaped) + {k (keyword unescaped)}))))] + ;; If the map is non-empty then map set/rename-keys over the results with it + (if-not (seq replacements) + results + (for [row results] + (set/rename-keys row replacements))))) -(defmacro match-clause - "Generate a `match` form against all the clauses defined by `defclause`." - [clause] - `(match ~clause - ~@@clauses - ~'_ nil)) -(defn apply-clause - "Match CLAUSE against a clause defined by `defclause`." - [clause] - (match-clause clause)) +(defn- unstringify-dates + "Convert string dates, which we wrap in dictionaries like `{:___date <str>}`, back to `Timestamps`. + This can't be done within the Mongo aggregation framework itself." + [results] + (for [row results] + (into {} (for [[k v] row] + {k (if (and (map? v) + (:___date v)) + (u/parse-iso8601 (:___date v)) + v)})))) + +(defn- process-and-run-structured [{database :database, {{source-table-name :name} :source-table} :query, :as query}] + {:pre [(map? database) + (string? 
source-table-name)]}
+  (binding [*query* query]
+    (let [generated-query (process-structured (:query query))]
+      (log-monger-form generated-query)
+      (->> (with-mongo-connection [_ database]
+             (mc/aggregate *mongo-connection* source-table-name generated-query
+                           :allow-disk-use true))
+           unescape-nested-field-names
+           unstringify-dates))))
diff --git a/test/metabase/driver/query_processor_test.clj b/test/metabase/driver/query_processor_test.clj
index d676c6f5f454f7c18717728b5c52c94293dada68..049b79b6e81e8ec2fd20b776f123ad9da8077170 100644
--- a/test/metabase/driver/query_processor_test.clj
+++ b/test/metabase/driver/query_processor_test.clj
@@ -1091,7 +1091,7 @@
     limit 10
     return rows)))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[#inst "2015-06-01T10:31" 1]
   [#inst "2015-06-01T16:06" 1]
   [#inst "2015-06-01T17:23" 1]
@@ -1104,7 +1104,7 @@
   [#inst "2015-06-02T11:11" 1]]
  (sad-toucan-incidents-with-bucketing :default))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[#inst "2015-06-01T10:31" 1]
   [#inst "2015-06-01T16:06" 1]
   [#inst "2015-06-01T17:23" 1]
@@ -1117,7 +1117,7 @@
   [#inst "2015-06-02T11:11" 1]]
  (sad-toucan-incidents-with-bucketing :minute))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[0 5]
   [1 4]
   [2 2]
@@ -1130,7 +1130,7 @@
   [9 1]]
  (sad-toucan-incidents-with-bucketing :minute-of-hour))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[#inst "2015-06-01T10" 1]
   [#inst "2015-06-01T16" 1]
   [#inst "2015-06-01T17" 1]
@@ -1143,7 +1143,7 @@
   [#inst "2015-06-02T13" 1]]
  (sad-toucan-incidents-with-bucketing :hour))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[0 8]
   [1 9]
   [2 7]
@@ -1156,7 +1156,7 @@
   [9 7]]
  (sad-toucan-incidents-with-bucketing :hour-of-day))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[#inst "2015-06-01T07" 8]
   [#inst "2015-06-02T07" 9]
   [#inst "2015-06-03T07" 9]
@@ -1179,7 +1179,7 @@
   [7 22]]
  (sad-toucan-incidents-with-bucketing :day-of-week))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[1 8]
   [2 9]
   [3 9]
@@ -1192,7 +1192,7 @@
   [10 10]]
  (sad-toucan-incidents-with-bucketing :day-of-month))
 
-(datasets/expect-with-datasets #{:h2 :postgres :mysql}
+(datasets/expect-with-all-datasets
  [[152 8]
   [153 9]
   [154 9]
@@ -1205,7 +1205,7 @@
   [161 10]]
  (sad-toucan-incidents-with-bucketing :day-of-year))
 
-(datasets/expect-with-datasets 
#{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets [[#inst "2015-05-31T07" 49] [#inst "2015-06-07T07" 47] [#inst "2015-06-14T07" 39] @@ -1213,7 +1213,7 @@ [#inst "2015-06-28T07" 7]] (sad-toucan-incidents-with-bucketing :week)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets [[23 49] [24 47] [25 39] @@ -1221,11 +1221,11 @@ [27 7]] (sad-toucan-incidents-with-bucketing :week-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets [[#inst "2015-06-01T07" 200]] (sad-toucan-incidents-with-bucketing :month)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets [[6 200]] (sad-toucan-incidents-with-bucketing :month-of-year)) @@ -1237,7 +1237,7 @@ [[2 200]] (sad-toucan-incidents-with-bucketing :quarter-of-year)) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets [[2015 200]] (sad-toucan-incidents-with-bucketing :year)) @@ -1260,22 +1260,22 @@ filter = ["datetime_field" (id :checkins :timestamp) "as" (name field-grouping)] (apply vector "relative_datetime" relative-datetime-args) return first-row first))) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute -1 "minute")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-minute :minute 1 "minute")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-minute :minute "current")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-minute :minute -1 "minute")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-minute :minute 1 "minute")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour -1 "hour")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 4 (count-of-grouping checkins:4-per-hour :hour 1 "hour")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-hour :hour "current")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-hour :hour -1 "hour")) +(datasets/expect-with-all-datasets 4 (count-of-grouping checkins:4-per-hour :hour 1 "hour")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day "current")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day -1 "day")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 1 (count-of-grouping checkins:1-per-day :day 1 "day")) +(datasets/expect-with-all-datasets 1 (count-of-grouping checkins:1-per-day :day "current")) +(datasets/expect-with-all-datasets 1 (count-of-grouping checkins:1-per-day :day -1 "day")) +(datasets/expect-with-all-datasets 1 (count-of-grouping checkins:1-per-day :day 1 "day")) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} 7 (count-of-grouping checkins:1-per-day :week "current")) +(datasets/expect-with-all-datasets 7 (count-of-grouping checkins:1-per-day :week "current")) ;; SYNTACTIC SUGAR -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets 1 (with-temp-db [_ checkins:1-per-day] (-> (driver/process-query @@ -1286,7 +1286,7 @@ :filter ["TIME_INTERVAL" (id :checkins :timestamp) 
"current" "day"]}}) :data :rows first first))) -(datasets/expect-with-datasets #{:h2 :postgres :mysql} +(datasets/expect-with-all-datasets 7 (with-temp-db [_ checkins:1-per-day] (-> (driver/process-query