Commit 66b1f5e4 authored by Cam Saül

Remove Q macro; return innermost schema validation errors :scream_cat:

parent 2dca5608
......@@ -12,6 +12,7 @@
[metabase.driver :as driver]
[metabase.driver.generic-sql :as sql]
[metabase.driver.query-processor :as qp]
metabase.driver.query-processor.interface
[metabase.util :as u])
(:import java.sql.Timestamp
java.util.Date
......
......@@ -6,6 +6,7 @@
[clojure.walk :as walk]
[korma.core :as k]
[medley.core :as m]
schema.utils
[swiss.arrows :refer [<<-]]
[metabase.db :refer :all]
[metabase.driver :as driver]
......@@ -15,7 +16,8 @@
[resolve :as resolve])
(metabase.models [field :refer [Field], :as field]
[foreign-key :refer [ForeignKey]])
[metabase.util :as u]))
[metabase.util :as u])
(:import (schema.utils NamedError ValidationError)))
;; # CONSTANTS
......@@ -53,20 +55,45 @@
(or (not ag-type)
(= ag-type :rows))))
(defn- fail [query, ^Throwable e, & [additional-info]]
(merge {:status :failed
:class (class e)
:error (or (.getMessage e) (str e))
:stacktrace (u/filtered-stacktrace e)
:query (dissoc query :database :driver)
:expanded-query (try (dissoc (resolve/resolve (expand/expand query)) :database :driver)
(catch Throwable e
{:error (or (.getMessage e) (str e))
:stacktrace (u/filtered-stacktrace e) }))}
(when-let [data (ex-data e)]
{:ex-data data})
additional-info))
(defn- explain-schema-validation-error
"Return a nice error message to explain the schema validation error."
[error]
(println "ERROR:" error)
(cond
(instance? NamedError error) (let [nested-error (.error ^NamedError error)] ; recurse until we find the innermost nested named error, which is the reason we actually failed
(if (instance? NamedError nested-error)
(recur nested-error)
(or (when (map? nested-error)
(when-let [nested-error (first (filter (partial instance? NamedError)
(vals nested-error)))]
(explain-schema-validation-error nested-error)))
(.name ^NamedError error))))
(instance? ValidationError error) (schema.utils/validation-error-explain error)))
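For context, a rough REPL sketch of the unwrapping (the error values are illustrative assumptions, not output from this commit; `s` is an alias for `schema.core`):
(schema.utils/validation-error-explain (s/check s/Int "foo"))
;; => (not (integer? "foo"))   ; a leaf ValidationError, explained directly
;; A NamedError produced by an (s/named ... "Invalid filter clause") schema is unwrapped
;; recursively; the innermost named or validation error is what gets reported:
(explain-schema-validation-error hypothetical-named-error)
;; => "Invalid filter clause"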
(defn- wrap-catch-exceptions [qp]
(fn [query]
(try (qp query)
(catch clojure.lang.ExceptionInfo e
(fail query e (when-let [data (ex-data e)]
(when (= (:type data) :schema.core/error)
(when-let [error (explain-schema-validation-error (:error data))]
{:error error})))))
(catch Throwable e
{:status :failed
:class (class e)
:error (or (.getMessage e) (str e))
:stacktrace (u/filtered-stacktrace e)
:query (dissoc query :database :driver)
:expanded-query (try (dissoc (resolve/resolve (expand/expand query)) :database :driver)
(catch Throwable e
{:error (or (.getMessage e) (str e))
:stacktrace (u/filtered-stacktrace e) }))}))))
(fail query e)))))
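Putting the two together, a hypothetical failure map for a query that fails schema validation (values are illustrative; because `merge` is applied left to right, the innermost schema message passed as ADDITIONAL-INFO wins over the generic exception message):
;; ((wrap-catch-exceptions qp) bad-query)
;; => {:status         :failed
;;     :class          clojure.lang.ExceptionInfo
;;     :error          "Invalid order-by direction"   ; innermost schema message
;;     :stacktrace     [...]
;;     :query          {...}
;;     :expanded-query {...}
;;     :ex-data        {:type :schema.core/error, ...}}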
(defn- pre-expand [qp]
......
......@@ -59,10 +59,6 @@
metabase.driver.query_processor.interface.Field
(if-let [parent (:parent this)]
;; Nested Mongo fields come back inside of their parent when you specify them in the fields clause
;; e.g. (Q fields venue...name) will return rows like {:venue {:name "Kyle's Low-Carb Grill"}}
;; Until we fix this the right way we'll just include the parent Field in the :query-fields list so the pattern
;; matching works correctly.
[this parent]
[this])
......
......@@ -20,15 +20,6 @@
StringFilter
ValuePlaceholder)))
;; *driver* is always bound when running queries the normal way, e.g. via `metabase.driver/process-query`
;; It is not necessarily bound when using various functions like `fk->` in the REPL.
;; The check is not performed in those cases to allow flexibility when composing queries for tests or interactive development
(defn- assert-driver-supports
"When `*driver*` is bound, assert that is supports keyword FEATURE."
[feature]
(when *driver*
(when-not (contains? (driver/features *driver*) feature)
(throw (Exception. (str (name feature) " is not supported by this driver."))))))
;;; # ------------------------------------------------------------ Token dispatch ------------------------------------------------------------
......@@ -50,6 +41,8 @@
:and :or :inside :between := :!= :< :> :<= :>= :starts-with :contains :ends-with :time-interval
;; fields
:fk-> :datetime-field
;; order-by subclauses
:asc :desc
;; values
:relative-datetime})
......@@ -105,7 +98,7 @@
(fk-> 100 200) ; refer to Field 200, which is part of another Table; join to the other table via our foreign key 100"
[fk-field-id :- s/Int, dest-field-id :- s/Int]
(assert-driver-supports :foreign-keys)
(i/assert-driver-supports :foreign-keys)
(i/map->FieldPlaceholder {:fk-field-id fk-field-id, :field-id dest-field-id}))
......@@ -143,7 +136,7 @@
(defn stddev
"Aggregation clause. Return the standard deviation of values of F."
[f]
(assert-driver-supports :standard-deviation-aggregations)
(i/assert-driver-supports :standard-deviation-aggregations)
(ag-with-field :stddev f))
(s/defn ^:always-validate count :- i/CountAggregation
......@@ -284,10 +277,25 @@
(vector? subclause) (let [[f direction] subclause]
{:field (field f), :direction (normalize-token direction)})))
(s/defn ^:always-validate asc :- i/OrderBy
"order-by subclause. Specify that results should be returned in ascending order for Field or AgRef F.
(order-by {} (asc 100))"
[f :- i/FieldPlaceholderOrAgRef]
{:field (field f), :direction :ascending})
(s/defn ^:always-validate desc :- i/OrderBy
"order-by subclause. Specify that results should be returned in ascending order for Field or AgRef F.
(order-by {} (desc 100))"
[f :- i/FieldPlaceholderOrAgRef]
{:field (field f), :direction :descending})
(defn order-by
"Specify how ordering should be done for this query.
(order-by {} [20 :ascending]) ; order by field 20
(order-by {} (asc 20)) ; order by field 20
(order-by {} [20 :ascending]) ; order by field 20 (legacy syntax)
(order-by {} [(aggregate-field 0) :descending]) ; order by the aggregate field (e.g. :count)"
[query & subclauses]
(assoc query :order-by (mapv maybe-parse-order-by-subclause subclauses)))
......@@ -342,10 +350,9 @@
[outer-query]
(update outer-query :query expand-inner))
(defn validate-query
(def ^{:arglists '([query])} validate-query
"Check that a given query is valid, returning it as-is if so."
[query]
(s/validate i/Query query))
(s/validator i/Query))
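If it helps to see the behavior: `s/validator` compiles a function that returns the value unchanged when it matches `i/Query` and otherwise throws an `ExceptionInfo` whose `ex-data` carries `:type :schema.core/error`, which is exactly what `wrap-catch-exceptions` looks for. A hedged sketch:
(validate-query expanded-inner-query)    ; hypothetical valid inner query => returned as-is
(validate-query {:not "a real query"})   ; => throws ExceptionInfo
;; (ex-data *e) => {:type :schema.core/error, :error <nested NamedError/ValidationError>, ...}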
(defmacro query
"Build a query by threading an (initially empty) map through each form in BODY with `->`.
......@@ -356,11 +363,20 @@
~@body
validate-query))
(s/defn ^:always-validate run-query* [query :- i/Query]
(s/defn ^:always-validate run-query*
"Call `driver/process-query` on expanded inner QUERY, looking up the `Database` ID for the `source-table.`
(run-query* (query (source-table 5) ...))"
[query :- i/Query]
(let [db-id (db/sel :one :field [Table :db_id], :id (:source-table query))]
(driver/process-query {:database db-id
:type :query
:query query})))
(defmacro run-query [& body]
(defmacro run-query
"Build and run a query.
(run-query (source-table 5) ...)"
{:style/indent 0}
[& body]
`(run-query* (query ~@body)))
......@@ -3,6 +3,7 @@
This namespace should just contain definitions of various protocols and record types; associated logic
should go in `metabase.driver.query-processor.expand`."
(:require [schema.core :as s]
[metabase.driver :as driver]
[metabase.models.field :as field]
[metabase.util :as u])
(:import clojure.lang.Keyword
......@@ -11,6 +12,16 @@
(def ^:dynamic *driver*
nil)
;; *driver* is always bound when running queries the normal way, e.g. via `metabase.driver/process-query`
;; It is not necessarily bound when using various functions like `fk->` in the REPL.
;; The check is not performed in those cases to allow flexibility when composing queries for tests or interactive development
(defn assert-driver-supports
"When `*driver*` is bound, assert that is supports keyword FEATURE."
[feature]
(when *driver*
(when-not (contains? (driver/features *driver*) feature)
(throw (Exception. (str (name feature) " is not supported by this driver."))))))
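A hedged usage sketch (the driver value is illustrative):
(assert-driver-supports :foreign-keys)
;; => nil when *driver* is unbound, so composing queries in the REPL or in tests keeps working
(binding [*driver* some-driver-without-stddev]   ; hypothetical driver instance
  (assert-driver-supports :standard-deviation-aggregations))
;; => throws Exception: "standard-deviation-aggregations is not supported by this driver."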
;; Expansion Happens in a Few Stages:
;; 1. A query dict is parsed via pattern-matching code in the Query Expander.
;; field IDs and values are replaced with FieldPlaceholders and ValuePlaceholders, respectively.
......@@ -62,16 +73,16 @@
(def ^:const datetime-field-units
"Valid units for a `DateTimeField`."
"Invalid units for a `DateTimeField`."
#{:default :minute :minute-of-hour :hour :hour-of-day :day :day-of-week :day-of-month :day-of-year
:week :week-of-year :month :month-of-year :quarter :quarter-of-year :year})
(def ^:const relative-datetime-value-units
"Valid units for a `RelativeDateTimeValue`."
"Invalid units for a `RelativeDateTimeValue`."
#{:minute :hour :day :week :month :quarter :year})
(def DatetimeFieldUnit (s/named (apply s/enum datetime-field-units) "Valid datetime unit for a field"))
(def DatetimeValueUnit (s/named (apply s/enum relative-datetime-value-units) "Valid datetime unit for a relative datetime"))
(def DatetimeFieldUnit (s/named (apply s/enum datetime-field-units) "Invalid datetime unit for a field"))
(def DatetimeValueUnit (s/named (apply s/enum relative-datetime-value-units) "Invalid datetime unit for a relative datetime"))
(defn datetime-field-unit? [unit]
(contains? datetime-field-units (keyword unit)))
......@@ -102,22 +113,25 @@
;; Replace Field IDs with these during first pass
(s/defrecord FieldPlaceholder [field-id :- s/Int
fk-field-id :- (s/maybe s/Int)
fk-field-id :- (s/maybe (s/both s/Int
(s/named (s/pred (fn [_] (or (assert-driver-supports :foreign-keys)
true)))
"foreign-keys is not supported by this driver.")))
datetime-unit :- (s/maybe (apply s/enum datetime-field-units))])
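Since the check is now embedded in the schema itself via `s/pred`, a `FieldPlaceholder` carrying an `:fk-field-id` re-asserts `:foreign-keys` support whenever the query is validated, not only at the moment `fk->` is called. A hedged sketch:
;; (binding [*driver* driver-without-fks]            ; hypothetical driver
;;   (s/validate Query expanded-query-using-fk->))   ; expanded query is illustrative
;; => the s/pred wrapping :fk-field-id calls assert-driver-supports, which throws
;;    "foreign-keys is not supported by this driver."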
(s/defrecord AgFieldRef [index :- s/Int]) ; e.g. 0
(def FieldPlaceholderOrAgRef (s/named (s/cond-pre FieldPlaceholder AgFieldRef) "Valid field (field ID or aggregate field reference)"))
(def FieldPlaceholderOrAgRef (s/named (s/cond-pre FieldPlaceholder AgFieldRef) "Invalid field (not a field ID or aggregate field reference)"))
(s/defrecord RelativeDatetime [amount :- s/Int
unit :- (s/maybe DatetimeValueUnit)])
(def LiteralDatetimeString (s/constrained s/Str u/date-string? "ISO-8601 datetime string literal"))
(def LiteralDatetime (s/named (s/cond-pre java.sql.Date LiteralDatetimeString) "Datetime literal (ISO-8601 string or java.sql.Date)"))
(def Datetime (s/named (s/cond-pre RelativeDatetime LiteralDatetime) "Valid datetime (ISO-8601 string literal or relative-datetime form)"))
(def OrderableValue (s/named (s/cond-pre s/Num Datetime) "Orderable value (number or datetime)"))
(def AnyValue (s/named (s/maybe (s/cond-pre s/Bool s/Str OrderableValue)) "Valid value (nil, boolean, number, string, or relative-datetime form)"))
(def LiteralDatetimeString (s/both s/Str (s/named (s/pred u/date-string?) "Invalid ISO-8601 datetime string literal")))
(def LiteralDatetime (s/named (s/cond-pre java.sql.Date LiteralDatetimeString) "Invalid datetime literal (must be ISO-8601 string or java.sql.Date)"))
(def Datetime (s/named (s/cond-pre RelativeDatetime LiteralDatetime) "Invalid datetime (must be an ISO-8601 string literal or a relative-datetime form)"))
(def OrderableValue (s/named (s/cond-pre s/Num Datetime) "Invalid orderable value (must be number or datetime)"))
(def AnyValue (s/named (s/maybe (s/cond-pre s/Bool s/Str OrderableValue)) "Invalid value (must be nil, boolean, number, string, or a relative-datetime form)"))
;; Replace values with these during first pass over Query.
;; Include associated Field ID so the appropriate info can be found during Field resolution
......@@ -137,13 +151,18 @@
(def CountAggregation {:aggregation-type (s/eq :count)
(s/optional-key :field) FieldPlaceholder})
(def OtherAggregation {:aggregation-type (s/named (s/enum :avg :cumulative-sum :distinct :stddev :sum) "Valid aggregation type")
:field FieldPlaceholder})
(def OtherAggregation (s/both {:aggregation-type (s/named (s/enum :avg :cumulative-sum :distinct :stddev :sum) "Invalid aggregation type")
:field FieldPlaceholder}
(s/named (s/pred (fn [{:keys [aggregation-type]}]
(when (= aggregation-type :stddev)
(assert-driver-supports :standard-deviation-aggregations))
true))
"standard-deviation-aggregations is not supported by this driver.")))
(def Aggregation (s/named (s/if #(= (get % :aggregation-type) :count)
CountAggregation
OtherAggregation)
"Valid aggregation clause"))
"Invalid aggregation clause."))
(s/defrecord EqualityFilter [filter-type :- (s/enum := :!=)
......@@ -166,19 +185,19 @@
(def SimpleFilter (s/cond-pre EqualityFilter ComparisonFilter BetweenFilter StringFilter))
(s/defrecord CompoundFilter [compound-type :- (s/enum :and :or)
subclauses :- [(s/named (s/cond-pre SimpleFilter CompoundFilter) "Valid filter subclause in compound (and/or) filter")]])
subclauses :- [(s/named (s/cond-pre SimpleFilter CompoundFilter) "Invalid filter subclause in compound (and/or) filter")]])
(def Filter (s/named (s/cond-pre SimpleFilter CompoundFilter) "Valid filter clause"))
(def Filter (s/named (s/cond-pre SimpleFilter CompoundFilter) "Invalid filter clause"))
(def OrderBy (s/named {:field FieldPlaceholderOrAgRef
:direction (s/named (s/enum :ascending :descending) "Valid order-by direction")}
"Valid order-by subclause"))
:direction (s/named (s/enum :ascending :descending) "Invalid order-by direction")}
"Invalid order-by subclause"))
(def Page (s/named {:page s/Int
:items s/Int}
"Valid page clause"))
"Invalid page clause"))
(def Query
......
......@@ -120,7 +120,7 @@
#{:standard-deviation-aggregations}
;; HACK SQLite doesn't support ALTER TABLE ADD CONSTRAINT FOREIGN KEY and I don't have all day to work around this
;; so for now we'll just skip the foreign key stuff in the tests.
(when (config/is-test?)
(when true #_(config/is-test?)
#{:foreign-keys})))})
sql/ISQLDriver
(merge (sql/ISQLDriverDefaultsMixin)
......
(ns metabase.driver.mysql-test
(:require [expectations :refer :all]
[metabase.driver.query-processor.expand :as ql]
[metabase.test.data :as data]
(metabase.test.data [datasets :refer [expect-with-engine]]
[interface :refer [def-database-definition]])
[metabase.test.util.q :refer [Q]]))
[interface :refer [def-database-definition]])))
;; MySQL allows 0000-00-00 dates, but JDBC does not; make sure that MySQL is converting them to NULL when returning them like we asked
(def-database-definition ^:private ^:const all-zero-dates
......@@ -12,5 +13,6 @@
(expect-with-engine :mysql
[[1 nil]]
(Q dataset metabase.driver.mysql-test/all-zero-dates
return rows of exciting-moments-in-history))
(data/dataset all-zero-dates
(ql/run-query
(ql/source-table (data/id :exciting-moments-in-history)))))
(ns metabase.driver.postgres-test
(:require [expectations :refer :all]
[metabase.driver.generic-sql :as sql]
[metabase.driver.query-processor.expand :as ql]
[metabase.test.data :as data]
(metabase.test.data [datasets :refer [expect-with-engine]]
[interface :refer [def-database-definition]])
[metabase.test.util.q :refer [Q]])
[metabase.util :as u])
(:import metabase.driver.postgres.PostgresDriver))
;; # Check that SSL params get added to the connection details in the way we'd like
......@@ -45,25 +47,24 @@
[#uuid "7a5ce4a2-0958-46e7-9685-1a4eaa3bd08a"]
[#uuid "84ed434e-80b4-41cf-9c88-e334427104ae"]]])
;; Check that we can load a Postgres Database with a :UUIDField
(expect-with-engine :postgres
{:cols [{:description nil, :base_type :IntegerField, :schema_name "public", :name "id", :display_name "Id", :preview_display true, :special_type :id, :target nil, :extra_info {}}
{:description nil, :base_type :UUIDField, :schema_name "public", :name "user_id", :display_name "User Id", :preview_display true, :special_type :category, :target nil, :extra_info {}}],
:columns ["id" "user_id"],
:rows [[1 #uuid "4f01dcfd-13f7-430c-8e6f-e505c0851027"]
[2 #uuid "4652b2e7-d940-4d55-a971-7e484566663e"]
[3 #uuid "da1d6ecc-e775-4008-b366-c38e7a2e8433"]
[4 #uuid "7a5ce4a2-0958-46e7-9685-1a4eaa3bd08a"]
[5 #uuid "84ed434e-80b4-41cf-9c88-e334427104ae"]]}
(-> (Q dataset metabase.driver.postgres-test/with-uuid use postgres
return :data
aggregate rows of users)
(update :cols (partial mapv #(dissoc % :id :table_id)))))
[{:name "id", :base_type :IntegerField}
{:name "user_id", :base_type :UUIDField}]
(->> (data/dataset with-uuid
(ql/run-query
(ql/source-table (data/id :users))))
:data
:cols
(mapv (u/rpartial select-keys [:name :base_type]))))
;; Check that we can filter by a UUID Field
(expect-with-engine :postgres
[[2 #uuid "4652b2e7-d940-4d55-a971-7e484566663e"]]
(Q dataset metabase.driver.postgres-test/with-uuid use postgres
return rows
aggregate rows of users
filter = user_id "4652b2e7-d940-4d55-a971-7e484566663e"))
(-> (data/dataset with-uuid
(ql/run-query
(ql/source-table (data/id :users))
(ql/filter (ql/= (data/id :users :user_id) "4652b2e7-d940-4d55-a971-7e484566663e"))))
:data :rows))
......@@ -281,18 +281,6 @@
`(ql/run-query (ql/source-table (id ~(keyword table)))
~@(map (partial $->id (keyword table)) forms)))
(defn- do-with-dataset [dataset f]
(let [dataset-var (ns-resolve 'metabase.test.data.dataset-definitions dataset)]
(when-not dataset-var
(throw (Exception. (format "Dataset definition not found: metabase.test.data.dataset-definitions/%s" dataset))))
(with-db (get-or-create-database! @dataset-var)
(f))))
(defmacro dataset
{:style/indent 1}
[dataset & body]
`(do-with-dataset '~dataset (fn [] ~@body)))
;; # THE TESTS THEMSELVES (!)
......@@ -1104,10 +1092,9 @@
["twitter" 98]
["yelp" 90]]
:columns ["source.service" "count"]}
(->> (dataset geographical-tips
(query tips
(ql/aggregation :count)
(ql/breakout $tips.source.service)))
(->> (query tips
(ql/aggregation :count)
(ql/breakout $tips.source.service))
:data (#(dissoc % :cols)) (format-rows-by [str int])))
;;; Nested Field in FIELDS
......
......@@ -44,6 +44,33 @@
[db & body]
`(do-with-db ~db (fn [] ~@body)))
(defn do-with-dataset
"Bind `Database` for DATASET as the current DB and execute F.
DATASET is a *symbol* that can be resolved in the current namespace or in `metabase.test.data.dataset-definitions`:
(do-with-dataset 'some-local-db-def f)
(do-with-dataset 'some-other-ns/some-db-def f)
(do-with-dataset 'sad-toucan-incidents f) ; metabase.test.data.dataset-definitions/sad-toucan-incidents"
[dataset f]
{:pre [(symbol? dataset)]}
(let [dataset-var (or (resolve dataset)
(ns-resolve 'metabase.test.data.dataset-definitions dataset))]
(when-not dataset-var
(throw (Exception. (format "Dataset definition not found: '%s' or 'metabase.test.data.dataset-definitions/%s'" dataset dataset))))
(with-db (get-or-create-database! @dataset-var)
(f))))
(defmacro dataset
"Convenience wrapper for `do-with-dataset`.
Bind `Database` for DATASET as the current DB and execute BODY.
DATASET is an unquoted symbol name of a dataset resolvable in the current namespace or in `metabase.test.data.dataset-definitions`.
(dataset sad-toucan-incidents
...)"
{:style/indent 1}
[dataset & body]
`(do-with-dataset '~dataset (fn [] ~@body)))
(defn format-name [nm]
(i/format-name *data-loader* (name nm)))
......
(ns metabase.test.util.q
"See https://github.com/metabase/metabase-init/wiki/Q-Cheatsheet"
(:refer-clojure :exclude [or and filter use = != < > <= >=])
(:require [clojure.core :as core]
[clojure.core.match :refer [match]]
[metabase.db :as db]
[metabase.driver :as driver]
[metabase.test.data :as data]
(metabase.test.data [datasets :as datasets]
dataset-definitions)
[metabase.util :as u]))
;;; # HELPER FNs
;;; ## TOKEN SPLITTING
(def ^:private ^:const top-level-tokens
'#{use of dataset return aggregate breakout fields filter limit order page})
(defn- qualify-token [token]
(symbol (str "metabase.test.util.q/" token)))
(defn- qualify-form [[f & args]]
`(~(qualify-token f) ~@args))
(defn- split-with-tokens [tokens args]
(loop [acc [], current-group [], [arg & more] args]
(cond
(nil? arg) (->> (conj acc (apply list current-group))
(core/filter seq)
(map qualify-form))
(contains? tokens arg) (recur (conj acc (apply list current-group)) [arg] more)
:else (recur acc (conj current-group arg) more))))
;;; ## ID LOOKUP
(def ^:dynamic *table-name* nil)
(defmacro field [f]
(core/or
(if-not (symbol? f) f
(let [f (name f)]
(u/cond-let
;; x->y <-> ["fk->" x y]
[[_ from to] (re-matches #"^(.+)->(.+)$" f)]
["fk->" `(field ~(symbol from)) `(field ~(symbol to))]
;; x...y <-> ?
[[_ f sub] (re-matches #"^(.+)\.\.\.(.+)$" f)]
`(~@(macroexpand-1 `(field ~(symbol f))) ~(keyword sub))
;; ag.0 <-> ["aggregation" 0]
[[_ ag-field-index] (re-matches #"^ag\.(\d+)$" f)]
["aggregation" (Integer/parseInt ag-field-index)]
;; table.field <-> (id table field)
[[_ table field] (re-matches #"^([^\.]+)\.([^\.]+)$" f)]
`(data/id ~(keyword table) ~(keyword field)))))
;; fallback : (id *table-name* field)
`(data/id *table-name* ~(keyword f))))
(defn resolve-dataset [dataset]
(var-get (core/or (resolve dataset)
(ns-resolve 'metabase.test.data.dataset-definitions dataset)
(throw (Exception. (format "Don't know how to find dataset '%s'." dataset))))))
;;; # DSL KEYWORD MACROS
;;; ## USE
(defmacro use [query db]
(assoc-in query [:context :engine] (keyword db)))
;;; ## OF
(defmacro of [query table-name]
(-> query
(assoc-in [:query :source_table] `(data/id ~(keyword table-name)))
(assoc-in [:context :table-name] (keyword table-name))))
;;; ## DATASET
(defmacro dataset [query dataset-name]
(assoc-in query [:context :dataset] `'~dataset-name))
;;; ## RETURN
(defmacro return [query & args]
(assoc-in query [:context :return] (vec (mapcat (fn [arg]
(cond
(core/= arg 'rows) [:data :rows]
(core/= arg 'first-row) [:data :rows first]
:else [arg]))
args))))
;;; ## AGGREGATE
(defmacro aggregate [query & args]
(assoc-in query [:query :aggregation] (match (vec args)
['rows] ["rows"]
['count] ["count"]
['count id] ["count" `(field ~id)]
['avg id] ["avg" `(field ~id)]
['distinct id] ["distinct" `(field ~id)]
['stddev id] ["stddev" `(field ~id)]
['sum id] ["sum" `(field ~id)]
['cum-sum id] ["cum_sum" `(field ~id)])))
;;; ## BREAKOUT
(defmacro breakout [query & fields]
(assoc-in query [:query :breakout] (vec (for [field fields]
`(field ~field)))))
;;; ## FIELDS
(defmacro fields [query & fields]
(assoc-in query [:query :fields] (vec (for [field fields]
`(field ~field)))))
;;; ## FILTER
(def ^:const ^:private filter-clause-tokens
'#{inside not-null is-null between starts-with ends-with contains = != < > <= >=})
(defmacro and [& clauses]
`["AND" ~@clauses])
(defmacro or [& clauses]
`["OR" ~@clauses])
(defmacro inside [{:keys [lat lon]}]
`["INSIDE" (field ~(:field lat)) (field ~(:field lon)) ~(:max lat) ~(:min lon) ~(:min lat) ~(:max lon)])
(defmacro not-null [field]
`["NOT_NULL" (field ~field)])
(defmacro is-null [field]
`["IS_NULL" (field ~field)])
(defmacro between [field min max]
`["BETWEEN" (field ~field) ~min ~max])
(defmacro starts-with [field arg]
`["STARTS_WITH" (field ~field) ~arg])
(defmacro ends-with [field arg]
`["ENDS_WITH" (field ~field) ~arg])
(defmacro contains [field arg]
`["CONTAINS" (field ~field) ~arg])
(defmacro = [field & args]
`["=" (field ~field) ~@args])
(defmacro != [field & args]
`["!=" (field ~field) ~@args])
(defmacro < [field arg]
`["<" (field ~field) ~arg])
(defmacro <= [field arg]
`["<=" (field ~field) ~arg])
(defmacro > [field arg]
`[">" (field ~field) ~arg])
(defmacro >= [field arg]
`[">=" (field ~field) ~arg])
(defn- filter-split [tokens]
(->> (loop [clauses [], current-clause [], [token & more] tokens]
(cond
(nil? token) (conj clauses (apply list current-clause))
(core/= token 'and) (conj clauses (apply list current-clause) `(and ~@(filter-split more)))
(core/= token 'or) (conj clauses (apply list current-clause) `(or ~@(filter-split more)))
(contains? filter-clause-tokens token) (recur (conj clauses (apply list current-clause))
[(qualify-token token)]
more)
:else (recur clauses
(conj current-clause token)
more)))
(core/filter seq)))
(defmacro filter* [& args]
(let [[filter-clause & more] (filter-split args)]
`(~@filter-clause ~@more)))
(defmacro filter [query & args]
(assoc-in query [:query :filter] `(filter* ~@args)))
;;; ## LIMIT
(defmacro limit [query limit]
{:pre [(integer? limit)]}
(assoc-in query [:query :limit] limit))
;;; ## ORDER
(defmacro order* [field-symb]
(let [[_ field +-] (re-matches #"^(.+[^\-+])([\-+])?$" (name field-symb))]
(assert field (format "Invalid field passed to order: '%s'" field-symb))
[`(field ~(symbol field)) (case (keyword (core/or +- '+))
:+ "ascending"
:- "descending")]))
(defmacro order [query & fields]
(assoc-in query [:query :order_by] (vec (for [field fields]
`(order* ~field)))))
;;; ## PAGE
(defmacro page [query page items-symb items]
(assert (and (integer? page)
(core/= items-symb 'items)
(integer? items))
"page clause should be of the form page <page-num> items <items-per-page>")
(assoc-in query [:query :page] {:page page
:items items}))
;;; # TOP-LEVEL MACRO IMPL
(defmacro with-temp-db [dataset query]
(if-not dataset
query
`(data/with-temp-db [~'_ (resolve-dataset ~dataset)]
~query)))
(defmacro with-engine [engine query]
(if-not engine
query
`(datasets/with-engine ~engine
~query)))
(defmacro Q*** [f {:keys [engine dataset return table-name]} query]
(assert table-name
"Table name not specified in query, did you include an 'of' clause?")
`(do (db/setup-db-if-needed)
(->> (with-engine ~engine
(binding [*table-name* ~table-name]
(with-temp-db ~dataset
(~f ~query))))
~@return)))
(defmacro Q** [f q & [form & more]]
(if-not form
`(Q*** ~f ~(:context q) ~(dissoc q :context))
`(Q** ~f ~(macroexpand `(-> ~q ~form)) ~@more)))
(defmacro Q* [f & args]
`(Q** ~f
{:database (data/id)
:type "query"
:query {}
:context {:engine nil
:dataset nil}}
~@(split-with-tokens top-level-tokens args)))
(defmacro Q
"Expand and run a query written with the `Q` shorthand DSL."
[& args]
`(Q* driver/process-query ~@args))
(defmacro Q-expand
"Expand (without running) a query written with the `Q` shorthand DSL."
[& args]
`(Q* identity ~@args))