Commit 90348400 authored by Cam Saul

Re-work annotation -- less wonky, more efficient

parent b9f5c339
......@@ -10,6 +10,7 @@
;; This list isn't complete; add more forms as we come across them.
(define-clojure-indent
(api-let 2)
(assoc* 1)
(auto-parse 1)
(catch-api-exceptions 0)
(check 1)
......
......@@ -329,6 +329,12 @@ CorvusServices.service('CorvusCore', ['$resource', 'User', function($resource, U
}, {
'id': 'state',
'name': 'State'
}, {
id: 'timestamp_seconds',
name: 'UNIX Timestamp (Seconds)'
}, {
id: 'timestamp_milliseconds',
name: 'UNIX Timestamp (Milliseconds)'
}, {
'id': 'url',
'name': 'URL'
......
......@@ -22,14 +22,14 @@
(def ^:const available-drivers
"DB drivers that are available as a dictionary. Each key is a driver with dictionary of attributes.
ex: `:h2 {:id \"h2\" :name \"H2\"}`"
{:h2 {:id "h2"
:name "H2"
{:h2 {:id "h2"
:name "H2"
:example "file:[filename]"}
:postgres {:id "postgres"
:name "Postgres"
:postgres {:id "postgres"
:name "Postgres"
:example "host=[ip address] port=5432 dbname=examples user=corvus password=******"}
:mongo {:id "mongo"
:name "MongoDB"
:mongo {:id "mongo"
:name "MongoDB"
:example "mongodb://password:username@127.0.0.1:27017/db-name"}})
(def ^:const class->base-type
......@@ -166,20 +166,20 @@
[query {:keys [executed_by]
:as options}]
{:pre [(integer? executed_by)]}
(let [query-execution {:uuid (.toString (java.util.UUID/randomUUID))
:executor_id executed_by
:json_query query
:query_id nil
:version 0
:status :starting
:error ""
:started_at (u/new-sql-timestamp)
:finished_at (u/new-sql-timestamp)
:running_time 0
:result_rows 0
:result_file ""
:result_data "{}"
:raw_query ""
(let [query-execution {:uuid (.toString (java.util.UUID/randomUUID))
:executor_id executed_by
:json_query query
:query_id nil
:version 0
:status :starting
:error ""
:started_at (u/new-sql-timestamp)
:finished_at (u/new-sql-timestamp)
:running_time 0
:result_rows 0
:result_file ""
:result_data "{}"
:raw_query ""
:additional_info ""}]
(let [query-execution (assoc query-execution :start_time_millis (System/currentTimeMillis))]
(try
......@@ -197,9 +197,9 @@
(defn query-fail
"Save QueryExecution state and construct a failed query response"
[query-execution error-message]
(let [updates {:status :failed
:error error-message
:finished_at (u/new-sql-timestamp)
(let [updates {:status :failed
:error error-message
:finished_at (u/new-sql-timestamp)
:running_time (- (System/currentTimeMillis) (:start_time_millis query-execution))}]
;; record our query execution and format response
(-> query-execution
......@@ -207,21 +207,21 @@
(merge updates)
(save-query-execution)
;; this is just for the response for the client
(assoc :error error-message
(assoc :error error-message
:row_count 0
:data {:rows []
:cols []
:columns []}))))
:data {:rows []
:cols []
:columns []}))))
(defn query-complete
"Save QueryExecution state and construct a completed (successful) query response"
[query-execution query-result]
;; record our query execution and format response
(-> (u/assoc* query-execution
:status :completed
:finished_at (u/new-sql-timestamp)
:running_time (- (System/currentTimeMillis) (:start_time_millis <>))
:result_rows (get query-result :row_count 0))
:status :completed
:finished_at (u/new-sql-timestamp)
:running_time (- (System/currentTimeMillis) (:start_time_millis <>))
:result_rows (get query-result :row_count 0))
(dissoc :start_time_millis)
(save-query-execution)
;; at this point we've saved and we just need to massage things into our final response format
......
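
Judging from its use in `query-complete`, `u/assoc*` behaves like `assoc` except that later value forms may refer to the map built so far as `<>` (hence the new `(assoc* 1)` indentation rule added to the Emacs config above). A rough plain-`assoc` equivalent of the call above, under that assumption:

(let [qe query-execution
      qe (assoc qe :status       :completed)
      qe (assoc qe :finished_at  (u/new-sql-timestamp))
      qe (assoc qe :running_time (- (System/currentTimeMillis) (:start_time_millis qe)))
      qe (assoc qe :result_rows  (get query-result :row_count 0))]
  qe)
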
......@@ -8,7 +8,6 @@
[metabase.driver.query-processor :as qp]
(metabase.driver.generic-sql [native :as native]
[util :refer :all])
[metabase.driver.generic-sql.query-processor.annotate :as annotate]
(metabase.models [database :refer [Database]]
[field :refer [Field]]
[table :refer [Table]])
......@@ -34,6 +33,15 @@
`(let [entity# (table-id->korma-entity ~source_table)]
(select entity# ~@forms)))))
(defn- uncastify
"Remove CAST statements from a column name if needed.
(uncastify \"DATE\") -> \"DATE\"
(uncastify \"CAST(DATE AS DATE)\") -> \"DATE\""
[column-name]
(let [column-name (name column-name)]
(keyword (or (second (re-find #"CAST\(([^\s]+) AS [\w]+\)" column-name))
column-name))))
(defn process-structured
"Convert QUERY into a korma `select` form, execute it, and annotate the results."
......@@ -42,9 +50,10 @@
(map? (:query query))
(= (name (:type query)) "query")]}
(try
(->> (process query)
eval
(annotate/annotate query))
(binding [qp/*uncastify-fn* uncastify]
(->> (process query)
eval
(qp/annotate query)))
(catch java.sql.SQLException e
(let [^String message (or (->> (.getMessage e) ; error message comes back like "Error message ... [status-code]" sometimes
(re-find #"(?s)(^.*)\s+\[[\d-]+\]$") ; status code isn't useful and makes unit tests hard to write so strip it off
......@@ -142,12 +151,12 @@
[_ field-id & _] {(field-id->kw field-id)
;; If the field in question is a date field we need to cast the YYYY-MM-DD string that comes back from the UI to a SQL date
(let [cast-value-if-needed (fn [v]
(if-not (string? v) v
(cond
(date-field-id? field-id) (u/parse-date-yyyy-mm-dd v)
(= (field-id->special-type field-id)
:timestamp_seconds) (u/date-yyyy-mm-dd->unix-timestamp v)
:else v)))]
(cond (= (type v) java.sql.Date) `(raw ~(format "CAST('%s' AS DATE)" (.toString ^java.sql.Date v)))
(not (string? v)) v
(date-field-id? field-id) (u/parse-date-yyyy-mm-dd v)
(= (field-id->special-type field-id)
:timestamp_seconds) (u/date-yyyy-mm-dd->unix-timestamp v)
:else v))]
(match subclause
["NOT_NULL" _] ['not= nil]
["IS_NULL" _] ['= nil]
......
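
To make the reworked filter-value handling concrete, here is roughly what the local `cast-value-if-needed` above produces for a few hypothetical inputs (dates assume UTC):

;; value the expander already parsed into a java.sql.Date -> emit a raw SQL CAST
(cast-value-if-needed (java.sql.Date. 1433116800000))  ; => (raw "CAST('2015-06-01' AS DATE)")
;; "YYYY-MM-DD" string against a date Field -> parse it into a java.sql.Date
(cast-value-if-needed "2015-06-01")                    ; => a java.sql.Date for 2015-06-01
;; "YYYY-MM-DD" string against a :timestamp_seconds Field -> convert to a Unix timestamp (seconds)
(cast-value-if-needed "2015-06-01")                    ; => 1433116800
;; anything else passes through unchanged
(cast-value-if-needed 42)                              ; => 42
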
(ns metabase.driver.generic-sql.query-processor.annotate
"Functions related to annotating results returned by the Query Processor."
(:require [metabase.db :refer :all]
[metabase.driver.query-processor :as qp]
[metabase.driver.generic-sql.util :as gsu]
[metabase.models.field :refer [Field field->fk-table]]))
(declare get-column-names
get-column-info
uncastify)
(defn annotate
"Take raw RESULTS from running QUERY and convert them to the format expected by the front-end.
Add the following columns (under `:data`):
* `:rows` a sequence of result rows
* `:columns` ordered sequence of column names
* `:cols` ordered sequence of information about each column, such as `:base_type` and `:special_type`"
[query results]
(let [column-names (get-column-names query results)
column-name-kws (map keyword column-names)]
{:rows (->> results
(map (fn [row]
(map row column-name-kws))))
:columns (map uncastify column-names)
:cols (get-column-info query column-names)}))
(defn- order-columns
[query castified-field-names]
(binding [qp/*uncastify-fn* uncastify]
(qp/order-columns query castified-field-names)))
(defn- get-column-names
"Get an ordered seqences of column names for the results.
If a `fields` clause was specified in the Query Dict, we want to return results in the same order."
[query results]
(let [field-ids (-> query :query :fields)
fields-clause-fields (when-not (or (empty? field-ids)
(= field-ids [nil]))
(let [field-id->name (->> (sel :many [Field :id :name :base_type]
:id [in field-ids]) ; Fetch names of fields from `fields` clause
(map (fn [{:keys [id name base_type]}] ; build map of field-id -> field-name
{id (gsu/field-name+base-type->castified-key name base_type)}))
(into {}))]
(map field-id->name field-ids))) ; now get names in same order as the IDs
other-fields (->> (first results)
keys ; Get the names of any other fields that were returned (i.e., `sum`)
(filter #(not (contains? (set fields-clause-fields) %)))
(order-columns query))]
(->> (concat fields-clause-fields other-fields) ; Return a combined vector. Convert them to strs, otherwise korma
(filter identity) ; remove any nils -- don't want a NullPointerException
(map name)))) ; will qualify them like `"METABASE_FIELD"."FOLLOWERS_COUNT"
(defn- uncastify
"Remove CAST statements from a column name if needed.
(uncastify \"DATE\") -> \"DATE\"
(uncastify \"CAST(DATE AS DATE)\") -> \"DATE\""
[column-name]
(or (second (re-find #"CAST\(([^\s]+) AS [\w]+\)" column-name))
column-name))
(defn get-column-info
"Wrapper for `metabase.driver.query-processor/get-column-info` that calls `uncastify` on column names."
[query column-names]
(qp/get-column-info query (map uncastify column-names)))
......@@ -257,15 +257,8 @@
;; TODO - This is similar to the implementation in generic-sql; can we combine them and move it into metabase.driver.query-processor?
(defn annotate-results
"Add column information, `row_count`, etc. to the results of a Mongo QP query."
[{:keys [source_table] :as query} results]
{:pre [(integer? source_table)]}
(let [field-name->field (sel :many :field->obj [Field :name] :table_id source_table)
column-keys (qp/order-columns {:query query} (keys (first results)))
column-names (map name column-keys)]
{:columns column-names
:cols (qp/get-column-info {:query query} column-names)
:rows (map #(map % column-keys)
results)}))
[query results]
(qp/annotate query results))
;; ## CLAUSE APPLICATION 2.0
......@@ -313,7 +306,7 @@
identity))))))
(defn- cast-value-if-needed
"* Convert dates (which come back as `YYYY-MM-DD` strings) to `java.util.Date`
"* Convert dates (which come back as `YYYY-MM-DD` strings) to `java.sql.Date`
* Convert ID strings to `ObjectId`
* Return other values as-is"
[field-id ^String value]
......
......@@ -7,7 +7,8 @@
[metabase.db :refer :all]
[metabase.driver.interface :as i]
[metabase.driver.query-processor.expand :as expand]
[metabase.models.field :refer [Field field->fk-table]]
(metabase.models [field :refer [Field]]
[foreign-key :refer [ForeignKey]])
[metabase.util :as u]))
(declare add-implicit-breakout-order-by
......@@ -251,7 +252,7 @@
;;; ### CONVERT-TIMESTAMPS-TO-DATES
(defn convert-timestamps-to-dates
(defn convert-unix-timestamps-to-dates
"Convert the values of Unix timestamps (for `Fields` whose `:special_type` is `:timestamp_seconds` or `:timestamp_milliseconds`) to dates."
[{:keys [cols rows], :as results}]
(let [timestamp-seconds-col-indecies (u/indecies-satisfying #(= (:special_type %) :timestamp_seconds) cols)
......@@ -264,8 +265,8 @@
(update-in results [:rows] #(for [row %]
(for [[i val] (m/indexed row)]
(cond
(contains? timestamp-seconds-col-indecies i) (java.sql.Timestamp. (* val 1000))
(contains? timestamp-millis-col-indecies i) (java.sql.Timestamp. val)
(contains? timestamp-seconds-col-indecies i) (java.sql.Date. (* val 1000))
(contains? timestamp-millis-col-indecies i) (java.sql.Date. val)
:else val)))))))
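
A minimal sketch of the conversion above, with hypothetical columns and values; note that converted values are now `java.sql.Date`s rather than `java.sql.Timestamp`s:

(convert-unix-timestamps-to-dates
  {:cols [{:name "created_at", :special_type :timestamp_seconds}
          {:name "count",      :special_type :number}]
   :rows [[1433116800 10]
          [1433203200  7]]})
;; => :cols unchanged; :rows become roughly ([#inst "2015-06-01..." 10] [#inst "2015-06-02..." 7])
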
......@@ -290,121 +291,140 @@
"Apply post-processing steps to the RESULTS of a QUERY, such as applying cumulative sum."
[driver query results]
{:pre [(map? query)
(map? results)]}
(map? results)
(sequential? (:columns results))
(sequential? (:cols results))
(sequential? (:rows results))]}
;; Double-check that there are no duplicate columns in results
(assert (= (count (:columns results))
(count (set (:columns results))))
(format "Duplicate columns in results: %s" (vec (:columns results))))
(->> results
limit-max-result-rows
(#(case (keyword (:type query))
:native %
:query (post-process-cumulative-sum (:query query) %)))
convert-timestamps-to-dates
convert-unix-timestamps-to-dates
add-row-count-and-status))
;; # COMMON ANNOTATION FNS
(defn get-column-info
"Get extra information about result columns. This is done by looking up matching `Fields` for the `Table` in QUERY or looking up
information about special columns such as `count` via `get-special-column-info`."
[{{table-id :source_table} :query, :as query} column-names]
{:pre [(integer? table-id)
(every? string? column-names)]}
(let [columns (->> (sel :many [Field :id :table_id :name :description :base_type :special_type] ; lookup columns with matching names for this Table
:table_id table-id :name [in (set column-names)])
(map (fn [{:keys [name] :as column}] ; build map of column-name -> column
{name (-> (select-keys column [:id :table_id :name :description :base_type :special_type])
(assoc :extra_info (if-let [fk-table (field->fk-table column)]
{:target_table_id (:id fk-table)}
{})))}))
(into {}))]
(->> column-names
(map (fn [column-name]
(try
(or (columns column-name) ; try to get matching column from the map we build earlier
(get-special-column-info query column-name)) ; if it's not there then it's a special column like `count`
(catch Throwable _ ; If for some reason column info lookup failed just return empty info map
{:name column-name ; TODO - should we log this ? It shouldn't be happening ideally
:id nil
:table_id nil
:description nil
:base_type :UnknownField
:special_type nil})))))))
(defn get-special-column-info
"Get info like `:base_type` and `:special_type` for a special aggregation column like `count` or `sum`."
[query column-name]
{:pre [(:query query)]}
(merge {:name column-name
:id nil
:table_id nil
:description nil}
(let [aggregation-type (keyword column-name) ; For aggregations of a specific Field (e.g. `sum`)
field-aggregation? (contains? #{:avg :stddev :sum} aggregation-type)] ; lookup the field we're aggregating and return its
(if field-aggregation? (sel :one :fields [Field :base_type :special_type] ; type info. (The type info of the aggregate result
:id (-> query :query :aggregation second)) ; will be the same.)
(case aggregation-type ; Otherwise for general aggregations such as `count`
:count {:base_type :IntegerField ; just return hardcoded type info
:special_type :number})))))
;; # ANNOTATION 2.0
(def ^:dynamic *uncastify-fn*
"Function that should be called to transform a column name from the set of results to one that matches a `Field` in the DB.
The default implementation returns the column name as-is; others, such as `generic-sql`, provide implementations that
remove casting statements and the like.
This function should accept and return *keywords*."
identity)
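
For example, with the `generic-sql` implementation bound (as in the driver change earlier in this commit), a castified result keyword maps back to the plain `Field` name, while the default `identity` passes keywords through untouched:

(binding [qp/*uncastify-fn* uncastify]                      ; generic-sql's uncastify
  (qp/*uncastify-fn* (keyword "CAST(created_at AS DATE)"))) ; => :created_at

(qp/*uncastify-fn* :created_at)                             ; => :created_at (default)
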
;; TODO - since this was moved over from generic SQL some of its functionality should be reworked. And dox updated.
;; (Since castification is basically SQL-specific it would make sense to handle castification / decastification separately)
;; Fix this when I'm not burnt out on driver code
(defn -order-columns
"Don't use this directly; use `order-columns`.
This is broken out for testability -- it doesn't depend on data from the DB."
[fields breakout-field-ids field-field-ids castified-field-names]
;; Basically we want to convert both BREAKOUT-FIELD-IDS and CASTIFIED-FIELD-NAMES to maps like:
;; {:name "updated_at"
;; :id 224
;; :castified (keyword "CAST(updated_at AS DATE)")
;; :position 21}
;; Then we can order things appropriately and return the castified names.
(let [uncastified->castified (zipmap (map #(*uncastify-fn* (name %)) castified-field-names) castified-field-names)
fields (map #(assoc % :castified (uncastified->castified (:name %)))
fields)
id->field (zipmap (map :id fields) fields)
castified->field (zipmap (map :castified fields) fields)
breakout-fields (->> breakout-field-ids
(map id->field))
field-fields (->> field-field-ids
(map id->field))
other-fields (->> castified-field-names
(map (fn [castified-name]
(or (castified->field castified-name)
{:castified castified-name ; for aggregate fields like 'count' create a fake map
:position 0}))) ; with position 0 so it is returned ahead of the other fields
(filter #(not (or (contains? (set breakout-field-ids)
(:id %))
(contains? (set field-field-ids)
(:id %)))))
(sort-by :position))]
(->> (concat breakout-fields field-fields other-fields)
(map :castified)
(filter identity))))
(defn order-columns
"Return CASTIFIED-FIELD-NAMES in the order we'd like to display them in the output.
They should be ordered as follows:
1. All breakout fields, in the same order as BREAKOUT-FIELD-IDS
2. Any aggregate fields like `count`
3. Fields included in the `fields` clause
4. All other columns in the same order as `Field.position`."
[{{source-table :source_table, breakout-field-ids :breakout, field-field-ids :fields} :query} castified-field-names]
{:post [(every? keyword? %)]}
(try
(-order-columns (sel :many :fields [Field :id :name :position] :table_id source-table)
breakout-field-ids
(when-not (:fields-is-implicit @*internal-context*) field-field-ids)
castified-field-names)
(catch Exception e
(.printStackTrace e)
(log/error (.getMessage e)))))
;; ## Ordering
;;
;; Fields should be returned in the following order:
;;
;; 1. Breakout Fields
;; 2. Aggregation Fields (e.g. sum, count)
;; 3. Fields clause Fields, if they were added explicitly
;; 4. All other Fields, sorted by :position
(defn- order-cols
"Construct a sequence of column keywords that should be used for pulling ordered rows from RESULTS.
FIELDS should be a sequence of all `Fields` for the `Table` associated with QUERY."
[{{breakout-ids :breakout, fields-ids :fields} :query} results fields]
{:post [(= (set %)
(set (keys (first results))))]}
;; Order needs to be [breakout-cols aggregate-cols fields-cols other-cols]
(let [field-id->field (zipmap (map :id fields) fields)
;; Get IDs from Fields clause *if* it was added explicitly and all other Field IDs for Table. Filter out :breakout field IDs
all-field-ids (map :id fields)
non-breakout-ids (->> (when-not (:fields-is-implicit @*internal-context*) fields-ids)
(concat all-field-ids)
(filter (complement (partial contains? (set breakout-ids))))
distinct)
;; Get all the keywords returned by the results
result-kws (set (keys (first results)))
;; Convert breakout/non-breakout IDs to keywords
ids->kws #(some->> (map field-id->field %)
(map :name)
(map keyword)
(filter (partial contains? result-kws)))
breakout-kws (ids->kws breakout-ids)
non-breakout-kws (ids->kws non-breakout-ids)
;; Get the results kws specific to :aggregation (not part of breakout/non-breakout-kws)
ag-kws (->> result-kws
(filter (complement (partial contains? (set (concat breakout-kws non-breakout-kws))))))]
;; Create a combined sequence of aggregate result KWs + other ordered kws
(->> (concat breakout-kws ag-kws non-breakout-kws))))
(defn- add-fields-extra-info
"Add `:extra_info` about `ForeignKeys` to `Fields` whose `special_type` is `:fk`."
[fields]
;; Get a sequence of all Field IDs that have a :special_type of FK
(let [fk-field-ids (->> fields
(filter #(= (:special_type %) :fk))
(map :id)
(filter identity))
;; Fetch maps of the info we need for :extra_info if there are any FK Fields
field-id->dest-field-id (when (seq fk-field-ids)
(sel :many :field->field [ForeignKey :origin_id :destination_id], :origin_id [in fk-field-ids]))
dest-field-id->table-id (when (seq fk-field-ids)
(sel :many :id->field [Field :table_id], :id [in (vals field-id->dest-field-id)]))]
;; Add :extra_info to every Field. Empty if it's not an FK, otherwise add a map with target Table ID
(for [{:keys [special_type], :as field} fields]
(cond-> field
(:id field) (assoc :extra_info (if-not (= special_type :fk) {}
{:target_table_id (->> (:id field)
field-id->dest-field-id
dest-field-id->table-id)}))))))
(defn- get-cols-info
"Get column info for the `:cols` part of the QP results."
[{{[ag-type ag-field-id] :aggregation} :query} fields ordered-col-kws]
(let [field-kw->field (zipmap (map #(keyword (:name %)) fields)
fields)
field-id->field (delay (zipmap (map :id fields) ; a delay since we probably won't need it
fields))]
(->> (for [col-kw ordered-col-kws]
(or
;; If col-kw is a known Field return that
(field-kw->field col-kw)
;; Otherwise it is an aggregation column like :sum, build a map of information to return
(let [ag-type (keyword ag-type)]
(assert ag-type)
(merge {:name (name col-kw)
:id nil
:table_id nil
:description nil}
(let [ag-type (keyword ag-type)]
(cond
;; avg, stddev, and sum should inherit the base_type and special_type from the Field they're aggregating
(contains? #{:avg :stddev :sum} ag-type) (-> (@field-id->field ag-field-id)
(select-keys [:base_type :special_type]))
;; count should always be IntegerField/number
(= ag-type :count) {:base_type :IntegerField
:special_type :number}))))))
;; Add FK info to the resulting Fields
add-fields-extra-info)))
(defn annotate
"Take a sequence of RESULTS of executing QUERY and return the \"annotated\" results we pass to postprocessing -- the map with `:cols`, `:columns`, and `:rows`.
RESULTS should be a sequence of *maps*, keyed by result column -> value."
[{{:keys [source_table]} :query, :as query}, results]
{:pre [(integer? source_table)]}
(let [results (if-not *uncastify-fn* results
(for [row results]
(m/map-keys *uncastify-fn* row)))
fields (sel :many :fields [Field :id :table_id :name :description :base_type :special_type]
:table_id source_table
:active true
(order :position :asc)
(order :id :desc)) ; not sure why we're ordering things this way but this is what the tests expect so (?)
ordered-col-kws (order-cols query results fields)]
{:rows (for [row results]
(map row ordered-col-kws))
:columns (map name ordered-col-kws)
:cols (get-cols-info query fields ordered-col-kws)}))
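
Putting the pieces together, a rough sketch of what the reworked `annotate` returns for a hypothetical breakout-plus-count query (it assumes a Table 1 whose "state" Field has id 10; `:cols` entries are abbreviated). Note the column ordering: breakout columns first, then aggregate columns, then any remaining Fields by `:position`:

(annotate {:query {:source_table 1, :breakout [10], :aggregation ["count"]}}
          [{:state "CA", :count 106}
           {:state "OR", :count 42}])
;; => {:rows    (("CA" 106) ("OR" 42))
;;     :columns ("state" "count")
;;     :cols    ({:id 10, :table_id 1, :name "state", :base_type :TextField, :special_type :state, :extra_info {}, ...}
;;               {:name "count", :id nil, :table_id nil, :base_type :IntegerField, :special_type :number, ...})}
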
......@@ -5,7 +5,7 @@
and various QP components wouldn't need to implement that logic themselves.
That's the ultimate end goal of this namespace: parse a Query dict and return an *expanded* form with relevant information added,
values already parsed (e.g. date strings will be converted to `java.util.Date` / Unix timestamps as appropriate), in a more
values already parsed (e.g. date strings will be converted to `java.sql.Date` / Unix timestamps as appropriate), in a more
Clojure-friendly format (e.g. using keywords like `:not-null` instead of strings like `\"NOT_NULL\"`, and using maps instead of
position-dependent vectors for things like the `BETWEEN` filter clause. We'll also be able to add useful utility methods to various
bits of the Query Language, since they're typed. On top of that, we'll see a big performance improvement when various QP modules aren't
......@@ -237,18 +237,24 @@
(collapse-one [_]
["BETWEEN" field min-val max-val]))
(defn- collapse-filter-type [^clojure.lang.Keyword filter-type]
(-> filter-type
name
(s/replace #"-" "_")
s/upper-case))
(defrecord Filter:Field+Value [^Keyword filter-type
^Field field
^Value value]
ICollapse
(collapse-one [_]
[(s/upper-case (name filter-type)) field value]))
[(collapse-filter-type filter-type) field value]))
(defrecord Filter:Field [^Keyword filter-type
^Field field]
ICollapse
(collapse-one [_]
[(s/upper-case (name filter-type)) field]))
[(collapse-filter-type filter-type) field]))
;; ### Parsers
......
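
The new `collapse-filter-type` exists so multi-word filter types collapse back to the wire format the rest of the QP matches against (cf. the "NOT_NULL" / "IS_NULL" patterns in the generic-sql filter clause above). A couple of examples:

(collapse-filter-type :not-null)  ; => "NOT_NULL"
(collapse-filter-type :is-null)   ; => "IS_NULL"
(collapse-filter-type :>=)        ; => ">="

So a collapsed Filter:Field now yields ["NOT_NULL" field] rather than "NOT-NULL".
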
......@@ -88,16 +88,6 @@
(let [dest-id (sel :one :field [ForeignKey :destination_id] :origin_id id)]
(sel :one Field :id dest-id))))
(defn field->fk-table
"Attempts to follow a `ForeignKey` from the the given `Field` to a destination `Table`.
This is a simple convenience for calling `field->fk-field` then hydrating :table
Only evaluates if the given field has :special_type `fk`, otherwise does nothing."
[field]
(-> (field->fk-field field)
(hydrate :table)
:table))
(defmethod post-select Field [_ {:keys [table_id] :as field}]
(u/assoc* field
:table (delay (sel :one 'metabase.models.table/Table :id table_id))
......
......@@ -49,9 +49,11 @@
(java.text.SimpleDateFormat. "yyyy-MM-dd"))
(defn parse-date-yyyy-mm-dd
"Parse a date in the `yyyy-mm-dd` format and return a `java.util.Date`."
^java.util.Date [^String date]
(.parse simple-date-format date))
"Parse a date in the `yyyy-mm-dd` format and return a `java.sql.Date`."
^java.sql.Date [^String date]
(-> (.parse simple-date-format date)
.getTime
java.sql.Date.))
(defn date-yyyy-mm-dd->unix-timestamp
"Convert a string DATE in the `YYYY-MM-DD` format to a Unix timestamp in seconds."
......
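
For reference, the reworked date helpers behave roughly as follows (exact instants depend on the JVM time zone; `date-yyyy-mm-dd->unix-timestamp`'s body is not shown in this hunk, so its result is taken from the docstring):

(u/parse-date-yyyy-mm-dd "2015-06-01")           ; => a java.sql.Date for 2015-06-01 (previously java.util.Date)
(u/date-yyyy-mm-dd->unix-timestamp "2015-06-01") ; => 1433116800 (seconds, assuming UTC)
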
(ns metabase.driver.generic-sql.query-processor.annotate-test
(:require [expectations :refer :all]
[metabase.driver.generic-sql.query-processor.annotate :refer :all]
[metabase.driver.query-processor :as qp]
[metabase.test.util :refer [resolve-private-fns]]))
(resolve-private-fns metabase.driver.generic-sql.query-processor.annotate uncastify)
(defn -order-columns [& args]
(binding [qp/*uncastify-fn* uncastify]
(apply qp/-order-columns args)))
;; ## TESTS FOR -ORDER-COLUMNS
(def ^:const ^:private mock-fields
[{:position 0, :name "id", :id 224}
{:position 1, :name "name", :id 341}
{:position 2, :name "author_name", :id 453}
{:position 3, :name "allows_suggestions", :id 433}
{:position 4, :name "author_url", :id 455}
{:position 5, :name "content_creator_id", :id 446}
{:position 6, :name "created_at", :id 263}
{:position 7, :name "description", :id 375}
{:position 8, :name "external_url", :id 424}
{:position 9, :name "updated_at", :id 284}])
(def ^:const ^:private mock-castified-field-names
[:allows_suggestions
:description
:author_url
:name
(keyword "CAST(updated_at AS DATE)")
:id
:author_name
:external_url
:content_creator_id
(keyword "CAST(created_at AS DATE)")])
;; Check that `Field.position` is respected. No breakout fields
(expect [:id
:name
:author_name
:allows_suggestions
:author_url
:content_creator_id
(keyword "CAST(created_at AS DATE)")
:description
:external_url
(keyword "CAST(updated_at AS DATE)")]
(-order-columns mock-fields [] [] mock-castified-field-names))
;; Check that breakout fields are returned first, in order, before other fields
(expect [:description
:allows_suggestions
:id
:name
:author_name
:author_url
:content_creator_id
(keyword "CAST(created_at AS DATE)")
:external_url
(keyword "CAST(updated_at AS DATE)")]
(-order-columns mock-fields [375 433] [] mock-castified-field-names))
;; Check that aggregate fields are returned ahead of other fields
(expect [:allows_suggestions
:description
:count
:id
:name
:author_name
:author_url
:content_creator_id
(keyword "CAST(created_at AS DATE)")
:external_url
(keyword "CAST(updated_at AS DATE)")]
(-order-columns mock-fields [433 375] [] (concat [:count] mock-castified-field-names)))