diff --git a/frontend/test/__runner__/backend.js b/frontend/test/__runner__/backend.js
index 909e59ebb57c2eb2bb8ea4d984daf23686015bd5..9cd0b5e615a81960c0e84ec743c5144c139db343 100644
--- a/frontend/test/__runner__/backend.js
+++ b/frontend/test/__runner__/backend.js
@@ -60,6 +60,7 @@ export const BackendResource = createSharedResource("BackendResource", {
       ],
       {
         env: {
+          MB_DB_TYPE: "h2",
           MB_DB_FILE: server.dbFile,
           MB_JETTY_PORT: server.port,
         },
diff --git a/src/metabase/api/common.clj b/src/metabase/api/common.clj
index 9360e677d376cdbbce63273eb34b7116980b2817..b047d565b33993f5369ffe08263cde7ce18f4181 100644
--- a/src/metabase/api/common.clj
+++ b/src/metabase/api/common.clj
@@ -499,11 +499,6 @@
     (check (not (:archived object))
       [404 {:message (tru "The object has been archived."), :error_code "archived"}])))
 
-(defn with-current-user-info
-  "Associates the login-attributes of the current users to `m`"
-  [m]
-  (assoc m :user @*current-user*))
-
 (s/defn column-will-change? :- s/Bool
   "Helper for PATCH-style operations to see if a column is set to change when `object-updates` (i.e., the input to
   the endpoint) is applied.
diff --git a/src/metabase/api/field.clj b/src/metabase/api/field.clj
index 839c29401a6201eeecb4f96a661629e9c4897255..2b5a77c2214e595d77f3d2d2e1e7ebb654f53073 100644
--- a/src/metabase/api/field.clj
+++ b/src/metabase/api/field.clj
@@ -264,19 +264,18 @@
   "Generate the MBQL query used to power FieldValues search in `search-values` below. The actual query generated
   differs slightly based on whether the two Fields are the same Field."
   [field search-field value limit]
-  (api/with-current-user-info
-    {:database (db-id field)
-     :type     :query
-     :query    {:source-table (table-id field)
-                :filter       [:starts-with [:field-id (u/get-id search-field)] value {:case-sensitive false}]
-                ;; if both fields are the same then make sure not to refer to it twice in the `:breakout` clause.
-                ;; Otherwise this will break certain drivers like BigQuery that don't support duplicate
-                ;; identifiers/aliases
-                :breakout     (if (= (u/get-id field) (u/get-id search-field))
-                                [[:field-id (u/get-id field)]]
-                                [[:field-id (u/get-id field)]
-                                 [:field-id (u/get-id search-field)]])
-                :limit        limit}}))
+  {:database (db-id field)
+   :type     :query
+   :query    {:source-table (table-id field)
+              :filter       [:starts-with [:field-id (u/get-id search-field)] value {:case-sensitive false}]
+              ;; if both fields are the same then make sure not to refer to it twice in the `:breakout` clause.
+              ;; Otherwise this will break certain drivers like BigQuery that don't support duplicate
+              ;; identifiers/aliases
+              :breakout     (if (= (u/get-id field) (u/get-id search-field))
+                              [[:field-id (u/get-id field)]]
+                              [[:field-id (u/get-id field)]
+                               [:field-id (u/get-id search-field)]])
+              :limit        limit}})
 
 (s/defn search-values
   "Search for values of `search-field` that start with `value` (up to `limit`, if specified), and return like
@@ -304,7 +303,8 @@
         [result result]))))
 
 (api/defendpoint GET "/:id/search/:search-id"
-  "Search for values of a Field that match values of another Field when breaking out by the "
+  "Search for values of a Field with `search-id` that start with `value`. See docstring for
+  `metabase.api.field/search-values` for a more detailed explanation."
   [id search-id value limit]
   {value su/NonBlankString
    limit (s/maybe su/IntStringGreaterThanZero)}
diff --git a/src/metabase/api/search.clj b/src/metabase/api/search.clj
index a4c839a202b03413b6e148ab2ca9e69c737e5ece..b6fbad58df7118404b46c0206c803fb2db4f2aeb 100644
--- a/src/metabase/api/search.clj
+++ b/src/metabase/api/search.clj
@@ -2,7 +2,8 @@
   (:require [clojure.string :as str]
             [compojure.core :refer [GET]]
             [honeysql.helpers :as h]
-            [metabase.api.common :refer [*current-user-id* *current-user-permissions-set* check-403 defendpoint define-routes]]
+            [metabase.api.common :refer [*current-user-id* *current-user-permissions-set* check-403 defendpoint
+                                         define-routes]]
             [metabase.models
              [card :refer [Card]]
              [card-favorite :refer [CardFavorite]]
@@ -45,7 +46,7 @@
   default-columns)
 
 (defn- ->column
-  "Returns the column name. If the column is aliased, i.e. [`:original_namd` `:aliased_name`], return the aliased
+  "Returns the column name. If the column is aliased, i.e. [`:original_name` `:aliased_name`], return the aliased
   column name"
   [column-or-aliased]
   (if (sequential? column-or-aliased)
@@ -95,7 +96,7 @@
   [query-map entity-type entity-columns]
   (let [col-name->column (u/key-by ->column entity-columns)
         cols-or-nils     (make-canonical-columns entity-type col-name->column)]
-    (apply h/merge-select query-map (concat cols-or-nils ))))
+    (apply h/merge-select query-map (concat cols-or-nils))))
 
 ;; TODO - not used anywhere except `merge-name-and-archived-search` anymore so we can roll it into that
 (s/defn ^:private merge-name-search
diff --git a/src/metabase/driver/bigquery.clj b/src/metabase/driver/bigquery.clj
index e60c8ac159b736439eaa9a5326cd2aa3dd52d4fb..b3292c3f978c740189260166e1d7f91e7fe0c699 100644
--- a/src/metabase/driver/bigquery.clj
+++ b/src/metabase/driver/bigquery.clj
@@ -1,6 +1,5 @@
 (ns metabase.driver.bigquery
-  (:require [cheshire.core :as json]
-            [clj-time
+  (:require [clj-time
              [coerce :as tcoerce]
              [core :as time]
              [format :as tformat]]
@@ -33,7 +32,8 @@
   (:import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
            com.google.api.client.http.HttpRequestInitializer
            [com.google.api.services.bigquery Bigquery Bigquery$Builder BigqueryScopes]
-           [com.google.api.services.bigquery.model QueryRequest QueryResponse Table TableCell TableFieldSchema TableList TableList$Tables TableReference TableRow TableSchema]
+           [com.google.api.services.bigquery.model QueryRequest QueryResponse Table TableCell TableFieldSchema TableList
+            TableList$Tables TableReference TableRow TableSchema]
            honeysql.format.ToSql
            java.sql.Time
            [java.util Collections Date]
@@ -445,16 +445,11 @@
 
 (defn- field->identifier
   "Generate appropriate identifier for a Field for SQL parameters. (NOTE: THIS IS ONLY USED FOR SQL PARAMETERS!)"
-  ;; TODO - Making a DB call for each field to fetch its dataset is inefficient and makes me cry, but this method is
+  ;; TODO - Making 2 DB calls for each field to fetch its dataset is inefficient and makes me cry, but this method is
   ;; currently only used for SQL params so it's not a huge deal at this point
  [{table-id :table_id, :as field}]
-  ;; manually write the query here to save us from having to do 2 seperate queries...
-  (let [[{:keys [details table-name]}] (db/query {:select    [[:database.details :details] [:table.name :table-name]]
-                                                  :from      [[:metabase_table :table]]
-                                                  :left-join [[:metabase_database :database]
-                                                              [:= :database.id :table.db_id]]
-                                                  :where     [:= :table.id (u/get-id table-id)]})
-        details (json/parse-string (u/jdbc-clob->str details) keyword)]
+  (let [{table-name :name, database-id :db_id} (db/select-one ['Table :name :db_id], :id (u/get-id table-id))
+        details (db/select-one-field :details 'Database, :id (u/get-id database-id))]
     (map->BigQueryIdentifier {:dataset-name (:dataset-id details), :table-name table-name, :field-name (:name field)})))
 
 (defn- field->breakout-identifier [driver field]
diff --git a/test/metabase/driver/bigquery_test.clj b/test/metabase/driver/bigquery_test.clj
index a36a2a060a53e6e45a93d6e210882bbf63aa9aa7..63fb5c139b9cb110e27a6b0cc160743e74529aa4 100644
--- a/test/metabase/driver/bigquery_test.clj
+++ b/test/metabase/driver/bigquery_test.clj
@@ -177,9 +177,9 @@
 
 (defn- native-timestamp-query [db-or-db-id timestamp-str timezone-str]
   (-> (qp/process-query
-        {:native {:query (format "select datetime(TIMESTAMP \"%s\", \"%s\")" timestamp-str timezone-str)
-                  :type :native}
-         :database (u/get-id db-or-db-id)})
+        {:database (u/get-id db-or-db-id)
+         :type     :native
+         :native   {:query (format "select datetime(TIMESTAMP \"%s\", \"%s\")" timestamp-str timezone-str)}})
       :data
       :rows
      ffirst))