Skip to content
Snippets Groups Projects
Unverified Commit 90151667 authored by Cal Herries's avatar Cal Herries Committed by GitHub
Browse files

Extend "Add check for connection status before sync" to presto-jdbc, oracle, and vertica (#36119)

parent 676fcd8c
No related branches found
No related tags found
No related merge requests found
......@@ -52,9 +52,9 @@
'("https://www.googleapis.com/auth/bigquery"
"https://www.googleapis.com/auth/drive"))
(defn- database->client
^BigQuery [database]
(let [creds (bigquery.common/database-details->service-account-credential (:details database))
(defn- database-details->client
  "Build a `BigQuery` client authenticated with the service-account credential
  found in the connection `details`, scoped to `bigquery-scopes`."
  ^BigQuery [details]
  (let [credential (bigquery.common/database-details->service-account-credential details)
        scoped     (.createScoped credential bigquery-scopes)
        builder    (BigQueryOptions/newBuilder)]
    ;; setCredentials mutates the builder in place; we then build the options
    ;; object and ask it for a service client.
    (.setCredentials builder scoped)
    (.. builder build getService)))
......@@ -65,25 +65,23 @@
(defn- list-tables
"Fetch all tables (new pages are loaded automatically by the API)."
(^Iterable [database]
(list-tables database false))
(^Iterable [{{:keys [project-id dataset-filters-type dataset-filters-patterns]} :details, :as database} validate-dataset?]
(list-tables (database->client database)
(or project-id (bigquery.common/database-details->credential-project-id (:details database)))
dataset-filters-type
dataset-filters-patterns
(boolean validate-dataset?)))
(^Iterable [^BigQuery client ^String project-id ^String filter-type ^String filter-patterns ^Boolean validate-dataset?]
(let [datasets (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption))
inclusion-patterns (when (= "inclusion" filter-type) filter-patterns)
exclusion-patterns (when (= "exclusion" filter-type) filter-patterns)
(^Iterable [database-details]
(list-tables database-details {:validate-dataset? false}))
(^Iterable [{:keys [project-id dataset-filters-type dataset-filters-patterns] :as details} {:keys [validate-dataset?]}]
(let [client (database-details->client details)
project-id (or project-id (bigquery.common/database-details->credential-project-id details))
datasets (.listDatasets client project-id (u/varargs BigQuery$DatasetListOption))
inclusion-patterns (when (= "inclusion" dataset-filters-type) dataset-filters-patterns)
exclusion-patterns (when (= "exclusion" dataset-filters-type) dataset-filters-patterns)
dataset-iter (for [^Dataset dataset (.iterateAll datasets)
:let [^DatasetId dataset-id (.. dataset getDatasetId)]
:when (driver.s/include-schema? inclusion-patterns
exclusion-patterns
(.getDataset dataset-id))]
dataset-id)]
(when (and (not= filter-type "all") validate-dataset? (zero? (count dataset-iter)))
(when (and (not= dataset-filters-type "all")
validate-dataset?
(zero? (count dataset-iter)))
(throw (ex-info (tru "Looks like we cannot find any matching datasets.")
{::driver/can-connect-message? true})))
(apply concat (for [^DatasetId dataset-id dataset-iter]
......@@ -94,7 +92,7 @@
(defmethod driver/describe-database :bigquery-cloud-sdk
[_ database]
(let [tables (list-tables database)]
(let [tables (list-tables (:details database))]
{:tables (set (for [^Table table tables
:let [^TableId table-id (.getTableId table)
^String dataset-id (.getDataset table-id)]]
......@@ -103,7 +101,7 @@
(defmethod driver/can-connect? :bigquery-cloud-sdk
[_ details-map]
;; check whether we can connect by seeing whether listing tables succeeds
(try (some? (list-tables {:details details-map} ::validate-dataset))
(try (some? (list-tables details-map {:validate-dataset? true}))
(catch Exception e
(when (::driver/can-connect-message? (ex-data e))
(throw e))
......@@ -115,7 +113,7 @@
(mu/defn ^:private get-table :- (ms/InstanceOfClass Table)
(^Table [{{:keys [project-id]} :details, :as database} dataset-id table-id]
(get-table (database->client database) project-id dataset-id table-id))
(get-table (database-details->client (:details database)) project-id dataset-id table-id))
(^Table [^BigQuery client :- (ms/InstanceOfClass BigQuery)
project-id :- [:maybe ::lib.schema.common/non-blank-string]
......@@ -297,7 +295,7 @@
(defn- execute-bigquery-on-db
^TableResult [database sql parameters cancel-chan cancel-requested?]
(execute-bigquery
(database->client database)
(database-details->client (:details database))
sql
parameters
cancel-chan
......
......@@ -69,7 +69,7 @@
(defn- bigquery
  "Get an instance of a `Bigquery` client for the test database.
  Uses `test-db-details` (driven by env vars) and reaches into the driver
  namespace's private `database-details->client` via a var quote."
  ^BigQuery []
  ;; NOTE(review): the scraped diff showed both the pre- and post-change call
  ;; here; this keeps the post-change form, which passes details directly
  ;; rather than wrapping them in a {:details ...} map.
  (#'bigquery/database-details->client (test-db-details)))
(defn project-id
"BigQuery project ID that we're using for tests, either from the env var `MB_BIGQUERY_TEST_PROJECT_ID`, or if that is
......@@ -82,7 +82,10 @@
(defmethod tx/dbdef->connection-details :bigquery-cloud-sdk
  [_driver _context {:keys [database-name]}]
  ;; Scope the connection to just this test run's dataset by using an
  ;; inclusion dataset filter, instead of the old :dataset-id key. The scraped
  ;; diff showed both the pre- and post-change `assoc`; this keeps the
  ;; post-change form.
  (assoc (test-db-details)
         :dataset-filters-type     "inclusion"
         :dataset-filters-patterns (test-dataset-id database-name)
         :include-user-id-and-hash true))
;;; -------------------------------------------------- Loading Data --------------------------------------------------
......
......@@ -612,6 +612,14 @@
[_driver _database _table]
nil)
(defmethod driver/can-connect? :presto-jdbc
  [driver {:keys [catalog], :as details}]
  ;; First delegate to the generic :sql-jdbc connectivity check; only if that
  ;; succeeds do we additionally verify that the configured catalog actually
  ;; exists by listing its schemas.
  (let [jdbc-can-connect? (get-method driver/can-connect? :sql-jdbc)]
    (and (jdbc-can-connect? driver details)
         (sql-jdbc.conn/with-connection-spec-for-testing-connection [spec [driver details]]
           ;; jdbc/query is used to see if we throw, we want to ignore the results
           (jdbc/query spec (format "SHOW SCHEMAS FROM %s" catalog))
           true))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | sql-jdbc implementations |
;;; +----------------------------------------------------------------------------------------------------------------+
......
......@@ -3,6 +3,7 @@
[cheshire.core :as json]
[clojure.test :refer :all]
[metabase.driver :as driver]
[metabase.driver.h2 :as h2]
[metabase.driver.impl :as driver.impl]
[metabase.plugins.classloader :as classloader]
[metabase.task.sync-databases :as task.sync-databases]
......@@ -79,12 +80,47 @@
:field-definitions [{:field-name "foo", :base-type :type/Text}]
:rows [["bar"]]}]}))
(deftest can-connect-with-destroy-db-test
  (testing "driver/can-connect? should fail or throw after destroying a database"
    (mt/test-drivers (->> (mt/normal-drivers)
                          ;; athena is a special case because connections aren't made with a single database,
                          ;; but to an S3 bucket that may contain many databases
                          (remove #{:athena}))
      (let [database-name (mt/random-name)
            dbdef         (basic-db-definition database-name)]
        (mt/dataset dbdef
          (let [db      (mt/db)
                details (tx/dbdef->connection-details driver/*driver* :db dbdef)]
            (testing "can-connect? should return true before deleting the database"
              (is (true? (binding [h2/*allow-testing-h2-connections* true]
                           (driver/can-connect? driver/*driver* details)))))
            ;; release db resources like connection pools so we don't have to wait to finish syncing before destroying the db
            (driver/notify-database-updated driver/*driver* db)
            (testing "after deleting a database, can-connect? should return false or throw an exception"
              (let [;; in the case of some cloud databases, the test database is never created, and can't or shouldn't be destroyed.
                    ;; so fake it by changing the database details
                    details (case driver/*driver*
                              ;; BUGFIX: was `:snowfake`, a misspelling of `:snowflake`; the typo made
                              ;; Snowflake fall through to the default branch and attempt to destroy a
                              ;; db that was never created, instead of faking the details.
                              (:redshift :snowflake :vertica) (assoc details :db (mt/random-name))
                              :oracle                         (assoc details :service-name (mt/random-name))
                              :presto-jdbc                    (assoc details :catalog (mt/random-name))
                              ;; otherwise destroy the db and use the original details
                              (do
                                (tx/destroy-db! driver/*driver* dbdef)
                                details))]
                (is (false? (try
                              (binding [h2/*allow-testing-h2-connections* true]
                                (driver/can-connect? driver/*driver* details))
                              ;; any exception from can-connect? also counts as "cannot connect"
                              (catch Exception _
                                false))))))
            ;; clean up the database
            (t2/delete! :model/Database (u/the-id db))))))))
(deftest check-can-connect-before-sync-test
(testing "Database sync should short-circuit and fail if the database at the connection has been deleted (metabase#7526)"
(mt/test-drivers (->> (mt/normal-drivers)
;; athena is a special case because connections aren't made with a single database,
;; but to an S3 bucket that may contain many databases
(remove #{:athena :oracle :vertica :presto-jdbc}))
(remove #{:athena}))
(let [database-name (mt/random-name)
dbdef (basic-db-definition database-name)]
(mt/dataset dbdef
......@@ -95,7 +131,7 @@
(fn [[log-level throwable message]]
(and (= log-level :warn)
(instance? clojure.lang.ExceptionInfo throwable)
(re-matches #"^Cannot sync Database (.+): (.+)" message)))
(re-matches #"^Cannot sync Database ([\s\S]+): ([\s\S]+)" message)))
(mt/with-log-messages-for-level :warn
(#'task.sync-databases/sync-and-analyze-database*! (u/the-id db))))))]
(testing "sense checks before deleting the database"
......@@ -107,11 +143,16 @@
;; release db resources like connection pools so we don't have to wait to finish syncing before destroying the db
(driver/notify-database-updated driver/*driver* db)
;; destroy the db
(if (contains? #{:redshift :snowflake} driver/*driver*)
;; in the case of some cloud databases, the test database is never created, and shouldn't be destroyed.
;; so fake it by redefining the database name on the dbdef
(t2/update! :model/Database (u/the-id db)
{:details (assoc (:details (mt/db)) :db "fake-db-name-that-definitely-wont-be-used")})
(if (contains? #{:redshift :snowflake :vertica :presto-jdbc :oracle} driver/*driver*)
;; in the case of some cloud databases, the test database is never created, and can't or shouldn't be destroyed.
;; so fake it by changing the database details
(let [details (:details (mt/db))
new-details (case driver/*driver*
(:redshift :snowflake :vertica) (assoc details :db (mt/random-name))
:oracle (assoc details :service-name (mt/random-name))
:presto-jdbc (assoc details :catalog (mt/random-name)))]
(t2/update! :model/Database (u/the-id db) {:details new-details}))
;; otherwise destroy the db and use the original details
(tx/destroy-db! driver/*driver* dbdef))
(testing "after deleting a database, sync should fail"
(testing "1: sync-and-analyze-database! should log a warning and fail early"
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment