From 25b1bcd9726d45b7b079cceea1539d2b6e3ddf48 Mon Sep 17 00:00:00 2001
From: Cam Saul <cammsaul@gmail.com>
Date: Wed, 19 Jun 2019 13:44:34 -0700
Subject: [PATCH] Fix regression in syncing SparkSQL string type caused by
 #10207 (#10222)

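Before this change, the :hive-like implementation of
sql-jdbc.sync/database-type->base-type had no pattern for the SparkSQL
`string` column type, so `string` columns fell through to the catch-all
#".*" pattern and synced as :type/*. This adds a #"string.*" pattern so
they sync as :type/Text, and adds tests covering the type mappings the
test suite relies on.

As a rough illustration (a minimal sketch of the condp/re-matches
dispatch, not the driver code itself, which dispatches on
(name database-type)):

    ;; condp evaluates (re-matches pattern "string") for each pattern in
    ;; order and returns the result paired with the first full match
    (condp re-matches "string"
      #"varchar.*" :type/Text
      #"string.*"  :type/Text ; pattern added by this patch
      #".*"        :type/*)   ; previously the first (and only) match
    ;; => :type/Text
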
---
 .../src/metabase/driver/hive_like.clj         | 39 ++++++++++---------
 .../test/metabase/driver/hive_like_test.clj   | 10 +++++
 .../test/metabase/driver/sparksql_test.clj    |  2 +-
 3 files changed, 31 insertions(+), 20 deletions(-)
 create mode 100644 modules/drivers/sparksql/test/metabase/driver/hive_like_test.clj

diff --git a/modules/drivers/sparksql/src/metabase/driver/hive_like.clj b/modules/drivers/sparksql/src/metabase/driver/hive_like.clj
index f9302c77290..198457de930 100644
--- a/modules/drivers/sparksql/src/metabase/driver/hive_like.clj
+++ b/modules/drivers/sparksql/src/metabase/driver/hive_like.clj
@@ -23,25 +23,26 @@
 
 (defmethod sql-jdbc.sync/database-type->base-type :hive-like [_ database-type]
   (condp re-matches (name database-type)
-    #"boolean"               :type/Boolean
-    #"tinyint"               :type/Integer
-    #"smallint"              :type/Integer
-    #"int"                   :type/Integer
-    #"bigint"                :type/BigInteger
-    #"float"                 :type/Float
-    #"double"                :type/Float
-    #"double precision"      :type/Double
-    #"decimal.*"             :type/Decimal
-    #"char.*"                :type/Text
-    #"varchar.*"             :type/Text
-    #"binary*"               :type/*
-    #"date"                  :type/Date
-    #"time"                  :type/Time
-    #"timestamp"             :type/DateTime
-    #"interval"              :type/*
-    #"array.*"               :type/Array
-    #"map"                   :type/Dictionary
-    #".*"                    :type/*))
+    #"boolean"          :type/Boolean
+    #"tinyint"          :type/Integer
+    #"smallint"         :type/Integer
+    #"int"              :type/Integer
+    #"bigint"           :type/BigInteger
+    #"float"            :type/Float
+    #"double"           :type/Float
+    #"double precision" :type/Double
+    #"decimal.*"        :type/Decimal
+    #"char.*"           :type/Text
+    #"varchar.*"        :type/Text
+    #"string.*"         :type/Text
+    #"binary*"          :type/*
+    #"date"             :type/Date
+    #"time"             :type/Time
+    #"timestamp"        :type/DateTime
+    #"interval"         :type/*
+    #"array.*"          :type/Array
+    #"map"              :type/Dictionary
+    #".*"               :type/*))
 
 (defmethod sql.qp/current-datetime-fn :hive-like [_] :%now)
 
diff --git a/modules/drivers/sparksql/test/metabase/driver/hive_like_test.clj b/modules/drivers/sparksql/test/metabase/driver/hive_like_test.clj
new file mode 100644
index 00000000000..fe62c8bab9c
--- /dev/null
+++ b/modules/drivers/sparksql/test/metabase/driver/hive_like_test.clj
@@ -0,0 +1,10 @@
+(ns metabase.driver.hive-like-test
+  (:require [expectations :refer [expect]]
+            [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]))
+
+;; make sure the database types we use for running tests are actually mapped to the correct base types
+(expect :type/Text     (sql-jdbc.sync/database-type->base-type :hive-like :string))
+(expect :type/Integer  (sql-jdbc.sync/database-type->base-type :hive-like :int))
+(expect :type/Date     (sql-jdbc.sync/database-type->base-type :hive-like :date))
+(expect :type/DateTime (sql-jdbc.sync/database-type->base-type :hive-like :timestamp))
+(expect :type/Float    (sql-jdbc.sync/database-type->base-type :hive-like :double))
diff --git a/modules/drivers/sparksql/test/metabase/driver/sparksql_test.clj b/modules/drivers/sparksql/test/metabase/driver/sparksql_test.clj
index 39294845bd5..1d19451793d 100644
--- a/modules/drivers/sparksql/test/metabase/driver/sparksql_test.clj
+++ b/modules/drivers/sparksql/test/metabase/driver/sparksql_test.clj
@@ -1,5 +1,5 @@
 (ns metabase.driver.sparksql-test
-  (:require [expectations :refer :all]
+  (:require [expectations :refer [expect]]
             [metabase.driver.sql.query-processor :as sql.qp]))
 
 ;; Make sure our custom implementation of `apply-page` works the way we'd expect
-- 
GitLab