Unverified commit b90b0c69 authored by Howon Lee, committed by GitHub

Limit the length of rows for JSON parsing (fixes #22636 for most cases) (#22769)

Limit the length of the raw JSON in a row before parsing it; anything over the cap is skipped. This was the simplest way to take another run at #22636.
Parent: d2680f7c
Branch: jest-role-hidden
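The shape of the fix, ahead of the diff below: measure the raw JSON string first, and only hand it to the parser when it is under a cap, otherwise skip it. A minimal sketch of that guard, assuming cheshire.core as the JSON library (the json/parse-string call in the patch is consistent with it); the names here are illustrative rather than the patch's own:

(require '[cheshire.core :as json])

(def max-row-length
  "Illustrative cap; the patch itself uses 50,000 characters."
  50000)

(defn parse-if-short-enough
  "Parse s as JSON only when its raw length is under the cap; otherwise return nil and move on."
  [s]
  (when (< (count s) max-row-length)
    (json/parse-string s)))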
src/metabase/driver/postgres.clj
@@ -190,21 +190,14 @@
   (binding [*enum-types* (enum-types driver database)]
     (sql-jdbc.sync/describe-table driver database table)))
 
-(def ^:const max-nested-field-columns
-  "Maximum number of nested field columns."
-  100)
-
 ;; Describe the nested fields present in a table (currently and maybe forever just JSON),
 ;; including if they have proper keyword and type stability.
 ;; Not to be confused with existing nested field functionality for mongo,
 ;; since this one only applies to JSON fields, whereas mongo only has BSON (JSON basically) fields.
 (defmethod sql-jdbc.sync/describe-nested-field-columns :postgres
   [driver database table]
-  (let [spec   (sql-jdbc.conn/db->pooled-connection-spec database)
-        fields (sql-jdbc.describe-table/describe-nested-field-columns driver spec table)]
-    (if (> (count fields) max-nested-field-columns)
-      #{}
-      fields)))
+  (let [spec (sql-jdbc.conn/db->pooled-connection-spec database)]
+    (sql-jdbc.describe-table/describe-nested-field-columns driver spec table)))
 
 ;;; +----------------------------------------------------------------------------------------------------------------+
 ;;; |                                            metabase.driver.sql impls                                            |
src/metabase/driver/sql_jdbc/sync/describe_table.clj
@@ -208,6 +208,11 @@
   "Number of rows to sample for describe-nested-field-columns"
   500)
 
+(def ^:dynamic *nested-field-column-max-row-length*
+  "Max string length for a row for nested field column before we just give up on parsing it.
+  Marked as mutable because we mutate it for tests."
+  50000)
+
 (defn- flattened-row [field-name row]
   (letfn [(flatten-row [row path]
             (lazy-seq
@@ -237,10 +242,16 @@
       (into {} (map (fn [[k v]] [k (type-by-parsing-string v)]) flat-row))))))
 
 (defn- describe-json-xform [member]
-  ((comp (map #(for [[k v] %] [k (json/parse-string v)]))
+  ((comp (map #(for [[k v] %
+                     :when (< (count v) *nested-field-column-max-row-length*)]
+                 [k (json/parse-string v)]))
          (map #(into {} %))
          (map row->types)) member))
 
+(def ^:const max-nested-field-columns
+  "Maximum number of nested field columns."
+  100)
+
 (defn- describe-json-rf
   "Reducing function that takes a bunch of maps from row->types,
   and gets them to conform to the type hierarchy,
@@ -323,7 +334,6 @@
         field-hash   (apply hash-set (filter some? valid-fields))]
     field-hash))
 
-
 ;; The name's nested field columns but what the people wanted (issue #708)
 ;; was JSON so what they're getting is JSON.
 (defn describe-nested-field-columns
@@ -344,4 +354,6 @@
           query       (jdbc/reducible-query spec sql-args)
           field-types (transduce describe-json-xform describe-json-rf query)
           fields      (field-types->fields field-types)]
-      fields)))))
+      (if (> (count fields) max-nested-field-columns)
+        #{}
+        fields))))))
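For readers less familiar with transducers: describe-json-xform above is a composition of map transducers, and the :when clause added to its for comprehension is what drops an oversize value before json/parse-string ever sees it. A simplified, standalone version of just that stage, again assuming cheshire.core and using illustrative names rather than the patch's:

(require '[cheshire.core :as json])

(def ^:dynamic *max-row-length* 50000)

(def parse-short-values-xf
  "Map transducer over rows of {column-key raw-json-string}: values at or over
  the cap are silently dropped, shorter ones are parsed."
  (map (fn [row]
         (into {}
               (for [[k v] row
                     :when (< (count v) *max-row-length*)]
                 [k (json/parse-string v)])))))

;; A row with one short value keeps it after parsing:
(into [] parse-short-values-xf [{:somekey "{\"bob\": \"dobbs\"}"}])
;; => [{:somekey {"bob" "dobbs"}}]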
test/metabase/driver/sql_jdbc/sync/describe_table_test.clj
@@ -82,6 +82,23 @@
     (is (= {} (#'sql-jdbc.describe-table/row->types arr-row)))
     (is (= {[:zlob "blob"] java.lang.Long} (#'sql-jdbc.describe-table/row->types obj-row))))))
 
+(deftest dont-parse-long-json-xform-test
+  (testing "obnoxiously long json should not even get parsed (#22636)"
+    ;; Generating an actually obnoxiously long json took too long,
+    ;; and actually copy-pasting an obnoxiously long string in there looks absolutely terrible,
+    ;; so this rebinding is what you get
+    (let [obnoxiously-long-json "{\"bob\": \"dobbs\"}"
+          json-map              {:somekey obnoxiously-long-json}]
+      (with-redefs [sql-jdbc.describe-table/*nested-field-column-max-row-length* 3]
+        (is (= {}
+               (transduce
+                #'sql-jdbc.describe-table/describe-json-xform
+                #'sql-jdbc.describe-table/describe-json-rf [json-map]))))
+      (is (= {[:somekey "bob"] java.lang.String}
+             (transduce
+              #'sql-jdbc.describe-table/describe-json-xform
+              #'sql-jdbc.describe-table/describe-json-rf [json-map]))))))
+
 (deftest describe-nested-field-columns-test
   (testing "flattened-row"
     (let [row {:bob {:dobbs 123 :cobbs "boop"}}
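A note on the test just added: rather than generate a genuinely enormous JSON string, it shrinks the cap to 3 with with-redefs so that a 16-character value counts as too long. Because the var is declared ^:dynamic, binding achieves the same effect; a hypothetical standalone equivalent of the trick, with illustrative names and cheshire.core assumed:

(require '[cheshire.core :as json])

(def ^:dynamic *max-row-length* 50000)

(defn parse-row
  "Parse every value in a {column-key raw-json-string} row that is under the cap."
  [row]
  (into {}
        (for [[k v] row
              :when (< (count v) *max-row-length*)]
          [k (json/parse-string v)])))

(parse-row {:somekey "{\"bob\": \"dobbs\"}"})
;; => {:somekey {"bob" "dobbs"}}

(binding [*max-row-length* 3]
  (parse-row {:somekey "{\"bob\": \"dobbs\"}"}))
;; => {} ; the value is treated as too long and skipped, as in the with-redefs test above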