diff --git a/bin/build/src/release/elastic_beanstalk.clj b/bin/build/src/release/elastic_beanstalk.clj
index 529151b3697736cca18caccce6938624f4874858..7618ac8f0550189930afc02d08d1787bd1ccd75c 100644
--- a/bin/build/src/release/elastic_beanstalk.clj
+++ b/bin/build/src/release/elastic_beanstalk.clj
@@ -6,7 +6,7 @@
    [metabuild-common.core :as u]
    [release.common :as c]
    [release.common.http :as common.http]
-   [release.common.upload :as upload]
+   [release.common.upload :as common.upload]
    [stencil.core :as stencil]
    [stencil.loader]))
 
@@ -102,9 +102,9 @@
 (defn- upload-artifacts! []
   (u/step "Upload Elastic Beanstalk artifacts"
     (u/step "Upload metabase-aws-eb.zip"
-      (upload/upload-artifact! archive-path "metabase-aws-eb.zip"))
+      (common.upload/upload-artifact! archive-path "metabase-aws-eb.zip"))
     (u/step "Upload launch-aws-eb.html"
-      (upload/upload-artifact! html-file-path "launch-aws-eb.html"))))
+      (common.upload/upload-artifact! html-file-path "launch-aws-eb.html"))))
 
 ;; TODO -- we should merge the EB build logic into this script, it's still an ancient bash script
 (defn publish-elastic-beanstalk-artifacts! []
diff --git a/bin/build/src/release/uberjar.clj b/bin/build/src/release/uberjar.clj
index e35f76720b40e9110119fc1328ee3ab7978b83cb..c4a3898848dd6567dfb27af066d641c89195f9c5 100644
--- a/bin/build/src/release/uberjar.clj
+++ b/bin/build/src/release/uberjar.clj
@@ -6,7 +6,7 @@
    [release.common :as c]
    [release.common.hash :as hash]
    [release.common.http :as common.http]
-   [release.common.upload :as upload]))
+   [release.common.upload :as common.upload]))
 
 (defn build-uberjar! []
   (u/step "Build uberjar"
@@ -36,7 +36,7 @@
 (defn upload-uberjar! []
   (u/step "Upload uberjar and validate"
     (u/step (format "Upload uberjar to %s" (c/artifact-download-url "metabase.jar"))
-      (upload/upload-artifact! c/uberjar-path "metabase.jar"))
+      (common.upload/upload-artifact! c/uberjar-path "metabase.jar"))
     ;; TODO -- would be a lot faster to copy to copy s3 -> s3 instead of uploading twice
     (let [latest-download-url (c/artifact-download-url "latest" "metabase.jar")]
       (cond
@@ -48,5 +48,5 @@
 
         :else
         (u/step (format "Upload uberjar to %s" latest-download-url)
-          (upload/upload-artifact! c/uberjar-path "latest" "metabase.jar"))))
+          (common.upload/upload-artifact! c/uberjar-path "latest" "metabase.jar"))))
     (validate-uberjar)))
diff --git a/e2e/support/assets/dog_breeds.csv b/e2e/support/assets/dog_breeds.csv
new file mode 100644
index 0000000000000000000000000000000000000000..f490710dbc04f8ca08363b7acb65888052a18459
--- /dev/null
+++ b/e2e/support/assets/dog_breeds.csv
@@ -0,0 +1,98 @@
+Name,min_life_expectancy,max_life_expectancy,max_height_male,max_height_female,max_weight_male,max_weight_female,min_height_male,min_height_female,min_weight_male,min_weight_female,good_with_children,good_with_other_dogs,shedding,grooming,drooling,coat_length,good_with_strangers,playfulness,protectiveness,trainability,energy,barking
+Golden Retriever,10,12,24,24,75,65,23,23,65,55,5,5,4,2,2,1,5,4,3,5,3,1
+Dachshund,12,16,9,9,32,32,8,8,16,16,3,4,2,2,2,2,4,4,4,4,3,5
+Labrador Retriever,10,12,24.5,24.5,80,70,22.5,22.5,65,55,5,5,4,2,2,1,5,5,3,5,5,3
+Great Dane,7,10,32,32,175,140,30,30,140,110,3,3,3,1,4,1,3,4,5,3,4,3
+Boxer,10,12,25,25,80,65,23,23,65,50,5,3,2,2,3,1,4,4,4,4,4,3
+Dalmatian,11,13,24,24,70,70,19,19,45,45,3,3,4,2,2,1,4,4,4,4,4,3
+Poodle (Miniature),10,18,15,15,15,15,10,10,10,10,5,3,1,4,1,1,5,5,3,5,4,4
+Bullmastiff,7,9,27,27,130,120,25,25,110,100,3,3,3,1,3,1,3,3,5,4,4,1
+Beagle,10,15,16,15,20,30,14,13,15,20,5,5,3,2,1,1,3,4,2,3,4,4
+Border Collie,12,15,22,22,55,55,19,19,30,30,3,3,3,3,1,1,4,5,3,5,5,4
+Siberian Husky,12,14,23.5,23.5,60,50,21,21,45,35,5,5,4,2,1,1,5,5,1,3,5,5
+Shih Tzu,10,18,10.5,10.5,16,16,9,9,9,9,5,5,1,4,1,1,3,3,3,4,3,3
+Pomeranian,12,16,7,7,7,7,6,6,3,3,3,3,2,3,1,1,3,3,4,3,3,4
+Cardigan Welsh Corgi,12,15,12.5,12.5,38,34,10.5,10.5,30,25,4,3,3,2,1,1,4,4,3,4,4,5
+Greyhound,10,13,30,30,70,65,28,28,65,60,3,4,2,1,1,1,3,3,3,3,4,3
+Chow Chow,8,12,20,20,70,70,17,17,45,45,3,2,3,3,3,1,2,3,5,3,3,1
+Chinese Shar-Pei,8,12,20,20,60,60,18,18,45,45,3,3,3,1,3,1,3,3,4,3,3,3
+Cocker Spaniel,10,14,15.5,15.5,30,25,14.5,14.5,25,20,5,5,3,4,2,1,4,3,3,4,4,3
+Great Pyrenees,10,12,32,32,120,90,27,27,110,80,3,3,3,2,3,1,3,3,5,3,3,3
+West Highland White Terrier,13,15,11,11,20,20,11,11,15,15,5,3,3,3,1,1,4,5,5,3,4,5
+Bernese Mountain Dog,7,10,27.5,27.5,115,95,25,25,80,70,5,5,5,3,3,1,4,4,3,4,4,3
+Bull Terrier,12,13,22,22,70,70,21,21,50,50,3,1,3,2,1,1,4,4,3,3,4,3
+Newfoundland,9,10,28,28,150,120,28,28,130,100,5,5,3,2,5,1,5,3,5,3,3,1
+Basset Hound,12,13,15,14,65,65,12,11,40,40,5,5,2,3,4,1,3,3,3,3,2,4
+German Longhaired Pointer,12,14,28,28,80,80,22,22,55,55,5,4,3,1,2,1,4,4,3,5,4,3
+Pug,13,15,13,13,18,18,10,10,14,14,5,4,4,2,1,1,5,5,3,4,3,1
+Boston Terrier,11,13,17,17,25,25,15,15,12,12,5,4,2,2,1,1,5,5,3,4,4,2
+Maltese,12,15,9,9,8.8,8.8,7,7,6.6,6.6,3,3,1,4,1,1,3,3,4,3,3,3
+Bichon Frise,14,15,11.5,11.5,18,18,9.5,9.5,12,12,5,5,1,5,1,1,5,4,2,4,4,3
+American Bulldog,10,12,28,24,100,80,20,20,75,60,3,3,2,0,0,1,0,0,0,4,0,1
+Chihuahua,14,16,8,8,6,6,5,5,4,4,1,3,2,1,1,2,2,4,4,3,4,5
+Rottweiler,9,10,27,27,135,100,24,24,95,80,3,3,3,1,3,1,3,4,5,5,3,1
+Vizsla,12,14,24,24,60,55,22,22,55,44,5,4,3,2,2,1,4,5,3,5,5,3
+Akita,10,14,28,28,130,100,26,26,100,70,3,1,3,3,1,1,2,3,5,3,4,2
+Shetland Sheepdog,12,14,16,16,25,25,13,13,15,15,5,5,3,3,1,1,2,5,5,5,4,5
+Bloodhound,10,12,27,27,110,100,25,25,90,80,3,3,3,2,5,1,3,3,2,4,3,5
+Irish Setter,12,15,27,27,70,60,27,27,70,60,5,5,3,3,2,1,5,5,3,4,5,3
+Komondor,10,12,30,27,130,110,28,25,110,88,3,2,1,4,2,1,3,3,5,4,3,3
+Otterhound,10,13,27,27,115,80,27,27,115,80,3,3,2,2,3,1,4,3,3,4,3,5
+Beauceron,10,12,27.5,27.5,110,110,25.5,25.5,70,70,3,3,4,3,1,1,2,3,4,3,5,3
+Treeing Walker Coonhound,12,13,27,27,70,70,22,22,50,50,5,5,3,1,3,1,3,4,3,5,5,4
+Giant Schnauzer,12,15,27.5,27.5,85,75,25.5,25.5,60,55,3,3,3,4,2,1,3,4,5,5,5,3
+Shiba Inu,13,16,16.5,16.5,23,17,14.5,14.5,23,17,3,3,3,2,1,1,3,3,5,2,3,3
+Norfolk Terrier,12,16,10,10,12,12,9,9,11,11,5,3,3,2,1,1,5,4,4,3,4,4
+Afghan Hound,12,18,27,27,60,60,25,25,50,50,3,3,1,4,1,1,3,3,3,1,4,3
+Brittany,12,14,20.5,20.5,40,40,17.5,17.5,30,30,4,4,3,3,1,1,3,4,3,5,5,3
+Chinook,12,15,26,26,90,65,24,24,55,50,5,5,3,3,1,1,3,3,4,4,3,5
+Doberman Pinscher,10,12,28,28,100,90,26,26,75,60,5,3,4,1,2,1,4,4,5,5,5,3
+Pekingese,12,14,9,9,14,14,6,6,7,7,3,3,3,3,1,1,3,4,4,3,3,1
+Whippet,12,15,22,22,40,40,19,19,25,25,5,5,2,1,1,1,3,4,3,3,4,1
+Basenji,13,14,17,17,24,22,17,17,24,22,3,3,2,1,1,1,3,3,3,2,4,1
+Bolognese,12,14,12,12,9,9,10,10,5.5,5.5,3,3,1,3,1,1,3,4,2,3,4,3
+Smooth Fox Terrier,12,15,15.5,15.5,18,17,15.5,15.5,18,15,3,3,3,2,1,1,3,4,5,3,4,5
+Field Spaniel,12,13,18,18,50,50,18,18,35,35,5,4,3,2,2,1,4,3,3,5,3,3
+Havanese,14,16,11.5,11.5,13,13,8.5,8.5,7,7,5,5,2,3,1,1,5,5,3,4,3,4
+Japanese Chin,10,12,11,11,11,11,8,8,7,7,3,5,3,2,1,1,3,3,3,3,3,2
+Keeshond,12,15,18,18,45,45,18,18,35,35,5,5,3,3,2,1,5,5,5,5,4,4
+American Eskimo Dog,13,15,26,26,65,65,24,24,45,45,5,3,3,3,1,1,5,3,3,4,4,3
+American Hairless Terrier,14,16,16,16,16,16,12,12,12,12,5,3,1,1,1,1,3,3,3,5,3,3
+American Leopard Hound,12,15,27,27,70,70,21,21,45,45,5,3,3,1,1,1,3,3,3,3,4,3
+American Water Spaniel,10,14,18,18,45,40,15,15,30,25,3,3,1,3,1,1,3,3,3,5,3,3
+Anatolian Shepherd Dog,11,13,29,29,150,120,29,29,110,80,3,3,3,2,1,1,1,3,5,2,3,3
+Barbet,12,14,24.5,24.5,65,65,19,19,35,35,5,5,1,3,1,2,3,3,3,4,3,3
+Bergamasco Sheepdog,13,15,23.5,23.5,84,71,23.5,23.5,70,57,3,3,1,1,2,1,3,3,4,3,3,1
+Berger Picard,12,13,25.5,25.5,70,70,23.5,23.5,50,50,3,3,3,1,1,1,3,3,4,4,4,2
+Boerboel,9,11,27,27,200,200,24,24,150,150,4,2,3,2,3,1,3,3,5,4,3,3
+Borzoi,9,14,33,31,105,85,30,27,75,60,3,3,3,2,1,1,3,3,3,2,4,2
+Bracco Italiano,10,14,27,27,90,90,21,21,55,55,4,4,2,1,2,1,3,3,3,5,4,2
+Cane Corso,9,12,27.5,27.5,110,99,25,25,99,88,3,3,2,1,3,1,3,3,5,4,4,3
+Coton de Tulear,15,19,11,11,15,13,10,10,9,8,5,5,2,4,1,1,5,4,3,4,3,1
+Papillon,14,16,11,11,10,10,8,8,5,5,5,3,3,2,1,1,5,5,4,5,4,5
+Plott Hound,12,14,25,25,60,55,20,20,50,40,0,0,0,0,0,0,0,0,0,0,0,0
+Pumi,12,13,18.5,18.5,29,24,16,16,27,22,3,3,1,2,2,1,3,4,4,5,5,3
+Xoloitzcuintli,13,18,25,25,60,60,19,19,28,28,3,3,1,1,1,1,3,4,3,4,4,3
+Tibetan Mastiff,10,12,30,28,150,120,26,24,90,70,3,3,4,3,3,1,1,3,5,3,3,3
+Samoyed,12,14,23.5,23.5,65,50,21,21,45,35,5,3,3,3,1,1,5,5,4,4,4,5
+Schipperke,12,14,13,13,16,16,11,11,10,10,3,3,3,2,1,1,3,4,5,4,3,4
+Rhodesian Ridgeback,10,12,27,27,85,70,25,25,85,70,5,3,3,2,2,1,3,3,5,4,3,2
+Russian Toy,12,14,11,11,6.6,6.6,8,8,3.3,3.3,3,3,3,2,1,1,3,4,4,4,3,4
+Nova Scotia Duck Tolling Retriever,12,14,21,21,50,50,18,18,35,35,5,4,3,2,2,1,3,5,3,5,5,2
+Mudi,12,14,18.5,18.5,29,29,15,15,18,18,3,3,2,1,2,1,3,5,4,5,5,4
+Alaskan Klee Kai,13,16,28,28,130,100,26,26,100,70,0,0,3,2,0,1,0,0,0,3,4,0
+Jindo,14,14,22,22,50,50,18,18,30,30,3,3,3,2,1,1,3,3,5,4,4,3
+Irish Terrier,13,15,18,18,27,25,18,18,27,25,5,1,2,1,1,1,3,3,5,3,3,3
+Italian Greyhound,14,15,15,15,14,14,13,13,7,7,3,5,3,1,1,1,5,4,3,4,3,3
+Hovawart,10,14,28,28,90,90,23,23,65,65,4,4,3,2,2,1,4,4,5,4,3,1
+Cavalier King Charles Spaniel,12,15,13,13,18,18,12,12,13,13,5,5,2,2,2,1,4,3,3,4,3,3
+Alaskan Malamute,10,14,25,25,85,75,25,25,85,75,3,3,3,3,1,1,3,3,4,5,4,3
+Australian Cattle Dog,12,16,20,20,50,50,18,18,35,35,3,3,3,1,1,1,3,3,4,4,5,1
+Miniature Pinscher,12,16,12.5,12.5,10,10,10,10,8,8,3,4,3,1,1,1,3,4,5,3,5,5
+Belgian Malinois,14,16,26,26,80,60,24,24,60,40,3,3,3,2,1,1,3,3,4,5,4,3
+Staffordshire Bull Terrier,12,14,16,16,38,34,14,14,28,24,5,3,2,2,3,1,4,4,5,5,4,3
+American Staffordshire Terrier,12,16,19,19,70,55,18,18,55,40,3,3,2,1,1,1,4,3,5,3,3,3
+Australian Shepherd,12,15,23,23,65,55,20,20,50,40,5,3,3,2,1,1,3,4,3,5,5,3
+Pembroke Welsh Corgi,12,13,12,12,31,28,10,10,24,24,3,4,4,2,1,1,4,4,5,4,4,4
+Yorkshire Terrier,11,15,8,8,7,7,7,7,7,7,5,3,1,5,1,1,5,4,5,4,4,4
+French Bulldog,10,12,13,13,28,26,11,11,20,18,5,4,3,1,3,1,5,5,3,4,3,1
diff --git a/e2e/support/assets/star_wars_characters.csv b/e2e/support/assets/star_wars_characters.csv
new file mode 100644
index 0000000000000000000000000000000000000000..d3c948d2a6495fd40f694be8201debccd7c4b8bf
--- /dev/null
+++ b/e2e/support/assets/star_wars_characters.csv
@@ -0,0 +1,88 @@
+name,height,mass,hair_color,skin_color,eye_color,birth_year,gender,homeworld,species
+Luke Skywalker,172,77,blond,fair,blue,19BBY,male,Tatooine,Human
+C-3PO,167,75,n/a,gold,yellow,112BBY,n/a,Tatooine,Droid
+R2-D2,96,32,n/a,"white, blue",red,33BBY,n/a,Naboo,Droid
+Darth Vader,202,136,none,white,yellow,41.9BBY,male,Tatooine,Human
+Leia Organa,150,49,brown,light,brown,19BBY,female,Alderaan,Human
+Owen Lars,178,120,"brown, grey",light,blue,52BBY,male,Tatooine,Human
+Beru Whitesun lars,165,75,brown,light,blue,47BBY,female,Tatooine,Human
+R5-D4,97,32,n/a,"white, red",red,unknown,n/a,Tatooine,Droid
+Biggs Darklighter,183,84,black,light,brown,24BBY,male,Tatooine,Human
+Obi-Wan Kenobi,182,77,"auburn, white",fair,blue-gray,57BBY,male,Stewjon,Human
+Anakin Skywalker,188,84,blond,fair,blue,41.9BBY,male,Tatooine,Human
+Wilhuff Tarkin,180,unknown,"auburn, grey",fair,blue,64BBY,male,Eriadu,Human
+Chewbacca,228,112,brown,unknown,blue,200BBY,male,Kashyyyk,Wookiee
+Han Solo,180,80,brown,fair,brown,29BBY,male,Corellia,Human
+Greedo,173,74,n/a,green,black,44BBY,male,Rodia,Rodian
+Jabba Desilijic Tiure,175,"1,358",n/a,"green-tan, brown",orange,600BBY,hermaphrodite,Nal Hutta,Hutt
+Wedge Antilles,170,77,brown,fair,hazel,21BBY,male,Corellia,Human
+Jek Tono Porkins,180,110,brown,fair,blue,unknown,male,Bestine IV,Human
+Yoda,66,17,white,green,brown,896BBY,male,unknown,Yoda's species
+Palpatine,170,75,grey,pale,yellow,82BBY,male,Naboo,Human
+Boba Fett,183,78.2,black,fair,brown,31.5BBY,male,Kamino,Human
+IG-88,200,140,none,metal,red,15BBY,none,unknown,Droid
+Bossk,190,113,none,green,red,53BBY,male,Trandosha,Trandoshan
+Lando Calrissian,177,79,black,dark,brown,31BBY,male,Socorro,Human
+Lobot,175,79,none,light,blue,37BBY,male,Bespin,Human
+Ackbar,180,83,none,brown mottle,orange,41BBY,male,Mon Cala,Mon Calamari
+Mon Mothma,150,unknown,auburn,fair,blue,48BBY,female,Chandrila,Human
+Arvel Crynyd,unknown,unknown,brown,fair,brown,unknown,male,unknown,Human
+Wicket Systri Warrick,88,20,brown,brown,brown,8BBY,male,Endor,Ewok
+Nien Nunb,160,68,none,grey,black,unknown,male,Sullust,Sullustan
+Qui-Gon Jinn,193,89,brown,fair,blue,92BBY,male,unknown,Human
+Nute Gunray,191,90,none,mottled green,red,unknown,male,Cato Neimoidia,Neimodian
+Finis Valorum,170,unknown,blond,fair,blue,91BBY,male,Coruscant,Human
+Jar Jar Binks,196,66,none,orange,orange,52BBY,male,Naboo,Gungan
+Roos Tarpals,224,82,none,grey,orange,unknown,male,Naboo,Gungan
+Rugor Nass,206,unknown,none,green,orange,unknown,male,Naboo,Gungan
+Ric Olié,183,unknown,brown,fair,blue,unknown,male,Naboo,NA
+Watto,137,unknown,black,"blue, grey",yellow,unknown,male,Toydaria,Toydarian
+Sebulba,112,40,none,"grey, red",orange,unknown,male,Malastare,Dug
+Quarsh Panaka,183,unknown,black,dark,brown,62BBY,male,Naboo,NA
+Shmi Skywalker,163,unknown,black,fair,brown,72BBY,female,Tatooine,Human
+Darth Maul,175,80,none,red,yellow,54BBY,male,Dathomir,Zabrak
+Bib Fortuna,180,unknown,none,pale,pink,unknown,male,Ryloth,Twi'lek
+Ayla Secura,178,55,none,blue,hazel,48BBY,female,Ryloth,Twi'lek
+Dud Bolt,94,45,none,"blue, grey",yellow,unknown,male,Vulpter,Vulptereen
+Gasgano,122,unknown,none,"white, blue",black,unknown,male,Troiken,Xexto
+Ben Quadinaros,163,65,none,"grey, green, yellow",orange,unknown,male,Tund,Toong
+Mace Windu,188,84,none,dark,brown,72BBY,male,Haruun Kal,Human
+Ki-Adi-Mundi,198,82,white,pale,yellow,92BBY,male,Cerea,Cerean
+Kit Fisto,196,87,none,green,black,unknown,male,Glee Anselm,Nautolan
+Eeth Koth,171,unknown,black,brown,brown,unknown,male,Iridonia,Zabrak
+Adi Gallia,184,50,none,dark,blue,unknown,female,Coruscant,Tholothian
+Saesee Tiin,188,unknown,none,pale,orange,unknown,male,Iktotch,Iktotchi
+Yarael Poof,264,unknown,none,white,yellow,unknown,male,Quermia,Quermian
+Plo Koon,188,80,none,orange,black,22BBY,male,Dorin,Kel Dor
+Mas Amedda,196,unknown,none,blue,blue,unknown,male,Champala,Chagrian
+Gregar Typho,185,85,black,dark,brown,unknown,male,Naboo,Human
+Cordé,157,unknown,brown,light,brown,unknown,female,Naboo,Human
+Cliegg Lars,183,unknown,brown,fair,blue,82BBY,male,Tatooine,Human
+Poggle the Lesser,183,80,none,green,yellow,unknown,male,Geonosis,Geonosian
+Luminara Unduli,170,56.2,black,yellow,blue,58BBY,female,Mirial,Mirialan
+Barriss Offee,166,50,black,yellow,blue,40BBY,female,Mirial,Mirialan
+Dormé,165,unknown,brown,light,brown,unknown,female,Naboo,Human
+Dooku,193,80,white,fair,brown,102BBY,male,Serenno,Human
+Bail Prestor Organa,191,unknown,black,tan,brown,67BBY,male,Alderaan,Human
+Jango Fett,183,79,black,tan,brown,66BBY,male,Concord Dawn,Human
+Zam Wesell,168,55,blonde,"fair, green, yellow",yellow,unknown,female,Zolan,Clawdite
+Dexter Jettster,198,102,none,brown,yellow,unknown,male,Ojom,Besalisk
+Lama Su,229,88,none,grey,black,unknown,male,Kamino,Kaminoan
+Taun We,213,unknown,none,grey,black,unknown,female,Kamino,Kaminoan
+Jocasta Nu,167,unknown,white,fair,blue,unknown,female,Coruscant,Human
+Ratts Tyerell,79,15,none,"grey, blue",unknown,unknown,male,Aleen Minor,Aleena
+R4-P17,96,unknown,none,"silver, red","red, blue",unknown,female,unknown,NA
+Wat Tambor,193,48,none,"green, grey",unknown,unknown,male,Skako,Skakoan
+San Hill,191,unknown,none,grey,gold,unknown,male,Muunilinst,Muun
+Shaak Ti,178,57,none,"red, blue, white",black,unknown,female,Shili,Togruta
+Grievous,216,159,none,"brown, white","green, yellow",unknown,male,Kalee,Kaleesh
+Tarfful,234,136,brown,brown,blue,unknown,male,Kashyyyk,Wookiee
+Raymus Antilles,188,79,brown,light,brown,unknown,male,Alderaan,Human
+Sly Moore,178,48,none,pale,white,unknown,female,Umbara,NA
+Tion Medon,206,80,none,grey,black,unknown,male,Utapau,Pau'an
+Finn,unknown,unknown,black,dark,dark,unknown,male,unknown,Human
+Rey,unknown,unknown,brown,light,hazel,unknown,female,unknown,Human
+Poe Dameron,unknown,unknown,brown,light,brown,unknown,male,unknown,Human
+BB8,unknown,unknown,none,none,black,unknown,none,unknown,Droid
+Captain Phasma,unknown,unknown,unknown,unknown,unknown,unknown,female,unknown,NA
+Padmé Amidala,165,45,brown,light,brown,46BBY,female,Naboo,Human
diff --git a/e2e/test/scenarios/collections/uploads.cy.spec.js b/e2e/test/scenarios/collections/uploads.cy.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..4e6ecbf0054016a957835babe1c6f3677ab263ac
--- /dev/null
+++ b/e2e/test/scenarios/collections/uploads.cy.spec.js
@@ -0,0 +1,102 @@
+import { restore, queryWritableDB, resyncDatabase } from "e2e/support/helpers";
+
+import { WRITABLE_DB_ID } from "e2e/support/cypress_data";
+
+const FIXTURE_PATH = "../../e2e/support/assets";
+
+const testFiles = [
+  {
+    fileName: "dog_breeds.csv",
+    tableName: "dog_breeds",
+    rowCount: 97,
+  },
+  {
+    fileName: "star_wars_characters.csv",
+    tableName: "star_wars_characters",
+    rowCount: 87,
+  },
+];
+
+describe("CSV Uploading", { tags: ["@external", "@actions"] }, () => {
+  ["postgres"].forEach(dialect => {
+    describe(`CSV Uploading (${dialect})`, () => {
+      beforeEach(() => {
+        restore(`${dialect}-writable`);
+        cy.signInAsAdmin();
+
+        cy.request("POST", "/api/collection", {
+          name: `Uploads Collection`,
+          color: "#000000", // shockingly, this unused field is required
+          parent_id: null,
+        }).then(({ body: { id: collectionId } }) => {
+          cy.wrap(collectionId).as("collectionId");
+        });
+        resyncDatabase({ dbId: WRITABLE_DB_ID });
+        enableUploads();
+      });
+
+      testFiles.forEach(testFile => {
+        it(`Can upload ${testFile.fileName} to a collection`, () => {
+          cy.get("@collectionId").then(collectionId =>
+            cy.visit(`/collection/${collectionId}`),
+          );
+
+          cy.fixture(`${FIXTURE_PATH}/${testFile.fileName}`).then(file => {
+            cy.get("#upload-csv").selectFile(
+              {
+                contents: Cypress.Buffer.from(file),
+                fileName: testFile.fileName,
+                mimeType: "text/csv",
+              },
+              { force: true },
+            );
+          });
+
+          cy.findByRole("status").within(() => {
+            cy.findByText(/Uploading/i);
+            cy.findByText(testFile.fileName);
+
+            cy.findByText("Data added to Uploads Collection", {
+              timeout: 10 * 1000,
+            });
+          });
+
+          cy.get("main").within(() => cy.findByText("Uploads Collection"));
+
+          cy.findByTestId("collection-table").within(() => {
+            cy.findByText(testFile.tableName); // TODO: we should humanize model names
+          });
+
+          const tableQuery = `SELECT * FROM information_schema.tables WHERE table_name LIKE 'upload_${testFile.tableName}_%' ORDER BY table_name DESC LIMIT 1;`;
+
+          queryWritableDB(tableQuery, dialect).then(result => {
+            expect(result.rows.length).to.equal(1);
+            const tableName = result.rows[0].table_name;
+            queryWritableDB(`SELECT count(*) FROM ${tableName};`, dialect).then(
+              result => {
+                expect(Number(result.rows[0].count)).to.equal(
+                  testFile.rowCount,
+                );
+              },
+            );
+          });
+        });
+      });
+    });
+  });
+});
+
+function enableUploads() {
+  const settings = {
+    "uploads-enabled": true,
+    "uploads-database-id": WRITABLE_DB_ID,
+    "uploads-schema-name": "public",
+    "uploads-table-prefix": "upload_",
+  };
+
+  Object.entries(settings).forEach(([key, value]) => {
+    cy.request("PUT", `/api/setting/${key}`, {
+      value,
+    });
+  });
+}
diff --git a/frontend/src/metabase-types/api/mocks/settings.ts b/frontend/src/metabase-types/api/mocks/settings.ts
index 0c7b76e95b5c510e19cdb6bfcac30f5d3a7d52d2..e28dd8a418eb9d24e012b253620a426e0dcec1ca 100644
--- a/frontend/src/metabase-types/api/mocks/settings.ts
+++ b/frontend/src/metabase-types/api/mocks/settings.ts
@@ -198,5 +198,9 @@ export const createMockSettings = (opts?: Partial<Settings>): Settings => ({
   version: createMockVersion(),
   "version-info": createMockVersionInfo(),
   "version-info-last-checked": null,
+  "uploads-enabled": false,
+  "uploads-database-id": null,
+  "uploads-table-prefix": null,
+  "uploads-schema-name": null,
   ...opts,
 });
diff --git a/frontend/src/metabase-types/api/settings.ts b/frontend/src/metabase-types/api/settings.ts
index 030dc2d33f0320d280e45f0ca837019f3250ee63..01a2222d238c02f4d8eac1575f5d7411bdf55750 100644
--- a/frontend/src/metabase-types/api/settings.ts
+++ b/frontend/src/metabase-types/api/settings.ts
@@ -240,6 +240,10 @@ export interface Settings {
   version: Version;
   "version-info": VersionInfo | null;
   "version-info-last-checked": string | null;
+  "uploads-enabled": boolean;
+  "uploads-database-id": number | null;
+  "uploads-schema-name": string | null;
+  "uploads-table-prefix": string | null;
 }
 
 export type SettingKey = keyof Settings;
diff --git a/frontend/src/metabase-types/store/mocks/index.ts b/frontend/src/metabase-types/store/mocks/index.ts
index 93f57f29d7ad45fea4371b0ce0b16163e3b4b543..3ac092627859201e8a70bc7d455e0253ac4c6ae8 100644
--- a/frontend/src/metabase-types/store/mocks/index.ts
+++ b/frontend/src/metabase-types/store/mocks/index.ts
@@ -9,3 +9,4 @@ export * from "./qb";
 export * from "./settings";
 export * from "./setup";
 export * from "./state";
+export * from "./upload";
diff --git a/frontend/src/metabase-types/store/mocks/state.ts b/frontend/src/metabase-types/store/mocks/state.ts
index 73c8454cc9e162ca1b7c7943523a5fb3b7fb648c..4c08124b746283c29842744988c5365f382eaea3 100644
--- a/frontend/src/metabase-types/store/mocks/state.ts
+++ b/frontend/src/metabase-types/store/mocks/state.ts
@@ -11,6 +11,7 @@ import {
   createMockQueryBuilderState,
   createMockSettingsState,
   createMockSetupState,
+  createMockUploadState,
 } from "metabase-types/store/mocks";
 
 export const createMockState = (opts?: Partial<State>): State => ({
@@ -25,5 +26,6 @@ export const createMockState = (opts?: Partial<State>): State => ({
   qb: createMockQueryBuilderState(),
   settings: createMockSettingsState(),
   setup: createMockSetupState(),
+  upload: createMockUploadState(),
   ...opts,
 });
diff --git a/frontend/src/metabase-types/store/mocks/upload.ts b/frontend/src/metabase-types/store/mocks/upload.ts
new file mode 100644
index 0000000000000000000000000000000000000000..360e8651d0301277b612f5d6e3e29bbd85bef497
--- /dev/null
+++ b/frontend/src/metabase-types/store/mocks/upload.ts
@@ -0,0 +1,15 @@
+import { FileUpload } from "../upload";
+
+export const createMockUploadState = () => {
+  return {};
+};
+
+export const createMockUpload = (props?: Partial<FileUpload>): FileUpload => {
+  return {
+    id: Date.now(),
+    name: "test.csv",
+    status: "in-progress",
+    collectionId: "root",
+    ...props,
+  };
+};
diff --git a/frontend/src/metabase-types/store/state.ts b/frontend/src/metabase-types/store/state.ts
index b50f027a403a54d0bfd275ddbcf6f9c952b8dcaf..cb55acd97c6ee3941351215cd707b2a4cf131610 100644
--- a/frontend/src/metabase-types/store/state.ts
+++ b/frontend/src/metabase-types/store/state.ts
@@ -9,6 +9,7 @@ import { QueryBuilderState } from "./qb";
 import { ParametersState } from "./parameters";
 import { SettingsState } from "./settings";
 import { SetupState } from "./setup";
+import { FileUploadState } from "./upload";
 
 export interface State {
   admin: AdminState;
@@ -22,6 +23,7 @@ export interface State {
   parameters: ParametersState;
   settings: SettingsState;
   setup: SetupState;
+  upload: FileUploadState;
 }
 
 export type Dispatch<T = any> = (action: T) => void;
diff --git a/frontend/src/metabase-types/store/upload.ts b/frontend/src/metabase-types/store/upload.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bfce71269c6468e2fa447e2949208bcbc697b12f
--- /dev/null
+++ b/frontend/src/metabase-types/store/upload.ts
@@ -0,0 +1,13 @@
+import { CollectionId } from "metabase-types/api";
+
+export type FileUpload = {
+  status: "complete" | "in-progress" | "error";
+  name: string;
+  collectionId: CollectionId;
+  modelId?: string;
+  message?: string;
+  error?: string;
+  id: number;
+};
+
+export type FileUploadState = Record<string, FileUpload>;
diff --git a/frontend/src/metabase/App.tsx b/frontend/src/metabase/App.tsx
index 4f08a31826337715adeb454fc0ec34c7b021a721..afae32826730aae522a32e444944de05789e75a3 100644
--- a/frontend/src/metabase/App.tsx
+++ b/frontend/src/metabase/App.tsx
@@ -24,7 +24,7 @@ import { initializeIframeResizer } from "metabase/lib/dom";
 import AppBanner from "metabase/components/AppBanner";
 import AppBar from "metabase/nav/containers/AppBar";
 import Navbar from "metabase/nav/containers/Navbar";
-import StatusListing from "metabase/status/containers/StatusListing";
+import StatusListing from "metabase/status/components/StatusListing";
 import { ContentViewportContext } from "metabase/core/context/ContentViewportContext";
 
 import { AppErrorDescriptor, State } from "metabase-types/store";
diff --git a/frontend/src/metabase/admin/settings/components/widgets/SettingSelect.jsx b/frontend/src/metabase/admin/settings/components/widgets/SettingSelect.jsx
index 9739b8a7874c95e9407461433df90b9d6d80b478..8cc5e86f667b0ac99462dcd937437bf416d3b59e 100644
--- a/frontend/src/metabase/admin/settings/components/widgets/SettingSelect.jsx
+++ b/frontend/src/metabase/admin/settings/components/widgets/SettingSelect.jsx
@@ -5,11 +5,11 @@ import cx from "classnames";
 import Select, { Option } from "metabase/core/components/Select";
 
 const SettingSelect = ({
-  className,
+  className = "",
   setting: { placeholder, value, options, defaultValue, searchProp, key },
   options: customOptions = options,
   onChange,
-  disabled,
+  disabled = false,
 }) => (
   <Select
     className={cx("SettingsInput", className)}
diff --git a/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/UploadSettingsWidget.tsx b/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/UploadSettingsWidget.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..f7d9b49d1bcb2040cd7fcb3eb13a70a48420e3c4
--- /dev/null
+++ b/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/UploadSettingsWidget.tsx
@@ -0,0 +1,40 @@
+import React from "react";
+
+import Database from "metabase/entities/databases";
+import type { Database as DatabaseType } from "metabase-types/types/Database";
+
+import SettingSelect from "../SettingSelect";
+
+const getDatabaseOptions = (databases: DatabaseType[]) =>
+  databases
+    .filter(db => db?.settings?.["database-enable-actions"])
+    .map(db => ({ name: db.name, value: db.id }));
+
+interface UploadSettingProps {
+  databases: DatabaseType[];
+  setting: any;
+  onChange: (value: number) => void;
+}
+
+function UploadDbWidgetView({
+  databases,
+  setting,
+  onChange,
+}: UploadSettingProps) {
+  const databaseOptions = getDatabaseOptions(databases);
+  if (!databaseOptions?.length) {
+    return null;
+  }
+
+  return (
+    <SettingSelect
+      setting={{
+        ...setting,
+        options: databaseOptions,
+      }}
+      onChange={(dbId: number) => onChange(dbId)}
+    />
+  );
+}
+
+export const UploadDbWidget = Database.loadList()(UploadDbWidgetView);
diff --git a/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/index.ts b/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..60f8a43ced22994a6f71b81fc9abae0e73dfa3f3
--- /dev/null
+++ b/frontend/src/metabase/admin/settings/components/widgets/UploadSettingsWidget/index.ts
@@ -0,0 +1 @@
+export * from "./UploadSettingsWidget";
diff --git a/frontend/src/metabase/admin/settings/selectors.js b/frontend/src/metabase/admin/settings/selectors.js
index df672dc42b3f99daee671ffa331094c659f8a24a..a3dec68ba0f850425531a41d7fb942ae6353182a 100644
--- a/frontend/src/metabase/admin/settings/selectors.js
+++ b/frontend/src/metabase/admin/settings/selectors.js
@@ -11,6 +11,7 @@ import { getUserIsAdmin } from "metabase/selectors/user";
 import Breadcrumbs from "metabase/components/Breadcrumbs";
 import SettingCommaDelimitedInput from "./components/widgets/SettingCommaDelimitedInput";
 import CustomGeoJSONWidget from "./components/widgets/CustomGeoJSONWidget";
+import { UploadDbWidget } from "./components/widgets/UploadSettingsWidget";
 import SettingsLicense from "./components/SettingsLicense";
 import SiteUrlWidget from "./components/widgets/SiteUrlWidget";
 import HttpsOnlyWidget from "./components/widgets/HttpsOnlyWidget";
@@ -62,14 +63,14 @@ function updateSectionsWithPlugins(sections) {
 const SECTIONS = updateSectionsWithPlugins({
   setup: {
     name: t`Setup`,
-    order: 1,
+    order: 10,
     settings: [],
     component: SetupCheckList,
     adminOnly: true,
   },
   general: {
     name: t`General`,
-    order: 2,
+    order: 20,
     settings: [
       {
         key: "site-name",
@@ -134,7 +135,7 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   updates: {
     name: t`Updates`,
-    order: 3,
+    order: 30,
     component: SettingsUpdatesForm,
     settings: [
       {
@@ -147,7 +148,7 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   email: {
     name: t`Email`,
-    order: 4,
+    order: 40,
     component: SettingsEmailForm,
     settings: [
       {
@@ -221,19 +222,19 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   slack: {
     name: "Slack",
-    order: 5,
+    order: 50,
     component: SlackSettings,
     settings: [],
   },
   authentication: {
     name: t`Authentication`,
-    order: 6,
+    order: 60,
     settings: [], // added by plugins
     adminOnly: true,
   },
   maps: {
     name: t`Maps`,
-    order: 7,
+    order: 70,
     settings: [
       {
         key: "map-tile-server-url",
@@ -252,7 +253,7 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   localization: {
     name: t`Localization`,
-    order: 8,
+    order: 80,
     settings: [
       {
         display_name: t`Instance language`,
@@ -305,9 +306,49 @@ const SECTIONS = updateSectionsWithPlugins({
       },
     ],
   },
+  uploads: {
+    name: t`Uploads`,
+    order: 85,
+    adminOnly: true,
+    settings: [
+      {
+        key: "uploads-enabled",
+        display_name: t`Data Uploads`,
+        description: t`Enable admins to upload data to new database tables from CSV files.`,
+        type: "boolean",
+      },
+      {
+        key: "uploads-database-id",
+        getHidden: settings => !settings["uploads-enabled"],
+        display_name: t`Database`,
+        description: t`Identify a database where upload tables will be created.`,
+        placeholder: t`Select a database`,
+        widget: UploadDbWidget,
+      },
+      {
+        key: "uploads-schema-name",
+        getHidden: settings =>
+          !settings["uploads-enabled"] || !settings["uploads-database-id"],
+        display_name: t`Schema name`,
+        description: t`Identify a database schema where data upload tables will be created.`,
+        type: "string",
+        placeholder: "uploads",
+      },
+      {
+        key: "uploads-table-prefix",
+        getHidden: settings =>
+          !settings["uploads-enabled"] || !settings["uploads-database-id"],
+        display_name: t`Table prefix`,
+        description: t`Identify a table prefix for tables created by data uploads.`,
+        placeholder: "uploaded_",
+        type: "string",
+        required: false,
+      },
+    ],
+  },
   "public-sharing": {
     name: t`Public Sharing`,
-    order: 9,
+    order: 90,
     settings: [
       {
         key: "enable-public-sharing",
@@ -337,7 +378,7 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   "embedding-in-other-applications": {
     name: t`Embedding`,
-    order: 10,
+    order: 100,
     settings: [
       {
         key: "enable-embedding",
@@ -488,13 +529,13 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   license: {
     name: t`License`,
-    order: 11,
+    order: 110,
     component: SettingsLicense,
     settings: [],
   },
   caching: {
     name: t`Caching`,
-    order: 12,
+    order: 120,
     settings: [
       {
         key: "enable-query-caching",
@@ -588,7 +629,7 @@ const SECTIONS = updateSectionsWithPlugins({
   },
   metabot: {
     name: t`Metabot`,
-    order: 13,
+    order: 130,
     settings: [
       {
         key: "is-metabot-enabled",
diff --git a/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.tsx b/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.tsx
index f4928a3f6d4d37a2ba6272dafb3d27a2cd133eab..bc86a67a8744d8e7cc244440429a28682df4ce6a 100644
--- a/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.tsx
+++ b/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.tsx
@@ -2,12 +2,13 @@ import React from "react";
 import { withRouter } from "react-router";
 import type { Location } from "history";
 
-import { Collection } from "metabase-types/api";
+import { Collection, CollectionId } from "metabase-types/api";
 
 import CollectionCaption from "./CollectionCaption";
 import CollectionBookmark from "./CollectionBookmark";
 import CollectionMenu from "./CollectionMenu";
 import CollectionTimeline from "./CollectionTimeline";
+import CollectionUpload from "./CollectionUpload";
 
 import { HeaderActions, HeaderRoot } from "./CollectionHeader.styled";
 
@@ -20,6 +21,8 @@ export interface CollectionHeaderProps {
   onUpdateCollection: (entity: Collection, values: Partial<Collection>) => void;
   onCreateBookmark: (collection: Collection) => void;
   onDeleteBookmark: (collection: Collection) => void;
+  onUpload: (file: File, collectionId: CollectionId) => void;
+  canUpload: boolean;
 }
 
 const CollectionHeader = ({
@@ -31,6 +34,8 @@ const CollectionHeader = ({
   onUpdateCollection,
   onCreateBookmark,
   onDeleteBookmark,
+  onUpload,
+  canUpload,
 }: CollectionHeaderProps): JSX.Element => {
   return (
     <HeaderRoot>
@@ -46,6 +51,9 @@ const CollectionHeader = ({
           onCreateBookmark={onCreateBookmark}
           onDeleteBookmark={onDeleteBookmark}
         />
+        {canUpload && (
+          <CollectionUpload collection={collection} onUpload={onUpload} />
+        )}
         <CollectionMenu
           collection={collection}
           isAdmin={isAdmin}
diff --git a/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.unit.spec.tsx b/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.unit.spec.tsx
index e4b25b023afba1546911d9af2f140aff00de1292..8ce270532b3459ea6646da27868115b0f70221f6 100644
--- a/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.unit.spec.tsx
+++ b/frontend/src/metabase/collections/components/CollectionHeader/CollectionHeader.unit.spec.tsx
@@ -309,9 +309,11 @@ const getProps = (
   collection: createMockCollection(),
   isAdmin: false,
   isBookmarked: false,
+  canUpload: false,
   isPersonalCollectionChild: false,
   onUpdateCollection: jest.fn(),
   onCreateBookmark: jest.fn(),
+  onUpload: jest.fn(),
   onDeleteBookmark: jest.fn(),
   location: {
     pathname: `/collection/1`,
diff --git a/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.styled.tsx b/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.styled.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..3d160a43118ff399faab0b8c6e20d874d8b83899
--- /dev/null
+++ b/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.styled.tsx
@@ -0,0 +1,15 @@
+import styled from "@emotion/styled";
+
+import { color } from "metabase/lib/colors";
+
+export const UploadInput = styled.input`
+  display: none;
+`;
+
+export const LoadingStateContainer = styled.div`
+  display: flex;
+  transform: translateY(10px);
+  align-items: center;
+  height: 16px;
+  color: ${color("brand")};
+`;
diff --git a/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.tsx b/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..9c645902ece28732ff7026bbb6e538adfea4ad38
--- /dev/null
+++ b/frontend/src/metabase/collections/components/CollectionHeader/CollectionUpload.tsx
@@ -0,0 +1,36 @@
+import React from "react";
+
+import type { Collection, CollectionId } from "metabase-types/api";
+
+import { CollectionHeaderButton } from "./CollectionHeader.styled";
+import { UploadInput } from "./CollectionUpload.styled";
+
+export default function CollectionUpload({
+  collection,
+  onUpload,
+}: {
+  collection: Collection;
+  onUpload: (file: File, collectionId: CollectionId) => void;
+}) {
+  const handleFileUpload = (event: React.ChangeEvent<HTMLInputElement>) => {
+    const file = event.target.files?.[0];
+    if (file !== undefined) {
+      onUpload(file, collection.id);
+    }
+  };
+
+  return (
+    <>
+      <label htmlFor="upload-csv">
+        <CollectionHeaderButton as="span" to="" icon="arrow_up" />
+      </label>
+      <UploadInput
+        id="upload-csv"
+        type="file"
+        accept="text/csv"
+        onChange={handleFileUpload}
+        data-testid="upload-input"
+      />
+    </>
+  );
+}
diff --git a/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.styled.tsx b/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.styled.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..c9e4fe144f2463c09a32997e4b739ade4284565a
--- /dev/null
+++ b/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.styled.tsx
@@ -0,0 +1,21 @@
+import styled from "@emotion/styled";
+import { color, alpha } from "metabase/lib/colors";
+
+export const DragOverlay = styled.div<{ isDragActive: boolean }>`
+  position: absolute;
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  z-index: 1000;
+  background-color: ${alpha("white", 0.8)};
+  padding: 2rem;
+  font-size: 2rem;
+  color: ${color("text-dark")};
+  opacity: ${props => (props.isDragActive ? 1 : 0)};
+  transition: opacity 0.2s;
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  pointer-events: none;
+`;
diff --git a/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.tsx b/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..d2890785c2cbcc3edbf9dcea5d7134a3705ead8e
--- /dev/null
+++ b/frontend/src/metabase/collections/components/UploadOverlay/UploadOverlay.tsx
@@ -0,0 +1,16 @@
+import React from "react";
+import { t } from "ttag";
+
+import { DragOverlay } from "./UploadOverlay.styled";
+
+export default function UploadOverlay({
+  isDragActive,
+}: {
+  isDragActive: boolean;
+}) {
+  return (
+    <DragOverlay isDragActive={isDragActive}>
+      {t`Drop a CSV file to upload to this collection`}
+    </DragOverlay>
+  );
+}
diff --git a/frontend/src/metabase/collections/components/UploadOverlay/index.ts b/frontend/src/metabase/collections/components/UploadOverlay/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c01f8dcc2f3b3f7c846a51abbf4024f00d160418
--- /dev/null
+++ b/frontend/src/metabase/collections/components/UploadOverlay/index.ts
@@ -0,0 +1 @@
+export { default } from "./UploadOverlay";
diff --git a/frontend/src/metabase/collections/containers/CollectionContent.jsx b/frontend/src/metabase/collections/containers/CollectionContent.jsx
index 1d5029c2713a352b48a4f6bae110c21df8539231..b77784e6e1dfecfdd569713344e5ecf5bcdaf7fd 100644
--- a/frontend/src/metabase/collections/containers/CollectionContent.jsx
+++ b/frontend/src/metabase/collections/containers/CollectionContent.jsx
@@ -2,6 +2,7 @@
 import React, { useEffect, useState, useCallback } from "react";
 import _ from "underscore";
 import { connect } from "react-redux";
+import { useDropzone } from "react-dropzone";
 
 import { usePrevious, useMount } from "react-use";
 import Bookmark from "metabase/entities/bookmarks";
@@ -11,6 +12,7 @@ import Search from "metabase/entities/search";
 import { getUserIsAdmin } from "metabase/selectors/user";
 import { getMetadata } from "metabase/selectors/metadata";
 import { getIsBookmarked } from "metabase/collections/selectors";
+import { getSetting } from "metabase/selectors/settings";
 import { getIsNavbarOpen, openNavbar } from "metabase/redux/app";
 
 import BulkActions from "metabase/collections/components/BulkActions";
@@ -19,6 +21,7 @@ import Header from "metabase/collections/containers/CollectionHeader";
 import ItemsTable from "metabase/collections/components/ItemsTable";
 import PinnedItemOverview from "metabase/collections/components/PinnedItemOverview";
 import { isPersonalCollectionChild } from "metabase/collections/utils";
+import { uploadFile } from "metabase/redux/uploads";
 
 import ItemsDragLayer from "metabase/containers/dnd/ItemsDragLayer";
 import PaginationControls from "metabase/components/PaginationControls";
@@ -27,6 +30,9 @@ import { usePagination } from "metabase/hooks/use-pagination";
 import { useListSelect } from "metabase/hooks/use-list-select";
 import { isSmallScreen } from "metabase/lib/dom";
 import Databases from "metabase/entities/databases";
+
+import UploadOverlay from "../components/UploadOverlay";
+
 import {
   CollectionEmptyContent,
   CollectionMain,
@@ -48,11 +54,20 @@ const ALL_MODELS = [
 const itemKeyFn = item => `${item.id}:${item.model}`;
 
 function mapStateToProps(state, props) {
+  const uploadDbId = getSetting(state, "uploads-database-id");
+  const canAccessUploadsDb =
+    getSetting(state, "uploads-enabled") &&
+    uploadDbId &&
+    !!Databases.selectors.getObject(state, {
+      entityId: uploadDbId,
+    });
+
   return {
     isAdmin: getUserIsAdmin(state),
     isBookmarked: getIsBookmarked(state, props),
     metadata: getMetadata(state),
     isNavbarOpen: getIsNavbarOpen(state),
+    uploadsEnabled: canAccessUploadsDb,
   };
 }
 
@@ -60,6 +75,7 @@ const mapDispatchToProps = {
   openNavbar,
   createBookmark: (id, type) => Bookmark.actions.create({ id, type }),
   deleteBookmark: (id, type) => Bookmark.actions.delete({ id, type }),
+  uploadFile,
 };
 
 function CollectionContent({
@@ -74,6 +90,8 @@ function CollectionContent({
   metadata,
   isNavbarOpen,
   openNavbar,
+  uploadFile,
+  uploadsEnabled,
 }) {
   const [isBookmarked, setIsBookmarked] = useState(false);
   const [selectedItems, setSelectedItems] = useState(null);
@@ -110,6 +128,21 @@ function CollectionContent({
     setIsBookmarked(shouldBeBookmarked);
   }, [bookmarks, collectionId]);
 
+  const onDrop = useCallback(
+    acceptedFiles => {
+      uploadFile(acceptedFiles[0], collectionId);
+    },
+    [collectionId, uploadFile],
+  );
+
+  const { getRootProps, isDragActive } = useDropzone({
+    onDrop,
+    maxFiles: 1,
+    noClick: true,
+    noDragEventsBubbling: true,
+    accept: { "text/csv": [".csv"] },
+  });
+
   const handleBulkArchive = useCallback(async () => {
     try {
       await Promise.all(selected.map(item => item.setArchived(true)));
@@ -168,6 +201,10 @@ function CollectionContent({
     deleteBookmark(collectionId, "collection");
   };
 
+  const canUpload = uploadsEnabled && collection.can_write;
+
+  const rootProps = canUpload ? getRootProps() : {};
+
   const unpinnedQuery = {
     collection: collectionId,
     models: ALL_MODELS,
@@ -195,7 +232,8 @@ function CollectionContent({
         const hasPinnedItems = pinnedItems.length > 0;
 
         return (
-          <CollectionRoot>
+          <CollectionRoot {...rootProps}>
+            {canUpload && <UploadOverlay isDragActive={isDragActive} />}
             <CollectionMain>
               <Header
                 collection={collection}
@@ -207,6 +245,7 @@ function CollectionContent({
                 )}
                 onCreateBookmark={handleCreateBookmark}
                 onDeleteBookmark={handleDeleteBookmark}
+                canUpload={canUpload}
               />
               <PinnedItemOverview
                 databases={databases}
@@ -255,7 +294,7 @@ function CollectionContent({
                   }
 
                   return (
-                    <CollectionTable>
+                    <CollectionTable data-testid="collection-table">
                       <ItemsTable
                         databases={databases}
                         bookmarks={bookmarks}
diff --git a/frontend/src/metabase/collections/containers/CollectionContent.styled.tsx b/frontend/src/metabase/collections/containers/CollectionContent.styled.tsx
index b68f5a3512f7798c39702a498c8e4d187943a79b..b4f24c7f14cb54f0303fd89868c3a76ee06d405e 100644
--- a/frontend/src/metabase/collections/containers/CollectionContent.styled.tsx
+++ b/frontend/src/metabase/collections/containers/CollectionContent.styled.tsx
@@ -2,6 +2,8 @@ import styled from "@emotion/styled";
 
 export const CollectionRoot = styled.div`
   padding-top: 1rem;
+  min-height: 100%;
+  position: relative;
 `;
 
 export const CollectionMain = styled.div`
diff --git a/frontend/src/metabase/collections/containers/CollectionHeader/CollectionHeader.tsx b/frontend/src/metabase/collections/containers/CollectionHeader/CollectionHeader.tsx
index 42b56d7cdf2a8d816a49fee6c55a49ca61ef7ea6..ab32176e20664660454cbc654e89fe9e5fb808e1 100644
--- a/frontend/src/metabase/collections/containers/CollectionHeader/CollectionHeader.tsx
+++ b/frontend/src/metabase/collections/containers/CollectionHeader/CollectionHeader.tsx
@@ -1,11 +1,14 @@
 import { connect } from "react-redux";
 import Collections from "metabase/entities/collections";
+import { uploadFile } from "metabase/redux/uploads";
 import { Collection } from "metabase-types/api";
 import CollectionHeader from "../../components/CollectionHeader";
 
 const mapDispatchToProps = {
   onUpdateCollection: (collection: Collection, values: Partial<Collection>) =>
     Collections.actions.update(collection, values),
+
+  onUpload: uploadFile,
 };
 
 export default connect(null, mapDispatchToProps)(CollectionHeader);
diff --git a/frontend/src/metabase/lib/api.js b/frontend/src/metabase/lib/api.js
index 807c1b226e69ef9008ad0eadde86f31e45380c1d..bdf9fb019f288cece5529af46d7739b415504209 100644
--- a/frontend/src/metabase/lib/api.js
+++ b/frontend/src/metabase/lib/api.js
@@ -57,10 +57,10 @@ export class Api extends EventEmitter {
         ...methodOptions,
       };
 
-      return async (data, invocationOptions = {}) => {
+      return async (rawData, invocationOptions = {}) => {
         const options = { ...defaultOptions, ...invocationOptions };
         let url = urlTemplate;
-        data = { ...data };
+        const data = { ...rawData };
         for (const tag of url.match(/:\w+/g) || []) {
           const paramName = tag.slice(1);
           let value = data[paramName];
@@ -85,6 +85,10 @@ export class Api extends EventEmitter {
           ? { Accept: "application/json", "Content-Type": "application/json" }
           : {};
 
+        if (options.formData && options.fetch) {
+          delete headers["Content-Type"];
+        }
+
         if (isWithinIframe()) {
           headers["X-Metabase-Embedded"] = "true";
         }
@@ -95,9 +99,13 @@ export class Api extends EventEmitter {
 
         let body;
         if (options.hasBody) {
-          body = JSON.stringify(
-            options.bodyParamName != null ? data[options.bodyParamName] : data,
-          );
+          body = options.formData
+            ? rawData
+            : JSON.stringify(
+                options.bodyParamName != null
+                  ? data[options.bodyParamName]
+                  : data,
+              );
         } else {
           const qs = querystring.stringify(data);
           if (qs) {
@@ -154,9 +162,10 @@ export class Api extends EventEmitter {
   }
 
   _makeRequest(...args) {
+    const options = args[5];
     // this is temporary to not deal with failed cypress tests
     // we should switch to using fetch in all cases (metabase#28489)
-    if (isTest) {
+    if (isTest || options.fetch) {
       return this._makeRequestWithFetch(...args);
     } else {
       return this._makeRequestWithXhr(...args);
diff --git a/frontend/src/metabase/lib/settings.ts b/frontend/src/metabase/lib/settings.ts
index 6c4d736b7a51078dd1dbca453c0fda77b4b71089..43f4e3710ab7b86d37558c592006a27d10e24109 100644
--- a/frontend/src/metabase/lib/settings.ts
+++ b/frontend/src/metabase/lib/settings.ts
@@ -162,6 +162,10 @@ class MetabaseSettings {
     return this.get("anon-tracking-enabled") || false;
   }
 
+  uploadsEnabled() {
+    return !!(this.get("uploads-enabled") && this.get("uploads-database-id"));
+  }
+
   googleAnalyticsEnabled() {
     return this.get("ga-enabled") || false;
   }
diff --git a/frontend/src/metabase/lib/uploads.ts b/frontend/src/metabase/lib/uploads.ts
new file mode 100644
index 0000000000000000000000000000000000000000..82cd0be31ce858b7e69e47b6b4377cc52f2d10ff
--- /dev/null
+++ b/frontend/src/metabase/lib/uploads.ts
@@ -0,0 +1,10 @@
+import { FileUpload } from "metabase-types/store/upload";
+
+export const isUploadInProgress = (upload: FileUpload) =>
+  upload.status === "in-progress";
+
+export const isUploadCompleted = (upload: FileUpload) =>
+  upload.status === "complete";
+
+export const isUploadAborted = (upload: FileUpload) =>
+  upload.status === "error";
diff --git a/frontend/src/metabase/reducers-common.js b/frontend/src/metabase/reducers-common.js
index 621783dc9764be2925d228724bbd4914597c0476..7f80016b773c653af68d79d952382dbc1085e30f 100644
--- a/frontend/src/metabase/reducers-common.js
+++ b/frontend/src/metabase/reducers-common.js
@@ -6,6 +6,7 @@ import embed from "metabase/redux/embed";
 import requests from "metabase/redux/requests";
 import settings from "metabase/redux/settings";
 import undo from "metabase/redux/undo";
+import upload from "metabase/redux/uploads";
 // eslint-disable-next-line import/no-named-as-default
 import entities, { enhanceRequestsReducer } from "metabase/redux/entities";
 
@@ -22,4 +23,5 @@ export default {
   settings,
   undo,
   entities,
+  upload,
 };
diff --git a/frontend/src/metabase/redux/uploads.ts b/frontend/src/metabase/redux/uploads.ts
new file mode 100644
index 0000000000000000000000000000000000000000..80d3a53b983ae5789b2ec3f79491deef386bdc29
--- /dev/null
+++ b/frontend/src/metabase/redux/uploads.ts
@@ -0,0 +1,143 @@
+import { assocIn, dissocIn, updateIn } from "icepick";
+import { t } from "ttag";
+
+import { CardApi } from "metabase/services";
+import Collections from "metabase/entities/collections";
+
+import type { Dispatch, GetState, State } from "metabase-types/store";
+import type { CollectionId } from "metabase-types/api";
+import type { FileUploadState } from "metabase-types/store/upload";
+
+import {
+  createAction,
+  createThunkAction,
+  handleActions,
+} from "metabase/lib/redux";
+
+export const UPLOAD_FILE_TO_COLLECTION = "metabase/collection/UPLOAD_FILE";
+export const UPLOAD_FILE_TO_COLLECTION_START =
+  "metabase/collection/UPLOAD_FILE_START";
+export const UPLOAD_FILE_TO_COLLECTION_END =
+  "metabase/collection/UPLOAD_FILE_END";
+export const UPLOAD_FILE_TO_COLLECTION_ERROR =
+  "metabase/collection/UPLOAD_FILE_ERROR";
+export const UPLOAD_FILE_TO_COLLECTION_CLEAR =
+  "metabase/collection/UPLOAD_FILE_CLEAR";
+
+const MAX_UPLOAD_SIZE = 200 * 1024 * 1024; // 200MB
+const MAX_UPLOAD_STRING = "200MB";
+
+const CLEAR_AFTER_MS = 5000;
+
+const uploadStart = createAction(UPLOAD_FILE_TO_COLLECTION_START);
+const uploadEnd = createAction(UPLOAD_FILE_TO_COLLECTION_END);
+const uploadError = createAction(UPLOAD_FILE_TO_COLLECTION_ERROR);
+const clearUpload = createAction(UPLOAD_FILE_TO_COLLECTION_CLEAR);
+
+export const getAllUploads = (state: State) =>
+  Object.keys(state.upload).map(key => state.upload[key]);
+
+export const uploadFile = createThunkAction(
+  UPLOAD_FILE_TO_COLLECTION,
+  (file: File, collectionId: CollectionId) =>
+    async (dispatch: Dispatch, getState: GetState) => {
+      const id = Date.now();
+
+      const clear = () =>
+        setTimeout(() => {
+          dispatch(clearUpload({ id }));
+        }, CLEAR_AFTER_MS);
+
+      dispatch(
+        uploadStart({
+          id,
+          name: file.name,
+          collectionId,
+        }),
+      );
+
+      if (file.size > MAX_UPLOAD_SIZE) {
+        dispatch(
+          uploadError({
+            id,
+            message: t`You cannot upload files larger than ${MAX_UPLOAD_STRING}`,
+          }),
+        );
+        clear();
+        return;
+      }
+
+      try {
+        const formData = new FormData();
+        formData.append("file", file);
+        formData.append("collection_id", String(collectionId));
+        const response = await CardApi.uploadCSV(formData);
+
+        dispatch(
+          uploadEnd({
+            id,
+            modelId: response.model_id,
+          }),
+        );
+
+        dispatch(Collections.actions.invalidateLists());
+      } catch (err: any) {
+        dispatch(
+          uploadError({
+            id,
+            message:
+              err?.data?.message ?? t`There was an error uploading the file`,
+          }),
+        );
+      } finally {
+        clear();
+      }
+    },
+);
+
+interface UploadStartPayload {
+  id: number;
+  name: string;
+  collectionId: CollectionId;
+}
+
+interface UploadEndPayload {
+  id: number;
+}
+
+const upload = handleActions<
+  FileUploadState,
+  UploadStartPayload | UploadEndPayload
+>(
+  {
+    [UPLOAD_FILE_TO_COLLECTION_START]: {
+      next: (state, { payload }) =>
+        assocIn(state, [payload.id], {
+          ...payload,
+          status: "in-progress",
+        }),
+    },
+    [UPLOAD_FILE_TO_COLLECTION_END]: {
+      next: (state, { payload }) =>
+        updateIn(state, [payload.id], val => ({
+          ...val,
+          ...payload,
+          status: "complete",
+        })),
+    },
+    [UPLOAD_FILE_TO_COLLECTION_ERROR]: {
+      next: (state, { payload }) =>
+        updateIn(state, [payload.id], val => ({
+          ...val,
+          ...payload,
+          status: "error",
+        })),
+    },
+    [UPLOAD_FILE_TO_COLLECTION_CLEAR]: {
+      next: (state, { payload: { id } }) => dissocIn(state, [id]),
+    },
+  },
+  {},
+);
+
+export default upload;
diff --git a/frontend/src/metabase/redux/uploads.unit.spec.js b/frontend/src/metabase/redux/uploads.unit.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..6710576f0d9392b573e24a0ffb7fa739f2e0e651
--- /dev/null
+++ b/frontend/src/metabase/redux/uploads.unit.spec.js
@@ -0,0 +1,139 @@
+import fetchMock from "fetch-mock";
+
+import {
+  uploadFile,
+  UPLOAD_FILE_TO_COLLECTION_CLEAR,
+  UPLOAD_FILE_TO_COLLECTION_END,
+  UPLOAD_FILE_TO_COLLECTION_ERROR,
+  UPLOAD_FILE_TO_COLLECTION_START,
+} from "./uploads";
+
+const now = Date.now();
+
+const mockUploadCSV = (valid = true) => {
+  fetchMock.post(
+    "path:/api/card/from-csv",
+    valid
+      ? {
+          model_id: 3,
+        }
+      : {
+          throws: { data: { message: "It's dead Jim" } },
+        },
+  );
+};
+
+describe("csv uploads", () => {
+  describe("actions", () => {
+    let dispatch;
+    const file = new File(
+      [new Blob(["col1, col2 \n val1, val2"])],
+      "test.csv",
+      {
+        type: "text/csv",
+      },
+    );
+
+    beforeEach(() => {
+      dispatch = jest.fn();
+      jest.useFakeTimers().setSystemTime(now);
+    });
+
+    afterAll(() => {
+      jest.useRealTimers();
+    });
+
+    it("should handle file upload success", async () => {
+      mockUploadCSV();
+
+      await uploadFile(file, "root")(dispatch);
+      jest.advanceTimersByTime(6000);
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_START,
+        payload: {
+          id: now,
+          name: "test.csv",
+          collectionId: "root",
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_END,
+        payload: {
+          id: now,
+          modelId: 3,
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_CLEAR,
+        payload: {
+          id: now,
+        },
+      });
+    });
+
+    it("should handle file upload error", async () => {
+      mockUploadCSV(false);
+
+      await uploadFile(file, "root")(dispatch);
+      jest.advanceTimersByTime(6000);
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_START,
+        payload: {
+          id: now,
+          name: "test.csv",
+          collectionId: "root",
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_ERROR,
+        payload: {
+          id: now,
+          message: "It's dead Jim",
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_CLEAR,
+        payload: {
+          id: now,
+        },
+      });
+    });
+
+    it("Error on oversized files", async () => {
+      const bigFile = new File([""], "test.csv");
+      Object.defineProperty(bigFile, "size", { value: 200 * 1024 * 1024 + 1 });
+      await uploadFile(bigFile, "root")(dispatch);
+      jest.advanceTimersByTime(6000);
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_START,
+        payload: {
+          id: now,
+          name: "test.csv",
+          collectionId: "root",
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_ERROR,
+        payload: {
+          id: now,
+          message: "You cannot upload files larger than 200MB",
+        },
+      });
+
+      expect(dispatch).toHaveBeenCalledWith({
+        type: UPLOAD_FILE_TO_COLLECTION_CLEAR,
+        payload: {
+          id: now,
+        },
+      });
+    });
+  });
+});
diff --git a/frontend/src/metabase/services.js b/frontend/src/metabase/services.js
index 37ca88e81438374310357eccb90da6c37b33fce6..7a41dbc103807a3fdc7f7095c8b9526e2fdba017 100644
--- a/frontend/src/metabase/services.js
+++ b/frontend/src/metabase/services.js
@@ -149,6 +149,10 @@ export const CardApi = {
     ),
   ),
   create: POST("/api/card"),
+  uploadCSV: POST("/api/card/from-csv", {
+    formData: true,
+    fetch: true,
+  }),
   get: GET("/api/card/:cardId"),
   update: PUT("/api/card/:id"),
   delete: DELETE("/api/card/:id"),
diff --git a/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.tsx b/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.tsx
index 54b047796e9e1c4398f99c68b33b271e140b4a0d..0a8e379b78d3506fc7428a64988797929680daf9 100644
--- a/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.tsx
+++ b/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.tsx
@@ -6,23 +6,8 @@ import {
   isSyncInProgress,
 } from "metabase/lib/syncing";
 import { Database } from "metabase-types/api";
-import Ellipsified from "metabase/core/components/Ellipsified";
-import Icon from "../../../components/Icon";
-import useStatusVisibility from "../../hooks/use-status-visibility";
-import {
-  StatusCardRoot,
-  StatusCardIcon,
-  StatusCardBody,
-  StatusCardTitle,
-  StatusCardDescription,
-  StatusCardSpinner,
-  StatusCardIconContainer,
-  StatusRoot,
-  StatusHeader,
-  StatusTitle,
-  StatusToggle,
-  StatusBody,
-} from "./DatabaseStatusLarge.styled";
+
+import StatusLarge from "../StatusLarge";
 
 export interface DatabaseStatusLargeProps {
   databases: Database[];
@@ -32,72 +17,24 @@ export interface DatabaseStatusLargeProps {
 
 const DatabaseStatusLarge = ({
   databases,
-  isActive,
   onCollapse,
-}: DatabaseStatusLargeProps): JSX.Element => {
-  return (
-    <StatusRoot role="status">
-      <StatusHeader>
-        <StatusTitle>{getTitle(databases)}</StatusTitle>
-        <StatusToggle onClick={onCollapse}>
-          <Icon name="chevrondown" />
-        </StatusToggle>
-      </StatusHeader>
-      <StatusBody>
-        {databases.map(database => (
-          <StatusCard
-            key={database.id}
-            database={database}
-            isActive={isActive}
-          />
-        ))}
-      </StatusBody>
-    </StatusRoot>
-  );
-};
-
-interface StatusCardProps {
-  database: Database;
-  isActive?: boolean;
-}
-
-const StatusCard = ({
-  database,
   isActive,
-}: StatusCardProps): JSX.Element | null => {
-  const isVisible = useStatusVisibility(isActive || isSyncInProgress(database));
-
-  if (!isVisible) {
-    return null;
-  }
+}: DatabaseStatusLargeProps): JSX.Element => {
+  const status = {
+    title: getTitle(databases),
+    items: databases.map(database => ({
+      id: database.id,
+      title: database.name,
+      icon: "database",
+      description: getDescription(database),
+      isInProgress: isSyncInProgress(database),
+      isCompleted: isSyncCompleted(database),
+      isAborted: isSyncAborted(database),
+    })),
+  };
 
   return (
-    <StatusCardRoot key={database.id}>
-      <StatusCardIcon>
-        <Icon name="database" />
-      </StatusCardIcon>
-      <StatusCardBody>
-        <StatusCardTitle>
-          <Ellipsified>{database.name}</Ellipsified>
-        </StatusCardTitle>
-        <StatusCardDescription>
-          {getDescription(database)}
-        </StatusCardDescription>
-      </StatusCardBody>
-      {isSyncInProgress(database) && (
-        <StatusCardSpinner size={24} borderWidth={3} />
-      )}
-      {isSyncCompleted(database) && (
-        <StatusCardIconContainer>
-          <Icon name="check" size={12} />
-        </StatusCardIconContainer>
-      )}
-      {isSyncAborted(database) && (
-        <StatusCardIconContainer isError={true}>
-          <Icon name="warning" size={12} />
-        </StatusCardIconContainer>
-      )}
-    </StatusCardRoot>
+    <StatusLarge status={status} onCollapse={onCollapse} isActive={isActive} />
   );
 };
 
diff --git a/frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.tsx b/frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.tsx
index 0dd500f85fb90b482e36ce30bda312b2f1ba4460..e7ad3f26fbae551174c991916116cba3be2298f9 100644
--- a/frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.tsx
+++ b/frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.tsx
@@ -2,15 +2,8 @@ import React from "react";
 import { t } from "ttag";
 import { isReducedMotionPreferred } from "metabase/lib/dom";
 import { isSyncAborted, isSyncInProgress } from "metabase/lib/syncing";
-import Tooltip from "metabase/core/components/Tooltip";
 import { Database, InitialSyncStatus } from "metabase-types/api";
-import {
-  StatusRoot,
-  StatusIconContainer,
-  StatusIcon,
-  StatusContainer,
-  StatusSpinner,
-} from "./DatabaseStatusSmall.styled";
+import StatusSmall from "../StatusSmall";
 
 export interface DatabaseStatusSmallProps {
   databases: Database[];
@@ -24,18 +17,16 @@ const DatabaseStatusSmall = ({
   const status = getStatus(databases);
   const statusLabel = getStatusLabel(status);
   const hasSpinner = isSpinnerVisible(status);
+  const icon = getIconName(status);
 
   return (
-    <Tooltip tooltip={statusLabel}>
-      <StatusRoot role="status" aria-label={statusLabel} onClick={onExpand}>
-        <StatusContainer status={status}>
-          <StatusIconContainer status={status}>
-            <StatusIcon status={status} name={getIconName(status)} />
-          </StatusIconContainer>
-        </StatusContainer>
-        {hasSpinner && <StatusSpinner size={48} />}
-      </StatusRoot>
-    </Tooltip>
+    <StatusSmall
+      status={status}
+      statusLabel={statusLabel}
+      hasSpinner={hasSpinner}
+      icon={icon}
+      onExpand={onExpand}
+    />
   );
 };
 
diff --git a/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.tsx b/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..cde5732a0c8398eeec08437cbf517bbaaa982ffb
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.tsx
@@ -0,0 +1,62 @@
+import React from "react";
+import { useSelector } from "react-redux";
+
+import { getAllUploads } from "metabase/redux/uploads";
+import Collections from "metabase/entities/collections/collections";
+import { Collection } from "metabase-types/api";
+import { FileUpload } from "metabase-types/store/upload";
+import { isUploadInProgress } from "metabase/lib/uploads";
+
+import useStatusVisibility from "../../hooks/use-status-visibility";
+import FileUploadStatusLarge from "../FileUploadStatusLarge";
+
+const FileUploadStatus = ({
+  collections = [],
+}: {
+  collections: Collection[];
+}) => {
+  const uploads = useSelector(getAllUploads);
+
+  const uploadCollections = collections.filter(collection =>
+    uploads.some(upload => upload.collectionId === collection.id),
+  );
+
+  return (
+    <>
+      {uploadCollections.map(collection => {
+        const collectionUploads = uploads.filter(
+          ({ collectionId }) => collectionId === collection.id,
+        );
+
+        return (
+          <FileUploadStatusContent
+            key={`uploads-${collection.id}`}
+            uploads={collectionUploads}
+            collection={collection}
+          />
+        );
+      })}
+    </>
+  );
+};
+
+const FileUploadStatusContent = ({
+  collection,
+  uploads,
+}: {
+  collection: Collection;
+  uploads: FileUpload[];
+}) => {
+  const isActive = uploads.some(isUploadInProgress);
+  const isVisible = useStatusVisibility(isActive);
+
+  if (!isVisible) {
+    return null;
+  }
+
+  return <FileUploadStatusLarge uploads={uploads} collection={collection} />;
+};
+
+export default Collections.loadList({ loadingAndErrorWrapper: false })(
+  FileUploadStatus,
+);
diff --git a/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.unit.spec.tsx b/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.unit.spec.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..8459d86238fee84c2497a61d9b5321afed4a1813
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatus/FileUploadStatus.unit.spec.tsx
@@ -0,0 +1,170 @@
+import React from "react";
+import fetchMock from "fetch-mock";
+import { screen } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
+import { Route } from "react-router";
+import { createMockUpload, createMockState } from "metabase-types/store/mocks";
+
+import { renderWithProviders } from "__support__/ui";
+import { createMockCollection } from "metabase-types/api/mocks";
+import CollectionHeader from "metabase/collections/containers/CollectionHeader";
+import FileUploadStatus from "./FileUploadStatus";
+
+describe("FileUploadStatus", () => {
+  const firstCollectionId = 1;
+  const firstCollection = createMockCollection({ id: firstCollectionId });
+
+  const secondCollectionId = 2;
+
+  beforeEach(() => {
+    fetchMock.get("path:/api/collection", [
+      firstCollection,
+      createMockCollection({
+        id: secondCollectionId,
+        name: "Second Collection",
+      }),
+    ]);
+  });
+
+  it("Should group uploads by collection", async () => {
+    const uploadOne = createMockUpload({
+      collectionId: firstCollectionId,
+      id: 1,
+    });
+
+    const uploadTwo = createMockUpload({
+      collectionId: secondCollectionId,
+      id: 2,
+      name: "test two.csv",
+    });
+
+    const uploadThree = createMockUpload({
+      collectionId: firstCollectionId,
+      id: 3,
+      name: "test three.csv",
+    });
+
+    renderWithProviders(<FileUploadStatus />, {
+      storeInitialState: createMockState({
+        upload: {
+          [uploadOne.id]: uploadOne,
+          [uploadTwo.id]: uploadTwo,
+          [uploadThree.id]: uploadThree,
+        },
+      }),
+    });
+
+    expect(
+      await screen.findByText("Uploading data to Collection..."),
+    ).toBeInTheDocument();
+
+    expect(
+      await screen.findByText("Uploading data to Second Collection..."),
+    ).toBeInTheDocument();
+
+    expect(await screen.findByText("test.csv")).toBeInTheDocument();
+
+    expect(await screen.findByText("test two.csv")).toBeInTheDocument();
+
+    expect(await screen.findByText("test three.csv")).toBeInTheDocument();
+  });
+
+  it("Should show a start exploring link on completion", async () => {
+    fetchMock.post("path:/api/card/from-csv", { model_id: 3 }, { delay: 1000 });
+
+    renderWithProviders(
+      <Route
+        path="/"
+        component={() => {
+          return (
+            <>
+              <CollectionHeader
+                collection={firstCollection}
+                isAdmin={true}
+                isBookmarked={false}
+                isPersonalCollectionChild={false}
+                onCreateBookmark={jest.fn()}
+                onDeleteBookmark={jest.fn()}
+                canUpload
+              />
+              <FileUploadStatus />
+            </>
+          );
+        }}
+      />,
+      {
+        withRouter: true,
+      },
+    );
+
+    userEvent.upload(
+      screen.getByTestId("upload-input"),
+      new File(["foo, bar"], "test.csv", { type: "text/csv" }),
+    );
+
+    expect(
+      await screen.findByText("Uploading data to Collection..."),
+    ).toBeInTheDocument();
+
+    expect(
+      await screen.findByRole(
+        "link",
+        { name: "Start exploring" },
+        { timeout: 5000 },
+      ),
+    ).toHaveAttribute("href", "/model/3");
+  });
+
+  it("Should show an error message on error", async () => {
+    fetchMock.post(
+      "path:/api/card/from-csv",
+      {
+        throws: { data: { message: "It's dead Jim" } },
+        status: 400,
+      },
+      { delay: 1000 },
+    );
+    renderWithProviders(
+      <Route
+        path="/"
+        component={() => {
+          return (
+            <>
+              <CollectionHeader
+                collection={firstCollection}
+                isAdmin={true}
+                isBookmarked={false}
+                isPersonalCollectionChild={false}
+                onCreateBookmark={jest.fn()}
+                onDeleteBookmark={jest.fn()}
+                canUpload
+              />
+              <FileUploadStatus />
+            </>
+          );
+        }}
+      />,
+      {
+        withRouter: true,
+      },
+    );
+
+    userEvent.upload(
+      screen.getByTestId("upload-input"),
+      new File(["foo, bar"], "test.csv", { type: "text/csv" }),
+    );
+
+    expect(
+      await screen.findByText("Uploading data to Collection..."),
+    ).toBeInTheDocument();
+
+    expect(
+      await screen.findByText(
+        "Error uploading your File",
+        {},
+        { timeout: 3000 },
+      ),
+    ).toBeInTheDocument();
+    expect(await screen.findByText("It's dead Jim")).toBeInTheDocument();
+  });
+});
diff --git a/frontend/src/metabase/status/components/FileUploadStatus/index.ts b/frontend/src/metabase/status/components/FileUploadStatus/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..ab767466e097717676aa430c2cf05814d8ca5a8f
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatus/index.ts
@@ -0,0 +1 @@
+export { default } from "./FileUploadStatus";
diff --git a/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.stories.tsx b/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.stories.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..f78404216af8ce2ad1cc16750421cf2bd74b48e2
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.stories.tsx
@@ -0,0 +1,56 @@
+import React from "react";
+import type { ComponentStory } from "@storybook/react";
+import { createMockCollection } from "metabase-types/api/mocks";
+import FileUploadStatusLarge from "./FileUploadStatusLarge";
+
+export default {
+  title: "Status/FileUploadStatusLarge",
+  component: FileUploadStatusLarge,
+};
+
+const Template: ComponentStory<typeof FileUploadStatusLarge> = args => {
+  return <FileUploadStatusLarge {...args} />;
+};
+
+export const Incomplete = Template.bind({});
+Incomplete.args = {
+  uploads: [
+    {
+      id: 1,
+      name: "Marketing UTM Q4 2022",
+      status: "in-progress",
+      collectionId: "root",
+    },
+  ],
+  collection: createMockCollection({ name: "Revenue" }),
+  isActive: true,
+};
+
+export const Complete = Template.bind({});
+Complete.args = {
+  uploads: [
+    {
+      id: 1,
+      name: "Marketing UTM Q4 2022",
+      status: "complete",
+      collectionId: "root",
+    },
+  ],
+  collection: createMockCollection({ name: "Revenue" }),
+  isActive: true,
+};
+
+export const Aborted = Template.bind({});
+Aborted.args = {
+  uploads: [
+    {
+      id: 1,
+      name: "Marketing UTM Q4 2022",
+      status: "error",
+      collectionId: "root",
+      message: "It's dead Jim",
+    },
+  ],
+  collection: createMockCollection({ name: "Revenue" }),
+  isActive: true,
+};
diff --git a/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.tsx b/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..3698c3648e1c82ea0253f55ed26f8f2fc7da35ba
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatusLarge/FileUploadStatusLarge.tsx
@@ -0,0 +1,64 @@
+import React from "react";
+import { t } from "ttag";
+import Link from "metabase/core/components/Link";
+import { Collection } from "metabase-types/api";
+import { FileUpload } from "metabase-types/store/upload";
+
+import {
+  isUploadInProgress,
+  isUploadCompleted,
+  isUploadAborted,
+} from "metabase/lib/uploads";
+
+import StatusLarge from "../StatusLarge";
+
+export interface FileUploadLargeProps {
+  collection: Collection;
+  uploads: FileUpload[];
+  isActive?: boolean;
+}
+
+const FileUploadLarge = ({
+  collection,
+  uploads,
+  isActive,
+}: FileUploadLargeProps): JSX.Element => {
+  const status = {
+    title: getTitle(uploads, collection),
+    items: uploads.map(upload => ({
+      id: upload.id,
+      title: upload.name,
+      icon: "model",
+      description: getDescription(upload),
+      isInProgress: isUploadInProgress(upload),
+      isCompleted: isUploadCompleted(upload),
+      isAborted: isUploadAborted(upload),
+    })),
+  };
+
+  return <StatusLarge status={status} isActive={isActive} />;
+};
+
+const getTitle = (uploads: FileUpload[], collection: Collection) => {
+  const isDone = uploads.every(isUploadCompleted);
+  const isError = uploads.some(isUploadAborted);
+
+  if (isDone) {
+    return t`Data added to ${collection.name}`;
+  } else if (isError) {
+    return t`Error uploading your File`;
+  } else {
+    return t`Uploading data to ${collection.name}...`;
+  }
+};
+
+const getDescription = (upload: FileUpload) => {
+  if (upload.status === "complete") {
+    return <Link to={`/model/${upload.modelId}`}>{t`Start exploring`}</Link>;
+  } else if (upload.status === "error") {
+    return upload.message;
+  }
+  return "";
+};
+
+export default FileUploadLarge;
diff --git a/frontend/src/metabase/status/components/FileUploadStatusLarge/index.ts b/frontend/src/metabase/status/components/FileUploadStatusLarge/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7118d53323f00ff0504c60ac46612c44fce90d9b
--- /dev/null
+++ b/frontend/src/metabase/status/components/FileUploadStatusLarge/index.ts
@@ -0,0 +1 @@
+export { default } from "./FileUploadStatusLarge";
diff --git a/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.styled.tsx b/frontend/src/metabase/status/components/StatusLarge/StatusLarge.styled.tsx
similarity index 98%
rename from frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.styled.tsx
rename to frontend/src/metabase/status/components/StatusLarge/StatusLarge.styled.tsx
index bbdc6432426bbe4361b0902b308532421df65155..62f4f515f6abd2f4a9bd2311a8865f825f5a9f6f 100644
--- a/frontend/src/metabase/status/components/DatabaseStatusLarge/DatabaseStatusLarge.styled.tsx
+++ b/frontend/src/metabase/status/components/StatusLarge/StatusLarge.styled.tsx
@@ -9,6 +9,7 @@ export const StatusRoot = styled.div`
   background-color: ${color("white")};
   box-shadow: 0 1px 12px ${color("shadow")};
   overflow: hidden;
+  margin-top: 1rem;
 `;
 
 export const StatusHeader = styled.div`
@@ -87,6 +88,7 @@ interface StatusCardIconContainerProps {
 }
 
 export const StatusCardIconContainer = styled.div<StatusCardIconContainerProps>`
+  flex: 0 0 auto;
   display: flex;
   align-items: center;
   justify-content: center;
diff --git a/frontend/src/metabase/status/components/StatusLarge/StatusLarge.tsx b/frontend/src/metabase/status/components/StatusLarge/StatusLarge.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..35935ff8705b21336d4c2248eae15fe5eda9f430
--- /dev/null
+++ b/frontend/src/metabase/status/components/StatusLarge/StatusLarge.tsx
@@ -0,0 +1,109 @@
+import React from "react";
+import Ellipsified from "metabase/core/components/Ellipsified";
+import Icon from "../../../components/Icon";
+import useStatusVisibility from "../../hooks/use-status-visibility";
+import {
+  StatusCardRoot,
+  StatusCardIcon,
+  StatusCardBody,
+  StatusCardTitle,
+  StatusCardDescription,
+  StatusCardSpinner,
+  StatusCardIconContainer,
+  StatusRoot,
+  StatusHeader,
+  StatusTitle,
+  StatusToggle,
+  StatusBody,
+} from "./StatusLarge.styled";
+
+type Status = {
+  title: string;
+  items: StatusItem[];
+};
+
+type StatusItem = {
+  id?: number;
+  title: string;
+  icon: string;
+  description?: string | JSX.Element;
+  isInProgress: boolean;
+  isCompleted: boolean;
+  isAborted: boolean;
+};
+
+export interface StatusLargeProps {
+  status: Status;
+  isActive?: boolean;
+  onCollapse?: () => void;
+}
+
+const StatusLarge = ({
+  status,
+  isActive,
+  onCollapse,
+}: StatusLargeProps): JSX.Element => {
+  return (
+    <StatusRoot role="status">
+      <StatusHeader>
+        <StatusTitle>{status.title}</StatusTitle>
+        {onCollapse && (
+          <StatusToggle onClick={onCollapse}>
+            <Icon name="chevrondown" />
+          </StatusToggle>
+        )}
+      </StatusHeader>
+      <StatusBody>
+        {status.items.map(item => (
+          <StatusCard item={item} isActive={isActive} key={item.id} />
+        ))}
+      </StatusBody>
+    </StatusRoot>
+  );
+};
+
+interface StatusCardProps {
+  item: StatusItem;
+  isActive?: boolean;
+}
+
+const StatusCard = ({
+  item,
+  isActive,
+}: StatusCardProps): JSX.Element | null => {
+  const { id, title, icon, description, isInProgress, isCompleted, isAborted } =
+    item;
+
+  const isVisible = useStatusVisibility(isActive || isInProgress);
+
+  if (!isVisible) {
+    return null;
+  }
+
+  return (
+    <StatusCardRoot key={id}>
+      <StatusCardIcon>
+        <Icon name={icon} />
+      </StatusCardIcon>
+      <StatusCardBody>
+        <StatusCardTitle>
+          <Ellipsified>{title}</Ellipsified>
+        </StatusCardTitle>
+        <StatusCardDescription>{description}</StatusCardDescription>
+      </StatusCardBody>
+      {isInProgress && <StatusCardSpinner size={24} borderWidth={3} />}
+      {isCompleted && (
+        <StatusCardIconContainer>
+          <Icon name="check" size={12} />
+        </StatusCardIconContainer>
+      )}
+      {isAborted && (
+        <StatusCardIconContainer isError={true}>
+          <Icon name="warning" size={12} />
+        </StatusCardIconContainer>
+      )}
+    </StatusCardRoot>
+  );
+};
+
+export default StatusLarge;
diff --git a/frontend/src/metabase/status/components/StatusLarge/index.ts b/frontend/src/metabase/status/components/StatusLarge/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e6a9551f961927f573fd4e7e3ed883d517cd87e1
--- /dev/null
+++ b/frontend/src/metabase/status/components/StatusLarge/index.ts
@@ -0,0 +1 @@
+export { default } from "./StatusLarge";
diff --git a/frontend/src/metabase/status/components/StatusListing/StatusListing.tsx b/frontend/src/metabase/status/components/StatusListing/StatusListing.tsx
index ebff7a2594829ecf9ef15c98650acd301f3c2ce2..fcd104a8c6372593df277fb778bcc5ff5ad4d69a 100644
--- a/frontend/src/metabase/status/components/StatusListing/StatusListing.tsx
+++ b/frontend/src/metabase/status/components/StatusListing/StatusListing.tsx
@@ -1,13 +1,37 @@
 import React from "react";
+import { connect } from "react-redux";
+
+import { getUserIsAdmin, getUser } from "metabase/selectors/user";
+import type { State } from "metabase-types/store";
+
 import DatabaseStatus from "../../containers/DatabaseStatus";
+import FileUploadStatus from "../FileUploadStatus";
 import { StatusListingRoot } from "./StatusListing.styled";
 
+const mapStateToProps = (state: State) => ({
+  isAdmin: getUserIsAdmin(state),
+  isLoggedIn: !!getUser(state),
+});
+
 export interface StatusListingProps {
   isAdmin: boolean;
+  isLoggedIn: boolean;
 }
 
-const StatusListing = ({ isAdmin }: StatusListingProps): JSX.Element => {
-  return <StatusListingRoot>{isAdmin && <DatabaseStatus />}</StatusListingRoot>;
+export const StatusListingView = ({
+  isAdmin,
+  isLoggedIn,
+}: StatusListingProps) => {
+  if (!isLoggedIn) {
+    return null;
+  }
+
+  return (
+    <StatusListingRoot>
+      {isAdmin && <DatabaseStatus />}
+      <FileUploadStatus />
+    </StatusListingRoot>
+  );
 };
 
-export default StatusListing;
+export default connect(mapStateToProps)(StatusListingView);
diff --git a/frontend/src/metabase/status/components/StatusListing/StatusListing.unit.spec.tsx b/frontend/src/metabase/status/components/StatusListing/StatusListing.unit.spec.tsx
index edfc30805dced4f4d9639e63b5030cd096acf2eb..400efd02aa3b6b20a118938157ea2b5091a3ecd6 100644
--- a/frontend/src/metabase/status/components/StatusListing/StatusListing.unit.spec.tsx
+++ b/frontend/src/metabase/status/components/StatusListing/StatusListing.unit.spec.tsx
@@ -1,20 +1,35 @@
 import React from "react";
-import { render, screen } from "@testing-library/react";
-import StatusListing from "./StatusListing";
+import { renderWithProviders, screen } from "__support__/ui";
+import { setupCollectionsEndpoints } from "__support__/server-mocks";
+import { createMockCollection } from "metabase-types/api/mocks";
+
+import {
+  StatusListingView as StatusListing,
+  StatusListingProps,
+} from "./StatusListing";
 
 const DatabaseStatusMock = () => <div>DatabaseStatus</div>;
 
 jest.mock("../../containers/DatabaseStatus", () => DatabaseStatusMock);
 
+const setup = (options?: Partial<StatusListingProps>) => {
+  setupCollectionsEndpoints([createMockCollection()]);
+
+  return renderWithProviders(<StatusListing isAdmin isLoggedIn {...options} />);
+};
+
 describe("StatusListing", () => {
   it("should render database statuses for admins", () => {
-    render(<StatusListing isAdmin={true} />);
+    setup({
+      isAdmin: true,
+      isLoggedIn: true,
+    });
 
     expect(screen.getByText("DatabaseStatus")).toBeInTheDocument();
   });
 
   it("should not render database statuses for non-admins", () => {
-    render(<StatusListing isAdmin={false} />);
+    setup({ isAdmin: false });
 
     expect(screen.queryByText("DatabaseStatus")).not.toBeInTheDocument();
   });
diff --git a/frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.styled.tsx b/frontend/src/metabase/status/components/StatusSmall/StatusSmall.styled.tsx
similarity index 100%
rename from frontend/src/metabase/status/components/DatabaseStatusSmall/DatabaseStatusSmall.styled.tsx
rename to frontend/src/metabase/status/components/StatusSmall/StatusSmall.styled.tsx
diff --git a/frontend/src/metabase/status/components/StatusSmall/StatusSmall.tsx b/frontend/src/metabase/status/components/StatusSmall/StatusSmall.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..eb110c0a59a47d666b163f0cc34d4f185180cf9f
--- /dev/null
+++ b/frontend/src/metabase/status/components/StatusSmall/StatusSmall.tsx
@@ -0,0 +1,41 @@
+import React from "react";
+import Tooltip from "metabase/core/components/Tooltip";
+import { InitialSyncStatus } from "metabase-types/api";
+import {
+  StatusRoot,
+  StatusIconContainer,
+  StatusIcon,
+  StatusContainer,
+  StatusSpinner,
+} from "./StatusSmall.styled";
+
+export interface StatusSmallProps {
+  status: InitialSyncStatus;
+  statusLabel: string;
+  hasSpinner: boolean;
+  icon: string;
+  onExpand?: () => void;
+}
+
+const StatusSmall = ({
+  status,
+  statusLabel,
+  hasSpinner,
+  icon,
+  onExpand,
+}: StatusSmallProps): JSX.Element => {
+  return (
+    <Tooltip tooltip={statusLabel}>
+      <StatusRoot role="status" aria-label={statusLabel} onClick={onExpand}>
+        <StatusContainer status={status}>
+          <StatusIconContainer status={status}>
+            <StatusIcon status={status} name={icon} />
+          </StatusIconContainer>
+        </StatusContainer>
+        {hasSpinner && <StatusSpinner size={48} />}
+      </StatusRoot>
+    </Tooltip>
+  );
+};
+
+export default StatusSmall;
diff --git a/frontend/src/metabase/status/components/StatusSmall/index.ts b/frontend/src/metabase/status/components/StatusSmall/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8cb684d84807824888a1e9ade71b7f0955b38261
--- /dev/null
+++ b/frontend/src/metabase/status/components/StatusSmall/index.ts
@@ -0,0 +1 @@
+export { default } from "./StatusSmall";
diff --git a/frontend/src/metabase/status/containers/StatusListing/StatusListing.tsx b/frontend/src/metabase/status/containers/StatusListing/StatusListing.tsx
deleted file mode 100644
index 4b0f97cee0b9c598a0f9241c45ca46e4ac4b7878..0000000000000000000000000000000000000000
--- a/frontend/src/metabase/status/containers/StatusListing/StatusListing.tsx
+++ /dev/null
@@ -1,10 +0,0 @@
-import { connect } from "react-redux";
-import { getUserIsAdmin } from "metabase/selectors/user";
-import { State } from "metabase-types/store";
-import StatusListing from "../../components/StatusListing";
-
-const mapStateToProps = (state: State) => ({
-  isAdmin: getUserIsAdmin(state),
-});
-
-export default connect(mapStateToProps)(StatusListing);
diff --git a/frontend/src/metabase/status/containers/StatusListing/index.ts b/frontend/src/metabase/status/containers/StatusListing/index.ts
deleted file mode 100644
index 9b7b132008935dc087133a9a4d6ed5517539c88a..0000000000000000000000000000000000000000
--- a/frontend/src/metabase/status/containers/StatusListing/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { default } from "./StatusListing";
diff --git a/package.json b/package.json
index 154eab9bcaf097dcc00ea00af12b0515d41177ef..de020bd58c7d685dfcdb2107452c4570f7f91af6 100644
--- a/package.json
+++ b/package.json
@@ -75,6 +75,7 @@
     "react-dnd-html5-backend": "3",
     "react-dom": "~16.14.0",
     "react-draggable": "^3.3.2",
+    "react-dropzone": "^14.2.3",
     "react-element-to-jsx-string": "^13.1.0",
     "react-grid-layout": "^1.2.5",
     "react-is": "^17.0.2",
diff --git a/resources/migrations/000_migrations.yaml b/resources/migrations/000_migrations.yaml
index 1731b64044b419c111895122e101b292e86f1d72..a006b3286c1e30c46edf5c55e05104b068412d63 100644
--- a/resources/migrations/000_migrations.yaml
+++ b/resources/migrations/000_migrations.yaml
@@ -14432,6 +14432,28 @@ databaseChangeLog:
         - sql:
             sql: UPDATE core_user SET google_auth = true, sso_source = NULL WHERE sso_source = 'google';
 
+  - changeSet:
+      id: v47.00-010
+      author: tsmacdonald
+      comment: Added 0.47.0 - Make metabase_table.name long enough for H2 names
+      changes:
+        - modifyDataType:
+            tableName: metabase_table
+            columnName: name
+            newDataType: varchar(256)
+      rollback: # no rollback needed, varchar(256) is backwards compatible
+
+  - changeSet:
+      id: v47.00-011
+      author: tsmacdonald
+      comment: Added 0.47.0 - Make metabase_table.display_name long enough for H2 names
+      changes:
+        - modifyDataType:
+            tableName: metabase_table
+            columnName: display_name
+            newDataType: varchar(256)
+      rollback: # no rollback needed, varchar(256) is backwards compatible
+
   - changeSet:
       id: v47.00-012
       author: qwef
diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj
index 453e80d32945977f8198c114cbd0d2d470e5cc1c..d4d4d8493a4f102c39e78dbadb096a13c6f53956 100644
--- a/src/metabase/api/card.clj
+++ b/src/metabase/api/card.clj
@@ -4,6 +4,7 @@
    [cheshire.core :as json]
    [clojure.core.async :as a]
    [clojure.data :as data]
+   [clojure.string :as str]
    [clojure.walk :as walk]
    [compojure.core :refer [DELETE GET POST PUT]]
    [medley.core :as m]
@@ -13,6 +14,7 @@
    [metabase.api.field :as api.field]
    [metabase.api.timeline :as api.timeline]
    [metabase.driver :as driver]
+   [metabase.driver.util :as driver.u]
    [metabase.email.messages :as messages]
    [metabase.events :as events]
    [metabase.mbql.normalize :as mbql.normalize]
@@ -38,13 +40,16 @@
    [metabase.models.query.permissions :as query-perms]
    [metabase.models.revision.last-edit :as last-edit]
    [metabase.models.timeline :as timeline]
+   [metabase.public-settings :as public-settings]
    [metabase.query-processor.async :as qp.async]
    [metabase.query-processor.card :as qp.card]
    [metabase.query-processor.pivot :as qp.pivot]
    [metabase.query-processor.util :as qp.util]
    [metabase.related :as related]
+   [metabase.sync :as sync]
    [metabase.sync.analyze.query-results :as qr]
    [metabase.task.persist-refresh :as task.persist-refresh]
+   [metabase.upload :as upload]
    [metabase.util :as u]
    [metabase.util.date-2 :as u.date]
    [metabase.util.i18n :refer [trs tru]]
@@ -971,4 +976,48 @@ saved later when it is ready."
    query     ms/NonBlankString}
   (param-values (api/read-check Card card-id) param-key query))
 
+(defn upload-csv!
+  "Main entry point for CSV uploading. Coordinates detecting the schema, inserting it into an appropriate database,
+  syncing and scanning the new data, and creating an appropriate model. May throw validation or DB errors."
+  [collection-id filename csv-file]
+  (when (not (public-settings/uploads-enabled))
+    (throw (Exception. "Uploads are not enabled.")))
+  (collection/check-write-perms-for-collection collection-id)
+  (let [db-id             (public-settings/uploads-database-id)
+        database          (or (t2/select-one Database :id db-id)
+                              (throw (Exception. (tru "The uploads database does not exist."))))
+        schema-name       (public-settings/uploads-schema-name)
+        filename-prefix   (or (second (re-matches #"(.*)\.csv$" filename))
+                              filename)
+        driver            (driver.u/database->driver database)
+        _                 (or (driver/database-supports? driver :uploads nil)
+                              (throw (Exception. (tru "Uploads are not supported on {0} databases." (str/capitalize (name driver))))))
+        table-name        (->> (str (public-settings/uploads-table-prefix) filename-prefix)
+                               (upload/unique-table-name driver))
+        schema+table-name (if (str/blank? schema-name)
+                            table-name
+                            (str schema-name "." table-name))
+        _                 (upload/load-from-csv driver db-id schema+table-name csv-file)
+        _                 (sync/sync-database! database)
+        table-id          (t2/select-one-fn :id Table :db_id db-id :%lower.name table-name)]
+    (create-card!
+     {:collection_id          collection-id,
+      :dataset                true
+      :database_id            db-id
+      :dataset_query          {:database db-id
+                               :query    {:source-table table-id}
+                               :type     :query}
+      :display                :table
+      :name                   filename-prefix
+      :visualization_settings {}})))
+
+(api/defendpoint ^:multipart POST "/from-csv"
+  "Create a table and model populated with the values from the attached CSV."
+  [:as {raw-params :params}]
+  ;; parse-long returns nil with "root", which is what we want anyway
+  (upload-csv! (parse-long (get raw-params "collection_id"))
+               (get-in raw-params ["file" :filename])
+               (get-in raw-params ["file" :tempfile]))
+  {:status 200})
+
 (api/define-routes)
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index dd79f2fa154698d5a836b375719ba71b56c01e67..bc4d3f72ab4f660f90735d35d0097ed89790026e 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -490,6 +490,9 @@
     ;; Does the driver support experimental "writeback" actions like "delete this row" or "insert a new row" from 44+?
     :actions
 
+    ;; Does the driver support uploading files
+    :uploads
+
     ;; Does the driver support custom writeback actions. Drivers that support this must
     ;; implement [[execute-write-query!]]
     :actions/custom
@@ -810,3 +813,38 @@
   {:arglists '([driver table fields rff opts]), :added "0.46.0"}
   dispatch-on-initialized-driver
   :hierarchy #'hierarchy)
+
+;;; +----------------------------------------------------------------------------------------------------------------+
+;;; |                                                    Upload                                                      |
+;;; +----------------------------------------------------------------------------------------------------------------+
+
+(defmulti table-name-length-limit
+  "Return the maximum number of characters allowed in a table name, or `nil` if there is no limit."
+  {:added "0.47.0", :arglists '([driver])}
+  dispatch-on-initialized-driver
+  :hierarchy #'hierarchy)
+
+(defmulti create-table
+  "Create a table named `table-name`. If the table already exists it will throw an error."
+  {:added "0.47.0", :arglists '([driver db-id table-name col->type])}
+  dispatch-on-initialized-driver
+  :hierarchy #'hierarchy)
+
+(defmulti drop-table
+  "Drop a table named `table-name`. If the table doesn't exist it will not be dropped."
+  {:added "0.47.0", :arglists '([driver db-id table-name])}
+  dispatch-on-initialized-driver
+  :hierarchy #'hierarchy)
+
+(defmulti insert-into
+  "Insert `values` into a table named `table-name`. `values` is a sequence of rows, where each row's order matches
+   `column-names`."
+  {:added "0.47.0", :arglists '([driver db-id table-name column-names values])}
+  dispatch-on-initialized-driver
+  :hierarchy #'hierarchy)
+
+(defmulti upload-type->database-type
+  "Returns the database type for a given `metabase.upload` type."
+  {:added "0.47.0", :arglists '([driver upload-type])}
+  dispatch-on-initialized-driver
+  :hierarchy #'hierarchy)
diff --git a/src/metabase/driver/h2.clj b/src/metabase/driver/h2.clj
index 6a56e840c5356b034f0bf4bfb669f8e33025ae02..f254978bf492258e78f0b4a99edce1258a6d057c 100644
--- a/src/metabase/driver/h2.clj
+++ b/src/metabase/driver/h2.clj
@@ -15,6 +15,7 @@
    [metabase.plugins.classloader :as classloader]
    [metabase.query-processor.error-type :as qp.error-type]
    [metabase.query-processor.store :as qp.store]
+   [metabase.upload :as upload]
    [metabase.util :as u]
    [metabase.util.honey-sql-2 :as h2x]
    [metabase.util.i18n :refer [deferred-tru tru]]
@@ -49,7 +50,8 @@
                               :actions/custom            true
                               :datetime-diff             true
                               :now                       true
-                              :test/jvm-timezone-setting false}]
+                              :test/jvm-timezone-setting false
+                              :uploads                   true}]
   (defmethod driver/database-supports? [:h2 feature]
     [_driver _feature _database]
     supported?))
@@ -515,3 +517,19 @@
       (do (log/error (tru "SSH tunnel can only be established for H2 connections using the TCP protocol"))
           db-details))
     db-details))
+
+(defmethod driver/upload-type->database-type :h2
+  [_driver upload-type]
+  (case upload-type
+    ::upload/varchar_255 "VARCHAR"
+    ::upload/text        "VARCHAR"
+    ::upload/int         "INTEGER"
+    ::upload/float       "DOUBLE PRECISION"
+    ::upload/boolean     "BOOLEAN"
+    ::upload/date        "DATE"
+    ::upload/datetime    "TIMESTAMP"))
+
+(defmethod driver/table-name-length-limit :h2
+  [_driver]
+  ;; http://www.h2database.com/html/advanced.html#limits_limitations
+  256)
diff --git a/src/metabase/driver/mysql.clj b/src/metabase/driver/mysql.clj
index 0ba029faffd64a5ee4301e83ad541d5c723f9a13..4f1f481f6c0bb14aca0738c9e19daf83913fbaa9 100644
--- a/src/metabase/driver/mysql.clj
+++ b/src/metabase/driver/mysql.clj
@@ -26,6 +26,7 @@
    [metabase.query-processor.store :as qp.store]
    [metabase.query-processor.timezone :as qp.timezone]
    [metabase.query-processor.util.add-alias-info :as add]
+   [metabase.upload :as upload]
    [metabase.util :as u]
    [metabase.util.honey-sql-2 :as h2x]
    [metabase.util.i18n :refer [deferred-tru trs]]
@@ -54,6 +55,8 @@
 (defmethod driver/database-supports? [:mysql :nested-field-columns] [_driver _feat db]
   (driver.common/json-unfolding-default db))
 
+(defmethod driver/database-supports? [:mysql :uploads] [_driver _feat _db] true)
+
 (defmethod driver/database-supports? [:mysql :persist-models] [_driver _feat _db] true)
 
 (defmethod driver/database-supports? [:mysql :persist-models-enabled]
@@ -606,3 +609,19 @@
   (format "convert_tz('%s', '%s', @@session.time_zone)"
           (t/format "yyyy-MM-dd HH:mm:ss.SSS" t)
           (str (t/zone-id t))))
+
+(defmethod driver/upload-type->database-type :mysql
+  [_driver upload-type]
+  (case upload-type
+    ::upload/varchar_255 "VARCHAR(255)"
+    ::upload/text        "TEXT"
+    ::upload/int         "INTEGER"
+    ::upload/float       "DOUBLE"
+    ::upload/boolean     "BOOLEAN"
+    ::upload/date        "DATE"
+    ::upload/datetime    "TIMESTAMP"))
+
+(defmethod driver/table-name-length-limit :mysql
+  [_driver]
+  ;; https://dev.mysql.com/doc/refman/8.0/en/identifier-length.html
+  64)
diff --git a/src/metabase/driver/postgres.clj b/src/metabase/driver/postgres.clj
index d1b45813be44042ed9f73ba8a7ddc5f83a84bd22..e84a6402ad8b99a895896d80bf60696ea9a1528e 100644
--- a/src/metabase/driver/postgres.clj
+++ b/src/metabase/driver/postgres.clj
@@ -27,6 +27,7 @@
    [metabase.models.secret :as secret]
    [metabase.query-processor.store :as qp.store]
    [metabase.query-processor.util.add-alias-info :as add]
+   [metabase.upload :as upload]
    [metabase.util :as u]
    [metabase.util.date-2 :as u.date]
    [metabase.util.honey-sql-2 :as h2x]
@@ -76,7 +77,7 @@
   [_driver _feat _db]
   true)
 
-(doseq [feature [:actions :actions/custom]]
+(doseq [feature [:actions :actions/custom :uploads]]
   (defmethod driver/database-supports? [:postgres feature]
     [driver _feat _db]
     ;; only supported for Postgres for right now. Not supported for child drivers like Redshift or whatever.
@@ -752,3 +753,21 @@
   [driver prepared-statement i t]
   (let [local-time (t/local-time (t/with-offset-same-instant t (t/zone-offset 0)))]
     (sql-jdbc.execute/set-parameter driver prepared-statement i local-time)))
+
+(defmethod driver/upload-type->database-type :postgres
+  [_driver upload-type]
+  (case upload-type
+    ::upload/varchar_255 "VARCHAR(255)"
+    ::upload/text        "TEXT"
+    ::upload/int         "INTEGER"
+    ::upload/float       "FLOAT"
+    ::upload/boolean     "BOOLEAN"
+    ::upload/date        "DATE"
+    ::upload/datetime    "TIMESTAMP"))
+
+(defmethod driver/table-name-length-limit :postgres
+  [_driver]
+  ;; https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
+  ;; This could be incorrect if Postgres has been compiled with a value for NAMEDATALEN other than the default (64), but
+  ;; that seems unlikely and there's not an easy way to find out.
+  63)
diff --git a/src/metabase/driver/sql_jdbc.clj b/src/metabase/driver/sql_jdbc.clj
index 34aca1bf632e9377712e162d9615e3a0600fd355..acdf8ae42291e2a04cda42c6d3d58a29f1613adb 100644
--- a/src/metabase/driver/sql_jdbc.clj
+++ b/src/metabase/driver/sql_jdbc.clj
@@ -2,12 +2,14 @@
   "Shared code for drivers for SQL databases using their respective JDBC drivers under the hood."
   (:require
    [clojure.java.jdbc :as jdbc]
+   [honey.sql :as sql]
    [metabase.driver :as driver]
    [metabase.driver.sql-jdbc.actions :as sql-jdbc.actions]
    [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
    [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
    [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
    [metabase.driver.sql.query-processor :as sql.qp]
+   [metabase.query-processor.writeback :as qp.writeback]
    [metabase.util.honeysql-extensions :as hx]))
 
 (comment sql-jdbc.actions/keep-me)
@@ -97,3 +99,42 @@
 (defmethod sql.qp/cast-temporal-string [:sql-jdbc :Coercion/YYYYMMDDHHMMSSString->Temporal]
   [_driver _semantic_type expr]
   (hx/->timestamp expr))
+
+(defn- create-table-sql
+  [driver table-name col->type]
+  (first (sql/format {:create-table (keyword table-name)
+                      :with-columns (map (fn [kv] (map keyword kv)) col->type)}
+                     :quoted true
+                     :dialect (sql.qp/quote-style driver))))
+
+(defmethod driver/create-table :sql-jdbc
+  [driver db-id table-name col->type]
+  (let [sql (create-table-sql driver table-name col->type)]
+    (qp.writeback/execute-write-sql! db-id sql)))
+
+(defmethod driver/drop-table :sql-jdbc
+  [driver db-id table-name]
+  (let [sql (first (sql/format {:drop-table [:if-exists (keyword table-name)]}
+                               :quoted true
+                               :dialect (sql.qp/quote-style driver)))]
+    (qp.writeback/execute-write-sql! db-id sql)))
+
+(defmethod driver/insert-into :sql-jdbc
+  [driver db-id table-name column-names values]
+  (let [table-name (keyword table-name)
+        columns    (map keyword column-names)
+        sqls       (map #(sql/format {:insert-into table-name
+                                      :columns     columns
+                                      :values      %}
+                                     :quoted true
+                                     :dialect (sql.qp/quote-style driver))
+                        (partition-all 100 values))]
+    ;; We need to partition the insert into multiple statements for both performance and correctness.
+    ;;
+    ;; On Postgres with a large file, 100 (3.76m) was significantly faster than 50 (4.03m) and 25 (4.27m). 1,000 was a
+    ;; little faster but not by much (3.63m), and 10,000 threw an error:
+    ;;     PreparedStatement can have at most 65,535 parameters
+    ;; One imagines that `(long (/ 65535 (count columns)))` might be best, but I don't trust the 65K limit to apply
+    ;; across all drivers. With that in mind, 100 seems like a safe compromise.
+    (doseq [sql sqls]
+      (qp.writeback/execute-write-sql! db-id sql))))
diff --git a/src/metabase/public_settings.clj b/src/metabase/public_settings.clj
index 6293cdf0e7d49312c37627a008c0cd2f49cec8e2..077590d5eee706618e18ba75cf15b4e8f9a471b8 100644
--- a/src/metabase/public_settings.clj
+++ b/src/metabase/public_settings.clj
@@ -599,3 +599,24 @@
                     (not (t2/exists? 'Database :is_sample false, :initial_sync_status "complete"))
                     ;; frontend should set this value to `true` after the modal has been shown once
                     v))))
+
+(defsetting uploads-enabled
+  (deferred-tru "Whether or not uploads are enabled")
+  :visibility :authenticated
+  :type       :boolean
+  :default    false)
+
+(defsetting uploads-database-id
+  (deferred-tru "Database ID for uploads")
+  :visibility :authenticated
+  :type       :integer)
+
+(defsetting uploads-schema-name
+  (deferred-tru "Schema name for uploads")
+  :visibility   :authenticated
+  :type         :string)
+
+(defsetting uploads-table-prefix
+  (deferred-tru "Prefix for upload table names")
+  :visibility   :authenticated
+  :type         :string)
diff --git a/src/metabase/query_processor/writeback.clj b/src/metabase/query_processor/writeback.clj
index fc26f7e3f07dec8e292925f76d03d8dff7874291..2b7f51c8aee82817465f048d9b9eb7658fec3c5c 100644
--- a/src/metabase/query_processor/writeback.clj
+++ b/src/metabase/query_processor/writeback.clj
@@ -43,3 +43,16 @@
     (throw (ex-info (tru "Only native queries can be executed as write queries.")
                     {:type qp.error-type/invalid-query, :status-code 400, :query query})))
   ((writeback-qp) query nil nil))
+
+(defn execute-write-sql!
+  "Execute a write query in SQL against a database given by `db-id`."
+  [db-id sql-or-sql+params]
+  (if (sequential? sql-or-sql+params)
+    (let [[sql & params] sql-or-sql+params]
+      (execute-write-query! {:type     :native
+                             :database db-id
+                             :native   {:query  sql
+                                        :params params}}))
+    (execute-write-query! {:type     :native
+                           :database db-id
+                           :native   {:query sql-or-sql+params}})))
diff --git a/src/metabase/upload.clj b/src/metabase/upload.clj
new file mode 100644
index 0000000000000000000000000000000000000000..15b9f7109bb87f8ec894444886c8f1e4cd854f01
--- /dev/null
+++ b/src/metabase/upload.clj
@@ -0,0 +1,236 @@
+(ns metabase.upload
+  (:require
+   [clojure.data.csv :as csv]
+   [clojure.java.io :as io]
+   [clojure.set :as set]
+   [clojure.string :as str]
+   [flatland.ordered.map :as ordered-map]
+   [flatland.ordered.set :as ordered-set]
+   [java-time :as t]
+   [metabase.driver :as driver]
+   [metabase.mbql.util :as mbql.u]
+   [metabase.search.util :as search-util]
+   [metabase.util :as u]))
+
+(set! *warn-on-reflection* true)
+
+;;;; +------------------+
+;;;; | Schema detection |
+;;;; +------------------+
+
+;;              text
+;;               |
+;;               |
+;;          varchar_255
+;;              / \
+;;             /   \
+;;            /     \
+;;         float   datetime
+;;           |       |
+;;           |       |
+;;          int    date
+;;           |
+;;           |
+;;        boolean
+
+(def ^:private type->parent
+  ;; listed in depth-first order
+  {::varchar_255 ::text
+   ::float       ::varchar_255
+   ::int         ::float
+   ::boolean     ::int
+   ::datetime    ::varchar_255
+   ::date        ::datetime})
+
+(def ^:private types
+  (set/union (set (keys type->parent))
+             (set (vals type->parent))))
+
+(def ^:private type->ancestors
+  (into {} (for [type types]
+             [type (loop [ret (ordered-set/ordered-set)
+                          type type]
+                     (if-let [parent (type->parent type)]
+                       (recur (conj ret parent) parent)
+                       ret))])))
+
+(defn- date-string? [s]
+  (try (t/local-date s)
+       true
+       (catch Exception _
+         false)))
+
+(defn- datetime-string? [s]
+  (try (t/local-date-time s)
+       true
+       (catch Exception _
+         false)))
+
+(defn value->type
+  "The most-specific possible type for a given value. Possibilities are:
+    - ::boolean
+    - ::int
+    - ::float
+    - ::date or ::datetime
+    - ::varchar_255 or ::text
+    - nil, in which case other functions are expected to replace it with ::text as the catch-all type
+
+  NB: There are currently the following gotchas:
+    1. ints/floats are assumed to have commas as separators and periods as decimal points
+    2. 0 and 1 are assumed to be booleans, not ints."
+  [value]
+  (cond
+    (str/blank? value)                                      nil
+    (re-matches #"(?i)true|t|yes|y|1|false|f|no|n|0" value) ::boolean
+    (re-matches #"-?[\d,]+"                          value) ::int
+    (re-matches #"-?[\d,]*\.\d+"                     value) ::float
+    (datetime-string?                                value) ::datetime
+    (date-string?                                    value) ::date
+    (re-matches #".{1,255}"                          value) ::varchar_255
+    :else                                                   ::text))
+
+(defn- row->types
+  [row]
+  (map (comp value->type search-util/normalize) row))
+
+(defn- lowest-common-member [[x & xs :as all-xs] ys]
+  (cond
+    (empty? all-xs)  (throw (IllegalArgumentException. (format "%s and %s must have a common member" xs ys)))
+    (contains? ys x) x
+    :else            (recur xs ys)))
+
+(defn- lowest-common-ancestor [type-a type-b]
+  (cond
+    (nil? type-a) type-b
+    (nil? type-b) type-a
+    (= type-a type-b) type-a
+    (contains? (type->ancestors type-a) type-b) type-b
+    (contains? (type->ancestors type-b) type-a) type-a
+    :else (lowest-common-member (type->ancestors type-a) (type->ancestors type-b))))
+
+(defn- coalesce-types
+  [types-so-far new-types]
+  (->> (map vector types-so-far new-types)
+       (mapv (partial apply lowest-common-ancestor))))
+
+(defn- pad
+  "Pad (or truncate) `values` to a seq of exactly `n` items, filling any missing positions with nils."
+  [n values]
+  (first (partition n n (repeat nil) values)))
+
+(defn- normalize-column-name
+  [raw-name]
+  (if (str/blank? raw-name)
+    "unnamed_column"
+    (u/slugify (str/trim raw-name))))
+
+(defn- rows->schema
+  [header rows]
+  (let [normalized-header (->> header
+                               (map normalize-column-name)
+                               (mbql.u/uniquify-names))
+        column-count      (count normalized-header)]
+    (->> rows
+         (map row->types)
+         (map (partial pad column-count))
+         (reduce coalesce-types (repeat column-count nil))
+         (map #(or % ::text))
+         (map vector normalized-header)
+         (ordered-map/ordered-map))))
+
+;;;; +------------------+
+;;;; |  Parsing values  |
+;;;; +------------------+
+
+(defn- parse-bool
+  [s]
+  (cond
+    (re-matches #"(?i)true|t|yes|y|1" s) true
+    (re-matches #"(?i)false|f|no|n|0" s) false))
+
+(defn- parse-date
+  [s]
+  (t/local-date s))
+
+(defn- parse-datetime
+  [s]
+  (cond
+    (date-string? s) (t/local-date-time (t/local-date s) (t/local-time "00:00:00"))
+    (datetime-string? s) (t/local-date-time s)))
+
+(def ^:private upload-type->parser
+  {::varchar_255 identity
+   ::text        identity
+   ::int         #(Integer/parseInt (str/trim %))
+   ::float       #(parse-double (str/trim %))
+   ::boolean     #(parse-bool (str/trim %))
+   ::date        #(parse-date (str/trim %))
+   ::datetime    #(parse-datetime (str/trim %))})
+
+(defn- parsed-rows
+  "Returns a vector of parsed rows from a `csv-file`.
+   Replaces empty strings with nil."
+  [col->upload-type csv-file]
+  (with-open [reader (io/reader csv-file)]
+    (let [[_header & rows] (csv/read-csv reader)
+          parsers (map upload-type->parser (vals col->upload-type))]
+      (vec (for [row rows]
+             (for [[v f] (map vector row parsers)]
+               (if (str/blank? v)
+                 nil
+                 (f v))))))))
+
+;;;; +------------------+
+;;;; | Public Functions |
+;;;; +------------------+
+
+(defn unique-table-name
+  "Append the current datetime to the given name to create a unique table name. The resulting name will be short enough for the given driver (truncating the supplied `table-name` if necessary)."
+  [driver table-name]
+  (let [time-format                 "_yyyyMMddHHmmss"
+        acceptable-length           (min (count table-name)
+                                         (- (driver/table-name-length-limit driver) (count time-format)))
+        truncated-name-without-time (subs (u/slugify table-name) 0 acceptable-length)]
+    (str truncated-name-without-time
+         (t/format time-format (t/local-date-time)))))
+
+(def max-sample-rows "Maximum number of values to use for detecting a column's type" 1000)
+
+(defn- sample-rows
+  "Returns an improper subset of the rows no longer than [[max-sample-rows]]. Takes an evenly-distributed sample (not
+  just the first n)."
+  [rows]
+  (take max-sample-rows (take-nth (max 1 (long (/ (count rows) max-sample-rows)))
+                                  rows)))
+(defn detect-schema
+  "Returns an ordered map of `normalized-column-name -> type` for the given CSV file. The CSV file *must* have headers as the
+  first row. Supported types are:
+
+    - ::int
+    - ::float
+    - ::boolean
+    - ::varchar_255
+    - ::text
+    - ::date
+    - ::datetime
+
+  A column that is completely blank is assumed to be of type ::text."
+  [csv-file]
+  (with-open [reader (io/reader csv-file)]
+    (let [[header & rows] (csv/read-csv reader)]
+      (rows->schema header (sample-rows rows)))))
+
+(defn load-from-csv
+  "Loads a table from a CSV file. If the table already exists, it will throw an error. Returns nil."
+  [driver db-id table-name csv-file]
+  (let [col->upload-type   (detect-schema csv-file)
+        col->database-type (update-vals col->upload-type (partial driver/upload-type->database-type driver))
+        column-names       (keys col->upload-type)]
+    (driver/create-table driver db-id table-name col->database-type)
+    (try
+      (let [rows (parsed-rows col->upload-type csv-file)]
+        (driver/insert-into driver db-id table-name column-names rows))
+      (catch Throwable e
+        (driver/drop-table driver db-id table-name)
+        (throw (ex-info (ex-message e) {}))))
+    nil))
diff --git a/test/metabase/actions/test_util.clj b/test/metabase/actions/test_util.clj
index 11a0a43ae6b95348152eedfa90ebe0d95807a463..a387034505d60425ff76751453e9a366a4f495dd 100644
--- a/test/metabase/actions/test_util.clj
+++ b/test/metabase/actions/test_util.clj
@@ -104,6 +104,13 @@
   [dataset-definition & body]
   `(do-with-dataset-definition (tx/dataset-definition ~(str (gensym)) ~dataset-definition) (fn [] ~@body)))
 
+(defmacro with-empty-db
+  "Sets the current dataset to a freshly created db that gets destroyed at the conclusion of `body`.
+   Use this to test destructive actions that may modify the data."
+  {:style/indent :defn}
+  [& body]
+  `(do-with-dataset-definition (tx/dataset-definition ~(str (gensym))) (fn [] ~@body)))
+
 (deftest with-actions-test-data-test
   (datasets/test-drivers (qp.test/normal-drivers-with-feature :actions/custom)
     (dotimes [i 2]
diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj
index b81d6ad666e82f6a362432a7c11b2160b18da1dd..0aca96fb5a3c15e0f68731d8e0e039d9eeeafd80 100644
--- a/test/metabase/api/card_test.clj
+++ b/test/metabase/api/card_test.clj
@@ -2,6 +2,7 @@
   "Tests for /api/card endpoints."
   (:require
    [cheshire.core :as json]
+   [clojure.java.jdbc :as jdbc]
    [clojure.set :as set]
    [clojure.string :as str]
    [clojure.test :refer :all]
@@ -12,6 +13,8 @@
    [medley.core :as m]
    [metabase.api.card :as api.card]
    [metabase.api.pivots :as api.pivots]
+   [metabase.driver :as driver]
+   [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
    [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
    [metabase.http-client :as client]
    [metabase.models
@@ -19,6 +22,7 @@
             Collection
             Dashboard
             Database
+            Field
             ModerationReview
             PersistedInfo
             Pulse
@@ -44,6 +48,7 @@
    [metabase.task.sync-databases :as task.sync-databases]
    [metabase.test :as mt]
    [metabase.test.data.users :as test.users]
+   [metabase.upload-test :as upload-test]
    [metabase.util :as u]
    [metabase.util.schema :as su]
    [schema.core :as s]
@@ -276,10 +281,11 @@
                         card-3 :rasta]
         (with-cards-in-readable-collection [card-1 card-2 card-3 card-4]
           (testing "\nShould return cards that were recently viewed by current user only"
-            (is (= ["Card 3"
-                    "Card 4"
-                    "Card 1"]
-                   (map :name (mt/user-http-request :rasta :get 200 "card", :f :recent))))))))))
+            (let [recent-card-names (->> (mt/user-http-request :rasta :get 200 "card", :f :recent)
+                                         (map :name)
+                                         (filter #{"Card 1" "Card 2" "Card 3" "Card 4"}))]
+              (is (= ["Card 3" "Card 4" "Card 1"]
+                     recent-card-names)))))))))
 
 (deftest filter-by-popular-test
   (testing "GET /api/card?f=popular"
@@ -295,9 +301,12 @@
         (with-cards-in-readable-collection [card-1 card-2 card-3]
           (testing (str "`f=popular` should return cards sorted by number of ViewLog entries for all users; cards with "
                         "no entries should be excluded")
-            (is (= ["Card 3"
-                    "Card 2"]
-                   (map :name (mt/user-http-request :rasta :get 200 "card", :f :popular))))))))))
+            (let [popular-card-names (->> (mt/user-http-request :rasta :get 200 "card", :f :popular)
+                                          (map :name)
+                                          (filter #{"Card 1" "Card 2" "Card 3"}))]
+              (is (= ["Card 3"
+                      "Card 2"]
+                     popular-card-names)))))))))
 
 (deftest filter-by-archived-test
   (testing "GET /api/card?f=archived"
@@ -2538,3 +2547,116 @@
                  :values_source_type    "static-list"
                  :values_source_config {:values ["BBQ" "Bakery" "Bar"]}}]
                (:parameters card)))))))
+
+(defn- upload-example-csv! [collection-id]
+  (let [file (upload-test/csv-file-with
+              ["id, name"
+               "1, Luke Skywalker"
+               "2, Darth Vader"]
+              "example")]
+    (mt/with-current-user (mt/user->id :rasta)
+      (api.card/upload-csv! collection-id "example.csv" file))))
+
+(deftest upload-csv!-schema-test
+  (mt/test-drivers (disj (mt/normal-drivers-with-feature :uploads) :mysql) ; MySQL doesn't support schemas
+    (mt/with-empty-db
+      (let [db-id (u/the-id (mt/db))]
+        (testing "Happy path with schema, and without table-prefix"
+          ;; create not_public schema in the db
+          (let [details (mt/dbdef->connection-details driver/*driver* :db {:database-name (:name (mt/db))})]
+            (jdbc/execute! (sql-jdbc.conn/connection-details->spec driver/*driver* details)
+                           ["CREATE SCHEMA \"not_public\";"]))
+          (mt/with-temporary-setting-values [uploads-enabled      true
+                                             uploads-database-id  db-id
+                                             uploads-schema-name  "not_public"
+                                             uploads-table-prefix nil]
+            (let [new-model (upload-example-csv! nil)
+                  new-table (t2/select-one Table :db_id db-id)]
+              (is (=? {:display          :table
+                       :database_id      db-id
+                       :dataset_query    {:database db-id
+                                          :query    {:source-table (:id new-table)}
+                                          :type     :query}
+                       :creator_id       (mt/user->id :rasta)
+                       :name             "example"
+                       :collection_id    nil} new-model))
+              (is (=? {:name #"(?i)example(.*)"
+                       :schema #"(?i)not_public"}
+                      new-table))
+              (is (= #{"id" "name"}
+                     (->> (t2/select Field :table_id (:id new-table))
+                          (map (comp #_{:clj-kondo/ignore [:discouraged-var]} str/lower-case :name))
+                          set))))))))))
+
+(deftest upload-csv!-table-prefix-test
+  (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+    (mt/with-empty-db
+      (let [db-id (u/the-id (mt/db))]
+        (testing "Happy path with table prefix, and without schema"
+          (mt/with-temporary-setting-values [uploads-enabled      true
+                                             uploads-database-id  db-id
+                                             uploads-schema-name  nil
+                                             uploads-table-prefix "uploaded_magic_"]
+            (let [new-model (upload-example-csv! nil)
+                  new-table (t2/select-one Table :db_id db-id)]
+              (is (= "example" (:name new-model)))
+              (is (=? {:name #"(?i)uploaded_magic_example(.*)"}
+                      new-table))
+              (if (= driver/*driver* :mysql)
+                (is (nil? (:schema new-table)))
+                (is (=? {:schema #"(?i)public"} new-table))))))))))
+
+(deftest upload-csv!-failure-test
+  ;; Just test with postgres because failure should be independent of the driver
+  (mt/test-driver :postgres
+    (mt/with-empty-db
+      (let [db-id (u/the-id (mt/db))]
+        (testing "Uploads must be enabled"
+          (doseq [uploads-enabled-value [false nil]]
+            (mt/with-temporary-setting-values [uploads-enabled      uploads-enabled-value
+                                               uploads-database-id  db-id
+                                               uploads-schema-name  "public"
+                                               uploads-table-prefix "uploaded_magic_"]
+              (is (thrown-with-msg?
+                   java.lang.Exception
+                   #"^Uploads are not enabled\.$"
+                   (upload-example-csv! nil))))))
+        (testing "Database ID must be set"
+          (mt/with-temporary-setting-values [uploads-enabled      true
+                                             uploads-database-id  nil
+                                             uploads-schema-name  "public"
+                                             uploads-table-prefix "uploaded_magic_"]
+            (is (thrown-with-msg?
+                 java.lang.Exception
+                 #"^The uploads database does not exist\.$"
+                 (upload-example-csv! nil)))))
+        (testing "Database ID must be valid"
+          (mt/with-temporary-setting-values [uploads-enabled      true
+                                             uploads-database-id  -1
+                                             uploads-schema-name  "public"
+                                             uploads-table-prefix "uploaded_magic_"]
+            (is (thrown-with-msg?
+                 java.lang.Exception
+                 #"^The uploads database does not exist\."
+                 (upload-example-csv! nil)))))
+        (testing "Uploads must be supported"
+          (with-redefs [driver/database-supports? (constantly false)]
+            (mt/with-temporary-setting-values [uploads-enabled      true
+                                               uploads-database-id  db-id
+                                               uploads-schema-name  "public"
+                                               uploads-table-prefix "uploaded_magic_"]
+              (is (thrown-with-msg?
+                   java.lang.Exception
+                   #"^Uploads are not supported on Postgres databases\."
+                   (upload-example-csv! nil))))))
+        (testing "User must have write permissions on the collection"
+          (mt/with-non-admin-groups-no-root-collection-perms
+            (mt/with-temporary-setting-values [uploads-enabled      true
+                                               uploads-database-id  db-id
+                                               uploads-schema-name  "public"
+                                               uploads-table-prefix "uploaded_magic_"]
+              (mt/with-current-user (mt/user->id :lucky)
+                (is (thrown-with-msg?
+                     java.lang.Exception
+                     #"^You do not have curate permissions for this Collection\.$"
+                     (upload-example-csv! nil)))))))))))
diff --git a/test/metabase/test.clj b/test/metabase/test.clj
index 2daaa97e00542d9dd28b074b47383f98a6babc51..0f07cfac4cba6b616f3f5340639c9e33017217aa 100644
--- a/test/metabase/test.clj
+++ b/test/metabase/test.clj
@@ -99,6 +99,7 @@
   with-actions-test-data
   with-actions-test-data-tables
   with-actions-test-data-and-actions-enabled
+  with-empty-db
   with-temp-test-data]
 
  [data
diff --git a/test/metabase/upload_test.clj b/test/metabase/upload_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..35bf28bb9d43e19cccb0b9e42a21a5a245f4da36
--- /dev/null
+++ b/test/metabase/upload_test.clj
@@ -0,0 +1,404 @@
+(ns metabase.upload-test
+  (:require
+   [clojure.string :as str]
+   [clojure.test :refer :all]
+   [metabase.driver :as driver]
+   [metabase.models :refer [Field Table]]
+   [metabase.query-processor :as qp]
+   [metabase.sync :as sync]
+   [metabase.test :as mt]
+   [metabase.upload :as upload]
+   [metabase.util :as u]
+   [toucan2.core :as t2])
+  (:import
+   [java.io File]))
+
+(set! *warn-on-reflection* true)
+
+(def bool-type      :metabase.upload/boolean)
+(def int-type       :metabase.upload/int)
+(def float-type     :metabase.upload/float)
+(def vchar-type     :metabase.upload/varchar_255)
+(def date-type      :metabase.upload/date)
+(def datetime-type  :metabase.upload/datetime)
+(def text-type      :metabase.upload/text)
+
+(deftest type-detection-test
+  (doseq [[value expected] [["0"                          bool-type]
+                            ["1"                          bool-type]
+                            ["t"                          bool-type]
+                            ["T"                          bool-type]
+                            ["tRuE"                       bool-type]
+                            ["f"                          bool-type]
+                            ["F"                          bool-type]
+                            ["FAlse"                      bool-type]
+                            ["Y"                          bool-type]
+                            ["n"                          bool-type]
+                            ["yes"                        bool-type]
+                            ["NO"                         bool-type]
+                            ["2"                          int-type]
+                            ["-86"                        int-type]
+                            ["9,986,000"                  int-type]
+                            ["3.14"                       float-type]
+                            [".14"                        float-type]
+                            ["0.14"                       float-type]
+                            ["-9,986.567"                 float-type]
+                            ["9,986,000.0"                float-type]
+                            [(apply str (repeat 255 "x")) vchar-type]
+                            [(apply str (repeat 256 "x")) text-type]
+                            ["86 is my favorite number"   vchar-type]
+                            ["My favorite number is 86"   vchar-type]
+                            ["2022-01-01"                 date-type]
+                            ["2022-01-01T01:00:00"        datetime-type]
+                            ["2022-01-01T01:00:00.00"     datetime-type]
+                            ["2022-01-01T01:00:00.000000000" datetime-type]]]
+    (testing (format "\"%s\" is a %s" value expected)
+      (is (= expected (upload/value->type value))))))
+
+(deftest type-coalescing-test
+  (doseq [[type-a type-b expected] [[bool-type     bool-type     bool-type]
+                                    [bool-type     int-type      int-type]
+                                    [bool-type     date-type     vchar-type]
+                                    [bool-type     datetime-type vchar-type]
+                                    [bool-type     vchar-type    vchar-type]
+                                    [bool-type     text-type     text-type]
+                                    [int-type      bool-type     int-type]
+                                    [int-type      float-type    float-type]
+                                    [int-type      date-type     vchar-type]
+                                    [int-type      datetime-type vchar-type]
+                                    [int-type      vchar-type    vchar-type]
+                                    [int-type      text-type     text-type]
+                                    [float-type    vchar-type    vchar-type]
+                                    [float-type    text-type     text-type]
+                                    [float-type    date-type     vchar-type]
+                                    [float-type    datetime-type vchar-type]
+                                    [date-type     datetime-type datetime-type]
+                                    [date-type     vchar-type    vchar-type]
+                                    [date-type     text-type     text-type]
+                                    [datetime-type vchar-type    vchar-type]
+                                    [datetime-type text-type     text-type]
+                                    [vchar-type    text-type     text-type]]]
+    (is (= expected (#'upload/lowest-common-ancestor type-a type-b))
+        (format "%s + %s = %s" (name type-a) (name type-b) (name expected)))))
+
+(defn csv-file-with
+  "Create a temp csv file with the given content and return the file"
+  ([rows]
+   (csv-file-with rows "test"))
+  ([rows filename]
+   (let [contents (str/join "\n" rows)
+         csv-file (doto (File/createTempFile filename ".csv")
+                    (.deleteOnExit))]
+     (spit csv-file contents)
+     csv-file)))
+
+(deftest detect-schema-test
+  (testing "Well-formed CSV file"
+    (is (= {"name"             vchar-type
+            "age"              int-type
+            "favorite_pokemon" vchar-type}
+           (upload/detect-schema
+            (csv-file-with ["Name, Age, Favorite Pokémon"
+                            "Tim, 12, Haunter"
+                            "Ryan, 97, Paras"])))))
+  (testing "CSV missing data"
+    (is (= {"name"       vchar-type
+            "height"     int-type
+            "birth_year" float-type}
+           (upload/detect-schema
+            (csv-file-with ["Name, Height, Birth Year"
+                            "Luke Skywalker, 172, -19"
+                            "Darth Vader, 202, -41.9"
+                            "Watto, 137"          ; missing column
+                            "Sebulba, 112,"]))))) ; comma, but blank column
+  (testing "Type coalescing"
+    (is (= {"name"       vchar-type
+            "height"     float-type
+            "birth_year" vchar-type}
+           (upload/detect-schema
+            (csv-file-with ["Name, Height, Birth Year"
+                            "Rey Skywalker, 170, 15"
+                            "Darth Vader, 202.0, 41.9BBY"])))))
+  (testing "Boolean coalescing"
+    (is (= {"name"          vchar-type
+            "is_jedi_"      bool-type
+            "is_jedi__int_" int-type
+            "is_jedi__vc_"  vchar-type}
+           (upload/detect-schema
+            (csv-file-with ["Name, Is Jedi?, Is Jedi (int), Is Jedi (VC)"
+                            "Rey Skywalker, yes, true, t"
+                            "Darth Vader, YES, TRUE, Y"
+                            "Grogu, 1, 9001, probably?"
+                            "Han Solo, no, FaLsE, 0"])))))
+  (testing "Order is ensured"
+    (let [header "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,zz,yy,xx,ww,vv,uu,tt,ss,rr,qq,pp,oo,nn,mm,ll,kk,jj,ii,hh,gg,ff,ee,dd,cc,bb,aa"]
+      (is (= (str/split header #",")
+             (keys
+              (upload/detect-schema
+               (csv-file-with [header
+                               "Luke,ah'm,yer,da,,,missing,columns,should,not,matter"])))))))
+  (testing "Empty contents (with header) are okay"
+      (is (= {"name"     text-type
+              "is_jedi_" text-type}
+             (upload/detect-schema
+              (csv-file-with ["Name, Is Jedi?"])))))
+  (testing "Completely empty contents are okay"
+      (is (= {}
+             (upload/detect-schema
+              (csv-file-with [""])))))
+  (testing "CSV missing data in the top row"
+    (is (= {"name"       vchar-type
+            "height"     int-type
+            "birth_year" float-type}
+           (upload/detect-schema
+            (csv-file-with ["Name, Height, Birth Year"
+                            ;; missing column
+                            "Watto, 137"
+                            "Luke Skywalker, 172, -19"
+                            "Darth Vader, 202, -41.9"
+                            ;; comma, but blank column
+                            "Sebulba, 112,"]))))))
+
+(deftest detect-schema-dates-test
+  (testing "Dates"
+    (is (= {"date"         date-type
+            "not_date"     vchar-type
+            "datetime"     datetime-type
+            "not_datetime" vchar-type}
+           (upload/detect-schema
+            (csv-file-with ["Date      ,Not Date  ,Datetime           ,Not datetime       "
+                            "2022-01-01,2023-02-28,2022-01-01T00:00:00,2023-02-28T00:00:00"
+                            "2022-02-01,2023-02-29,2022-01-01T00:00:00,2023-02-29T00:00:00"]))))))
+
+(deftest unique-table-name-test
+  (mt/test-driver (mt/normal-drivers-with-feature :uploads)
+    (testing "File name is slugified"
+      (is (=? #"my_file_name_\d+" (#'upload/unique-table-name driver/*driver* "my file name"))))
+    (testing "semicolons are removed"
+      (is (nil? (re-find #";" (#'upload/unique-table-name driver/*driver* "some text; -- DROP TABLE.csv")))))))
+
+(deftest load-from-csv-table-name-test
+  (testing "Upload a CSV file"
+    (mt/test-driver (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (let [file       (csv-file-with ["id" "2" "3"])]
+          (testing "Can upload two files with the same name"
+            ;; Sleep for a second, because the table name is based on the current second
+            (is (some? (upload/load-from-csv driver/*driver* (mt/id) "table_name" file)))
+            (Thread/sleep 1000)
+            (is (some? (upload/load-from-csv driver/*driver* (mt/id) "table_name" file)))))))))
+
+(defn- query-table!
+  [table]
+  (qp/process-query {:database (:db_id table)
+                     :type     :query
+                     :query    {:source-table (:id table)}}))
+
+(defn- column-names-for-table
+  [table]
+  (->> (query-table! table)
+       mt/cols
+       (map (comp u/lower-case-en :name))))
+
+(defn- rows-for-table
+  [table]
+  (mt/rows (query-table! table)))
+
+(deftest load-from-csv-test
+  (testing "Upload a CSV file"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with ["id,nulls,string,bool,number,date,datetime"
+                         "2\t ,,string,true ,1.1\t  ,2022-01-01,2022-01-01T00:00:00"
+                         "   3,,string,false,    1.1,2022-02-01,2022-02-01T00:00:00"]))
+        (testing "Table and Fields exist after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name         #"(?i)upload_test"
+                     :display_name "Upload Test"}
+                    table))
+            (is (=? {:name          #"(?i)id"
+                     :semantic_type :type/PK
+                     :base_type     :type/Integer}
+                    (t2/select-one Field :database_position 0 :table_id (:id table))))
+            (is (=? {:name      #"(?i)nulls"
+                     :base_type :type/Text}
+                    (t2/select-one Field :database_position 1 :table_id (:id table))))
+            (is (=? {:name      #"(?i)string"
+                     :base_type :type/Text}
+                    (t2/select-one Field :database_position 2 :table_id (:id table))))
+            (is (=? {:name      #"(?i)bool"
+                     :base_type :type/Boolean}
+                    (t2/select-one Field :database_position 3 :table_id (:id table))))
+            (is (=? {:name      #"(?i)number"
+                     :base_type :type/Float}
+                    (t2/select-one Field :database_position 4 :table_id (:id table))))
+            (is (=? {:name      #"(?i)date"
+                     :base_type :type/Date}
+                    (t2/select-one Field :database_position 5 :table_id (:id table))))
+            (is (=? {:name      #"(?i)datetime"
+                     :base_type (if (= driver/*driver* :mysql) :type/DateTimeWithLocalTZ :type/DateTime)}
+                    (t2/select-one Field :database_position 6 :table_id (:id table))))
+            (testing "Check the data was uploaded into the table"
+              (is (= 2
+                     (count (rows-for-table table)))))))))))
+
+(deftest load-from-csv-date-test
+  (testing "Upload a CSV file with a datetime column"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with ["datetime"
+                         "2022-01-01"
+                         "2022-01-01T00:00:00"]))
+        (testing "Fields exists after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name #"(?i)upload_test"} table))
+            (testing "Check the datetime column the correct base_type"
+              (is (=? {:name      #"(?i)datetime"
+                       :base_type (if (= driver/*driver* :mysql) :type/DateTimeWithLocalTZ :type/DateTime)}
+                      (t2/select-one Field :database_position 0 :table_id (:id table)))))
+            (is (some? table))))))))
+
+(deftest load-from-csv-boolean-test
+  (testing "Upload a CSV file"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with ["id,bool"
+                         "1,true"
+                         "2,false"
+                         "3,TRUE"
+                         "4,FALSE"
+                         "5,t    "
+                         "6,   f"
+                         "7,\tT"
+                         "8,F\t"
+                         "9,y"
+                         "10,n"
+                         "11,Y"
+                         "12,N"
+                         "13,yes"
+                         "14,no"
+                         "15,YES"
+                         "16,NO"
+                         "17,1"
+                         "18,0"]))
+        (testing "Table and Fields exist after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name #"(?i)upload_test"} table))
+            (testing "Check the boolean column has a boolean base_type"
+              (is (=? {:name      #"(?i)bool"
+                       :base_type :type/Boolean}
+                      (t2/select-one Field :database_position 1 :table_id (:id table)))))
+            (testing "Check the data was uploaded into the table correctly"
+              (let [bool-column (map second (rows-for-table table))
+                    alternating (map even? (range (count bool-column)))]
+                (is (= alternating bool-column))))))))))
+
+(deftest load-from-csv-length-test
+  (testing "Upload a CSV file with a long name"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (let [length-limit (driver/table-name-length-limit driver/*driver*)
+            long-name  (apply str (repeat 33 "abcdefgh")) ; 33×8 = 264. Max is H2 at 256
+            short-name (subs long-name 0 (- length-limit (count "_yyyyMMddHHmmss")))]
+        (is (pos? length-limit) "driver/table-name-length-limit has been set")
+        (mt/with-empty-db
+          (upload/load-from-csv
+           driver/*driver*
+           (mt/id)
+           (upload/unique-table-name driver/*driver* long-name)
+           (csv-file-with ["id,bool"
+                           "1,true"
+                           "2,false"]))
+          (testing "It truncates it to the right number of characters, allowing for the timestamp"
+            (sync/sync-database! (mt/db))
+            (let [table    (t2/select-one Table :db_id (mt/id) :%lower.name [:like (str short-name "%")])
+                  table-re (re-pattern (str "(?i)" short-name "_\\d{14}"))]
+              (is (re-matches table-re (:name table)))
+              (testing "Check the data was uploaded into the table correctly"
+                (is (= [[1 true] [2 false]] (rows-for-table table)))))))))))
+
+(deftest load-from-csv-empty-header-test
+  (testing "Upload a CSV file with a blank column name"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with [",ship name,"
+                         "1,Serenity,Malcolm Reynolds"
+                         "2,Millennium Falcon, Han Solo"]))
+        (testing "Table and Fields exist after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name #"(?i)upload_test"} table))
+            (testing "Check the data was uploaded into the table correctly"
+              (is (= ["unnamed_column", "ship_name", "unnamed_column_2"]
+                     (column-names-for-table table))))))))))
+
+(deftest load-from-csv-duplicate-names-test
+  (testing "Upload a CSV file with duplicate column names"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with ["unknown,unknown,unknown,unknown_2"
+                         "1,Serenity,Malcolm Reynolds,Pistol"
+                         "2,Millennium Falcon, Han Solo,Blaster"]))
+        (testing "Table and Fields exist after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name #"(?i)upload_test"} table))
+            (testing "Check the data was uploaded into the table correctly"
+              (is (= ["unknown", "unknown_2", "unknown_3", "unknown_2_2"]
+                     (column-names-for-table table))))))))))
+
+(deftest load-from-csv-reserved-db-words-test
+  (testing "Upload a CSV file with column names that are reserved by the DB"
+    (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+      (mt/with-empty-db
+        (upload/load-from-csv
+         driver/*driver*
+         (mt/id)
+         "upload_test"
+         (csv-file-with ["true,false,group"
+                         "1,Serenity,Malcolm Reynolds"
+                         "2,Millennium Falcon, Han Solo"]))
+        (testing "Table and Fields exist after sync"
+          (sync/sync-database! (mt/db))
+          (let [table (t2/select-one Table :db_id (mt/id))]
+            (is (=? {:name #"(?i)upload_test"} table))
+            (testing "Check the data was uploaded into the table correctly"
+              (is (= ["true", "false", "group"]
+                     (column-names-for-table table))))))))))
+
+(deftest load-from-csv-failed-test
+  (mt/test-drivers (mt/normal-drivers-with-feature :uploads)
+    (mt/with-empty-db
+      (testing "Can't upload a CSV with missing values"
+        (is (thrown-with-msg?
+             clojure.lang.ExceptionInfo #"Error executing write query: "
+             (upload/load-from-csv
+              driver/*driver*
+              (mt/id)
+              "upload_test"
+              (csv-file-with ["id,column_that_doesnt_have_a_value" "2"])))))
+      (testing "Check that the table isn't created if the upload fails"
+        (sync/sync-database! (mt/db))
+        (is (nil? (t2/select-one Table :db_id (mt/id))))))))
diff --git a/yarn.lock b/yarn.lock
index f52cc7e0d19b3442dee813870be22c7dc94e9160..1e79338f76a46c2cee900dfec565e774d93d2a7a 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7603,6 +7603,11 @@ atob@^2.1.2:
   resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
   integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==
 
+attr-accept@^2.2.2:
+  version "2.2.2"
+  resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b"
+  integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg==
+
 autobind-decorator@^2.1.0:
   version "2.4.0"
   resolved "https://registry.yarnpkg.com/autobind-decorator/-/autobind-decorator-2.4.0.tgz#ea9e1c98708cf3b5b356f7cf9f10f265ff18239c"
@@ -11699,6 +11704,13 @@ file-loader@^6.2.0:
     loader-utils "^2.0.0"
     schema-utils "^3.0.0"
 
+file-selector@^0.6.0:
+  version "0.6.0"
+  resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.6.0.tgz#fa0a8d9007b829504db4d07dd4de0310b65287dc"
+  integrity sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw==
+  dependencies:
+    tslib "^2.4.0"
+
 file-system-cache@^1.0.5:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/file-system-cache/-/file-system-cache-1.0.5.tgz#84259b36a2bbb8d3d6eb1021d3132ffe64cfff4f"
@@ -18100,6 +18112,15 @@ prop-types@15.x, prop-types@^15.0.0, prop-types@^15.5.10, prop-types@^15.5.4, pr
     object-assign "^4.1.1"
     react-is "^16.8.1"
 
+prop-types@^15.8.1:
+  version "15.8.1"
+  resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
+  integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==
+  dependencies:
+    loose-envify "^1.4.0"
+    object-assign "^4.1.1"
+    react-is "^16.13.1"
+
 property-expr@^2.0.4:
   version "2.0.5"
   resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.5.tgz#278bdb15308ae16af3e3b9640024524f4dc02cb4"
@@ -18495,6 +18516,15 @@ react-draggable@^4.0.0, react-draggable@^4.0.3:
     classnames "^2.2.5"
     prop-types "^15.6.0"
 
+react-dropzone@^14.2.3:
+  version "14.2.3"
+  resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-14.2.3.tgz#0acab68308fda2d54d1273a1e626264e13d4e84b"
+  integrity sha512-O3om8I+PkFKbxCukfIR3QAGftYXDZfOE2N1mr/7qebQJHs7U+/RSL/9xomJNpRg9kM5h9soQSdf0Gc7OHF5Fug==
+  dependencies:
+    attr-accept "^2.2.2"
+    file-selector "^0.6.0"
+    prop-types "^15.8.1"
+
 react-element-to-jsx-string@^13.1.0:
   version "13.2.0"
   resolved "https://registry.yarnpkg.com/react-element-to-jsx-string/-/react-element-to-jsx-string-13.2.0.tgz#550bb9670e4d8c82977934c14db14469d156a70d"
@@ -18549,7 +18579,7 @@ react-is@17.0.2, react-is@^17.0.2:
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0"
   integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==
 
-react-is@^16.12.0, react-is@^16.13.0, react-is@^16.3.1, react-is@^16.7.0, react-is@^16.8.1:
+react-is@^16.12.0, react-is@^16.13.0, react-is@^16.13.1, react-is@^16.3.1, react-is@^16.7.0, react-is@^16.8.1:
   version "16.13.1"
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
   integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
@@ -21251,6 +21281,11 @@ tslib@^2.0.3:
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c"
   integrity sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w==
 
+tslib@^2.4.0:
+  version "2.5.0"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.0.tgz#42bfed86f5787aeb41d031866c8f402429e0fddf"
+  integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==
+
 tsutils@^3.21.0:
   version "3.21.0"
   resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"