diff --git a/.gitignore b/.gitignore
index 4a6c127814a2df51961b52891c8cd39bde0536d7..38ecf1822a3d59d0aafa60cba352b6f30baefb18 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,7 +36,6 @@ OSX/Resources/metabase.jar
 OSX/build
 /osx-artifacts
 OSX/dsa_priv.pem
-bin/config.json
 bin/release/aws-eb/metabase-aws-eb.zip
 *.sqlite
 /reset-password-artifacts
diff --git a/bin/aws-eb-docker/.ebextensions/metabase_config/metabase-setup.sh b/bin/aws-eb-docker/.ebextensions/metabase_config/metabase-setup.sh
index a976e37df35cdda28c457cdfe1690cd9c5fff399..a46666bb1617a6c3e5a5ac602f57e5c438b544ed 100755
--- a/bin/aws-eb-docker/.ebextensions/metabase_config/metabase-setup.sh
+++ b/bin/aws-eb-docker/.ebextensions/metabase_config/metabase-setup.sh
@@ -70,7 +70,6 @@ server {
         set $day $3;
         set $hour $4;
     }
-    access_log /var/log/nginx/healthd/application.log.$year-$month-$day-$hour healthd;
 
     access_log    /var/log/nginx/access.log;
 
@@ -122,7 +121,6 @@ server {
         set $day $3;
         set $hour $4;
     }
-    access_log /var/log/nginx/healthd/application.log.$year-$month-$day-$hour healthd;
 
     access_log    /var/log/nginx/access.log;
 
@@ -169,7 +167,7 @@ cp_default_server () {
 log_x_real_ip () {
     cp .ebextensions/metabase_config/nginx/log_x_real_ip.conf /etc/nginx/conf.d/log_x_real_ip.conf
     cd  /etc/nginx/sites-available
-    if ! grep -q access_log *-proxy.conf ; then 
+    if ! grep -q access_log *-proxy.conf ; then
         sed -i 's|location \/ {|location \/ {\n\n        access_log \/var\/log\/nginx\/access.log log_x_real_ip;\n|' *-proxy.conf
     fi
 }
diff --git a/bin/ci b/bin/ci
index a461187df7c94fd125108a2d60e655c452f1cc71..e45bc3f55575080fdab0f9175fe84476dcec87a7 100755
--- a/bin/ci
+++ b/bin/ci
@@ -155,6 +155,9 @@ trap summary EXIT
 fail_fast() {
   echo -e "========================================"
   echo -e "Failing fast! Stopping other nodes..."
+  # Touch a file to differentiate between a local failure and a
+  # failure triggered by another node
+  touch '/tmp/local-fail'
   # ssh to the other CircleCI nodes and send SIGUSR1 to tell them to exit early
   for (( i = 0; i < $CIRCLE_NODE_TOTAL; i++ )); do
     if [ $i != $CIRCLE_NODE_INDEX ]; then
@@ -182,7 +185,12 @@ fi
 
 export CIRCLE_COMMIT_MESSAGE="$(git log --format=oneline -n 1 $CIRCLE_SHA1)"
 
-if [ -f "/tmp/fail" ]; then
+# This local-fail check is to guard against two nodes failing at the
+# same time. Both nodes ssh to each node and drop /tmp/fail. Those
+# failing nodes then get here and see that the other node has told it
+# to exit early. This results in both nodes exiting early, and thus
+# not failing, causing the build to succeed
+if [[ -f "/tmp/fail" && ! -f "/tmp/local-fail" ]]; then
   exit_early
 fi
 
diff --git a/bin/config.json.template b/bin/config.json
similarity index 68%
rename from bin/config.json.template
rename to bin/config.json
index a8aa578b1d5ed62cdc0621c3b9c7a47e7d091129..e6cd9b2b050b23f2bf47d3d44314ceed866528cd 100644
--- a/bin/config.json.template
+++ b/bin/config.json
@@ -1,6 +1,5 @@
 {
     "codesigningIdentity": "Developer ID Application: Metabase, Inc",
-    "slackWebhookURL": "",
     "awsProfile": "metabase",
-    "awsBucket": ""
+    "awsBucket": "downloads.metabase.com"
 }
diff --git a/bin/docker/run_metabase.sh b/bin/docker/run_metabase.sh
index cd2eb1c3d0da1955818c8917db6261759fbd6973..8d719dab26f680d6294e259928f94a65fd26c003 100755
--- a/bin/docker/run_metabase.sh
+++ b/bin/docker/run_metabase.sh
@@ -1,8 +1,8 @@
 #!/bin/bash
 
-# if nobody manually set a host to list on then go with $HOSTNAME
+# if nobody manually set a host to listen on then go with all available interfaces and host names
 if [ -z "$MB_JETTY_HOST" ]; then
-    export MB_JETTY_HOST=$HOSTNAME
+    export MB_JETTY_HOST=0.0.0.0
 fi
 
 
diff --git a/bin/osx-release b/bin/osx-release
index 0165e4b915613d628c3e1b25a8c609e8d930525e..c6fe2c13bbed385ae706601e986835f00bfc5863 100755
--- a/bin/osx-release
+++ b/bin/osx-release
@@ -8,11 +8,9 @@ use File::Copy 'copy';
 use File::Copy::Recursive 'rcopy';   # CPAN
 use File::Path 'remove_tree';
 use File::stat 'stat';
-use JSON 'encode_json', 'from_json'; # CPAN
 use Readonly;                        # CPAN
 use String::Util 'trim';             # CPAN
 use Text::Caml;                      # CPAN
-use WWW::Curl::Simple;               # CPAN
 
 use Metabase::Util;
 
@@ -298,30 +296,6 @@ sub create_dmg {
 
 # ------------------------------------------------------------ UPLOADING ------------------------------------------------------------
 
-sub announce_on_slack {
-    Readonly my $slack_url => config('slackWebhookURL') or return;
-    Readonly my $version   => version();
-    Readonly my $awsURL    => 'https://s3.amazonaws.com/' . config('awsBucket') . '/' . upload_subdir() . '/Metabase.dmg';
-    my $text = "Metabase OS X $version 'Complexity-Embracing Toucan' Is Now Available!\n\n" .
-               "Get it here: $awsURL\n\n";
-
-    open(my $file, get_file_or_die($release_notes)) or die $!;
-    while (<$file>) {
-        m/^\s+<li>.*$/ && s|^\s+<li>(.*)</li>$|$1| && ($text .= '*  ' . $_);
-    }
-
-    my $json = encode_json {
-        channel    => '#general',
-        username   => 'OS X Bot',
-        icon_emoji => ':bird:',
-        text       => trim($text)
-    };
-
-    my $curl = WWW::Curl::Simple->new;
-    unless ((my $response = $curl->post($slack_url, $json))->code == 200) {
-        die 'Error posting to slack: ' . $response->code . ' ' . $response->content . "\n";
-    }
-}
 
 # Upload artifacts to AWS
 # Make sure to run `aws configure --profile metabase` first to set up your ~/.aws/config file correctly
@@ -352,8 +326,6 @@ sub upload {
            's3', 'cp', $upload_dir,
            "s3://$aws_bucket") == 0 or die "Upload failed: $!\n";
 
-    announce_on_slack;
-
     announce "Upload finished."
 }
 
diff --git a/bin/osx-setup b/bin/osx-setup
index fd2011ef84fdcece113295244dd6251136f8e16d..811963ad7185477a823e7872ce859fd3d5d5b651 100755
--- a/bin/osx-setup
+++ b/bin/osx-setup
@@ -21,11 +21,13 @@ use constant UBERJAR_DEST => getcwd() . '/OSX/Resources/metabase.jar';
 use constant RESET_PW_SRC => getcwd() . '/reset-password-artifacts/reset-password/reset-password.jar';
 use constant RESET_PW_DEST => getcwd() . '/OSX/Resources/reset-password.jar';
 
+use constant BUILD_SCRIPT => getcwd() . '/bin/build';
+
 # Copy the JRE if needed
 (rcopy(JRE_HOME, JRE_DEST) or die $!) unless -d JRE_DEST;
 
 # Build jars if needed
-(system('./bin/build') or die $!) unless -f UBERJAR_SRC;
+(system(BUILD_SCRIPT) or die $!) unless -f UBERJAR_SRC;
 (system('lein', 'with-profile', 'reset-password', 'jar') or die $!) unless -f RESET_PW_SRC;
 
 # Copy jars over
diff --git a/bin/version b/bin/version
index 8f8564b7acdd9298a03c847856b3540774b54092..0847d46dbf2eab160b594ac1e30775b36791282c 100755
--- a/bin/version
+++ b/bin/version
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-VERSION="v0.25.0-snapshot"
+VERSION="v0.26.0-snapshot"
 
 # dynamically pull more interesting stuff from latest git commit
 HASH=$(git show-ref --head --hash=7 head)            # first 7 letters of hash should be enough; that's what GitHub uses
diff --git a/docs/administration-guide/10-single-sign-on.md b/docs/administration-guide/10-single-sign-on.md
index 958384e96255ce63ea01aedc899304190d86d3cf..142030502425aab4e4662ea3b7994789adfdb3ba 100644
--- a/docs/administration-guide/10-single-sign-on.md
+++ b/docs/administration-guide/10-single-sign-on.md
@@ -1,10 +1,12 @@
-## Single Sign-On with Google
+## Authenticating with Google Sign-In or LDAP
 
-Enabling single sign-on lets your team log in with a click instead of using email and password and can optionally let them sign up for Metabase accounts without an admin having to create them first.
+Enabling Google Sign-In or LDAP lets your team log in with a click instead of using email and password, and can optionally let them sign up for Metabase accounts without an admin having to create them first. You can find these options in the Settings section of the Admin Panel, under Authentication.
 
-Currently Metabase works with Google accounts for single sign-on. As time goes on we may add other auth providers. If you have a service you’d like to see work with Metabase please let us know by [filing an issue](http://github.com/metabase/metabase/issues/new).
+![Authentication](./images/authentication.png)
 
-### Enabling Sign in
+As time goes on we may add other auth providers. If you have a service you’d like to see work with Metabase please let us know by [filing an issue](http://github.com/metabase/metabase/issues/new).
+
+### Enabling Google Sign-In
 
 To let your team start signing in with Google you’ll first need to create an application through Google’s [developer console](https://console.developers.google.com/projectselector/apis/library).
 
@@ -16,14 +18,30 @@ Once you have your client_id, copy and paste it into the box on the Single Sign-
 
 Now existing Metabase users signed into a Google account that matches their Metabase account email can sign in with just a click.
 
-###  Enabling Sign up
+###  Enabling account creation with Google Sign-In
+
+If you’ve added your Google client ID to your Metabase settings you can also let users sign up on their own without creating accounts for them.
+
+To enable this, go to the Google Sign-In configuration page, and specify the email domain you want to allow. For example, if you work at WidgetCo you could enter `widgetco.com` in the field to let anyone with a company email sign up on their own.
+
+Note: Metabase accounts created with Google Sign-In do not have passwords and must use Google to sign in to Metabase.
+
+
+### Enabling LDAP authentication
+
+If your organization uses LDAP, and you want to allow your users to log in via their LDAP credentials, you can do so as follows.
+
+Click the `Configure` button in the LDAP section of the Authentication page, and you'll see this form:
+
+![Authentication](./images/ldap-form.png)
 
-If you’ve added your Google client id to your Metabase settings you can also let users sign up on their own without creating accounts for them.
+Click the toggle at the top of the form to enable LDAP, then fill in the form with the information about your LDAP server.
 
-To enable this, check the box on the Single Sign-On Admin Settings page and specify the email domain you want to allow. For example if you work at WidgetCo you could enter widgetco.com in the field to let anyone with a company email sign up on their own.
+Metabase will pull out three main attributes from your LDAP directory - email (defaulting to the `mail` attribute), first name (defaulting to the `givenName` attribute) and last name (defaulting to the `sn` attribute). If your LDAP setup uses other attributes for these, you can edit this under the "Attributes" portion of the form. 
 
-Note: Metabase accounts created with Single Sign-On do not have passwords and must use Google to sign in to Metabase.
+![Attributes](./images/ldap-attributes.png)
 
+If you have user groups in Metabase you are using to control access, it is often tedious to have to manually assign a user to a group after they're logged in via SSO. You can take advantage of the groups your LDAP directory uses by enabling Group Mappings, and specifying which LDAP group corresponds to which user group on your Metabase server. 
 
 ---
 
diff --git a/docs/administration-guide/images/authentication.png b/docs/administration-guide/images/authentication.png
new file mode 100644
index 0000000000000000000000000000000000000000..2e8b39dea2fba8081ddd5b315262659d2c07f7e8
Binary files /dev/null and b/docs/administration-guide/images/authentication.png differ
diff --git a/docs/administration-guide/images/ldap-attributes.png b/docs/administration-guide/images/ldap-attributes.png
new file mode 100644
index 0000000000000000000000000000000000000000..8332385c03e1f4ddfffc04f0b106ab146441af3c
Binary files /dev/null and b/docs/administration-guide/images/ldap-attributes.png differ
diff --git a/docs/administration-guide/images/ldap-form.png b/docs/administration-guide/images/ldap-form.png
new file mode 100644
index 0000000000000000000000000000000000000000..4d1cfdaac0795e8e28339def0b34e4adadaafde1
Binary files /dev/null and b/docs/administration-guide/images/ldap-form.png differ
diff --git a/docs/administration-guide/start.md b/docs/administration-guide/start.md
index 9c8e21e123193cb85fc16fa916a1726159cf2054..bf6b44a2e7194555d3a1db96ad804478d6ae7060 100644
--- a/docs/administration-guide/start.md
+++ b/docs/administration-guide/start.md
@@ -13,7 +13,7 @@ Are you in charge of managing Metabase for your organization? Then you're in the
 * [Creating segments and metrics](07-segments-and-metrics.md)
 * [Configuring settings](08-configuration-settings.md)
 * [Setting up Slack integration](09-setting-up-slack.md)
-* [Enabling single sign-on with Google](10-single-sign-on.md)
+* [Authenticating with Google Sign-In or LDAP](10-single-sign-on.md)
 * [Creating a Getting Started Guide for your team](11-getting-started-guide.md)
 * [Sharing dashboards and questions with public links](12-public-links.md)
 * [Embedding Metabase in other Applications](13-embedding.md)
diff --git a/docs/developers-guide-osx.md b/docs/developers-guide-osx.md
index 1defb97c6d9995e015fc532e00f856426d7a606b..53c6448b8796d47db49161d15710351592dac1a5 100644
--- a/docs/developers-guide-osx.md
+++ b/docs/developers-guide-osx.md
@@ -1,49 +1,24 @@
 # Metabase OS X App
 
-NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see our [developers' guide](developers-guide.md). 
+NOTE: These instructions are only for packaging a built Metabase uberjar into `Metabase.app`. They are not useful if your goal is to work on Metabase itself; for development, please see our [developers' guide](developers-guide.md).
 
 ## Prereqs
 
 1.  Install XCode.
 
-2.  Run `./bin/build` to build the latest version of the uberjar.
+1.  Install XCode command-line tools. In `Xcode` > `Preferences` > `Locations` select your current Xcode version in the `Command Line Tools` drop-down.
 
-3.  Update Perl. I'm not sure these steps are actually needed, so feel free to try skipping it and come back to it if it fails:
-  
-    ```bash
-      # Upgrade Perl
-      brew install perl
-      
-      # Add new version of perl to your $PATH
-      # (replace "5.24.0_1" below with whatever version you installed)
-      echo 'export PATH="/usr/local/Cellar/perl/5.24.0_1/bin:$PATH"' >> ~/.bash_profile
-      source ~/.bash_profile
-      
-      # Double-check that we're using the newer version of CPAN
-      # (If this is your first time running CPAN, use the default config settings when prompted)
-      cpan --version # You should see a line like "running under Perl version 5.24.0."
-    ```
+1.  Run `./bin/build` to build the latest version of the uberjar.
 
-4.  Next, you'll need to run the following commands before building the app:
+1.  Next, you'll need to run the following commands before building the app:
 
     ```bash
       # Fetch and initialize git submodule
       git submodule update --init
-      
-      # Install libcurl (needed by WWW::Curl::Simple (I think))
-      brew install curl && brew link curl --force
-      
-      # The new version of LLVM is snippy so have CPAN pass compiler flags to fix errors
-      # (Make sure this file exists first. If you didn't upgrade Perl in the step above, 
-      # it might be in a different location; perhaps called "Config.pm". 
-      # You may need to run "cpan" (no arguments) to generate an appropriate initial config. 
-      # As above, you can go with the defaults).
-      sed -i '' -e "s/'make_arg' => q\[\]/'make_arg' => q\[CCFLAGS=\"-Wno-return-type\"\]/" ~/.cpan/CPAN/MyConfig.pm
 
       # Install Perl modules used by ./bin/osx-setup and ./bin/osx-release
-      # You may have to run this as sudo if you didn't upgrade perl as described in step above
-      cpan install File::Copy::Recursive JSON Readonly String::Util Text::Caml WWW::Curl::Simple
-      
+      sudo cpan install File::Copy::Recursive Readonly String::Util Text::Caml JSON
+
       # Copy JRE and uberjar
       ./bin/osx-setup
     ```
@@ -66,19 +41,27 @@ brew install awscli
 # You just need the access key ID and secret key; use the defaults for locale and other options.
 aws configure --profile metabase
 
-# Copy & Edit Config file. Alternative ask Cam for a copy of his
-cp bin/config.json.template bin/config.json
-emacs bin/config.json
-
 # Obtain a copy of the private key used for signing the app (ask Cam)
 # and put a copy of it at ./dsa_priv.pem
 cp /path/to/private/key.pem OSX/dsa_priv.pem
 ```
 
-You'll probably also want an Apple Developer ID Application Certificate in your computer's keychain. You'll need to generate a Certificate Signing Request from Keychain Access, and have Sameer go to [the Apple Developer Site](https://developer.apple.com/account/mac/certificate/) and generate one for you, then load the file on your computer. 
+You'll need the `Apple Developer ID Application Certificate` in your computer's keychain.
+You'll need to generate a Certificate Signing Request from Keychain Access, and have Sameer go to [the Apple Developer Site](https://developer.apple.com/account/mac/certificate/)
+and generate one for you, then load the file on your computer.
+
+Finally, you may need to open the project a single time in Xcode to make sure the appropriate "build schemes" are generated (these are not checked into CI).
+Run `open OSX/Metabase.xcodeproj` to open the project, which will automatically generate the appropriate schemes. This only needs to be done once.
 
 After that, you are good to go:
 ```bash
 # Bundle entire app, and upload to s3
 ./bin/osx-release
 ```
+
+## Debugging ./bin/osx-release
+
+*  You can run individual steps of the release script by passing in the appropriate step subroutines. e.g. `./bin/osx-release create_dmg upload`.
+   The entire sequence of different steps can be found at the bottom of `./bin/osx-release`.
+*  Generating the DMG seems to be somewhat finicky, so if it fails with a message like "Device busy" trying the step again a few times usually resolves the issue.
+   You can continue the build process from the DMG creation step by running `./bin/osx-release create_dmg upload`.
diff --git a/docs/developers-guide.md b/docs/developers-guide.md
index 53431a3f7cf5c7017c4dd06c627d9b9233314035..91130dbf3ab4d75844294f83a1b1cc86b8e33ead 100644
--- a/docs/developers-guide.md
+++ b/docs/developers-guide.md
@@ -156,10 +156,12 @@ By default, the tests only run against the `h2` driver. You can specify which dr
 
     ENGINES=h2,postgres,mysql,mongo lein test
 
-At the time of this writing, the valid engines are `h2`, `postgres`, `mysql`, `mongo`, `sqlserver`, `sqlite`, `druid`, `bigquery`, and `redshift`. Some of these engines require additional parameters
+At the time of this writing, the valid engines are `h2`, `postgres`, `mysql`, `mongo`, `sqlserver`, `sqlite`, `druid`, `bigquery`, `oracle`, `vertica`, and `redshift`. Some of these engines require additional parameters
 when testing since they are impossible to run locally (such as Redshift and Bigquery). The tests will fail on launch and let you know what parameters to supply if needed.
 
-Run the linters:
+Due to some issues with the way we've structured our test setup code, you currently always need to include `h2` in the `ENGINES` list. Thus to test something like `bigquery` you should specify `ENGINES=h2,bigquery`. Fortunately the H2 tests are fast so this should not make a noticeable difference.
+
+##### Run the linters:
 
     lein eastwood && lein bikeshed && lein docstring-checker && ./bin/reflection-linter
 
diff --git a/docs/operations-guide/running-metabase-on-heroku.md b/docs/operations-guide/running-metabase-on-heroku.md
index d1fb6b320fe001ef497a04baa35e9146673b078f..7e7e20aa088d2f4e930dcc2492ca5ea27ec1d386 100644
--- a/docs/operations-guide/running-metabase-on-heroku.md
+++ b/docs/operations-guide/running-metabase-on-heroku.md
@@ -58,7 +58,7 @@ git remote add heroku https://git.heroku.com/your-metabase-app.git
 
 * If you are upgrading from a version that is lower than 0.25, add the Metabase buildpack to your Heroku app:
 ```
-heroku buildpacks:add https://github.com/metabase/metabase-heroku
+heroku buildpacks:add https://github.com/metabase/metabase-buildpack
 ```
 
 * Force push the new version to Heroku:
diff --git a/docs/operations-guide/start.md b/docs/operations-guide/start.md
index 1ad0822d8a3f1257c423ddd38d522538ffc23bbd..859f3c630dda86720da183d99663ddbd1845ea07 100644
--- a/docs/operations-guide/start.md
+++ b/docs/operations-guide/start.md
@@ -377,3 +377,14 @@ By default Metabase will include emoji characters in logs. You can disable this
 
     export MB_EMOJI_IN_LOGS="false"
     java -jar metabase.jar
+
+# Configuring Logging Level
+
+By default, Metabase logs quite a bit of information. Luckily, Metabase uses [Log4j](http://logging.apache.org/log4j) under the hood, meaning the logging is completely configurable.
+
+Metabase's default logging configuration can be found [here](https://github.com/metabase/metabase/blob/master/resources/log4j.properties). You can override this properties file and tell
+Metabase to use your own logging configuration file by passing a `-Dlog4j.configuration` argument when running Metabase:
+
+    java -Dlog4j.configuration=file:/path/to/custom/log4j.properties -jar metabase.jar
+
+The easiest way to get started customizing logging would be to use a copy of default `log4j.properties` file linked to above and adjust that to meet your needs. Keep in mind that you'll need to restart Metabase for changes to the file to take effect.
diff --git a/docs/troubleshooting-guide/email.md b/docs/troubleshooting-guide/email.md
index 564abd2f6b413f25f07747ef7a42c85e8ac57ca3..4c798c64162f9b9368247f0799850d863d721245 100644
--- a/docs/troubleshooting-guide/email.md
+++ b/docs/troubleshooting-guide/email.md
@@ -1,6 +1,24 @@
 
+
 ## Troubleshooting Process
-1. 
+
+1. Verify the Email Account Credentials
+    a. In the admin panel email settings, click "Send test email", and verify that the email is delivered to the test account
+    b. If the email is not sent or returns an error, try to use the same account credentials in another program and see if they work. If they do, it might be a bug, please report it at github.com/metabase/metabase/issues/new
+
+2. Is the email being sent? 
+    a. Check the server logs for any error messages
+    b. If there are any error messages, they are usually helpful :wink:
+    c. If you have access to your email delivery service's outbound queue or a dashboard, check that for errors. 
+    d. Some email delivery services have very specific rules regarding valid "from" addresses, make sure you've whitelisted the "from" address you're using in Metabase
+    e. Some email delivery services have test modes or otherwise restricted delivery. Double check that your delivery service allows you to send email to the domain you're trying to get email sent to.
+
+3. If the email is being sent, but you're not getting it, is anyone else getting theirs?
+    a. If so, check your spam folder, any forwarding rules, etc
+    b. If someone at another email provider is getting emails, this is probably due to deliverability rules, and you should look into signing your emails with DKIM, etc.
+
+4. For user accounts specifically, did you previously create an account under this email and then delete it? This occasionally results in that email address being "claimed".
+
 
 ## Specific Problems:
 
diff --git a/docs/users-guide/03-basic-exploration.md b/docs/users-guide/03-basic-exploration.md
index 98dbe34532d33ab8bfdf587433cac3651a518ada..d3f48187f6c868a4deb0fb0f31336346595353fb 100644
--- a/docs/users-guide/03-basic-exploration.md
+++ b/docs/users-guide/03-basic-exploration.md
@@ -22,7 +22,7 @@ Lastly, clicking on the ID of an item in table gives you the option to go to a d
 **Note that charts created with SQL don't currently have these action options.**
 
 #### Exploring saved questions
-In Metabase parlance, every chart on number on a dashboard is called a "question." Clicking on the title of a question on a dashboard will take you to a detail view of that question. You'll also end up at this detail view if you use one of the actions mentioned above. You can also browse all the questions your teammates have saved by clicking the `Questions` link in the main navigation.
+In Metabase parlance, every chart or number on a dashboard is called a "question." Clicking on the title of a question on a dashboard will take you to a detail view of that question. You'll also end up at this detail view if you use one of the actions mentioned above. You can also browse all the questions your teammates have saved by clicking the `Questions` link in the main navigation.
 
 When you're viewing the detail view of a question, you can use all the same actions mentioned above. You can also click on the headings of tables to see more options, like viewing the sum of the values in a column, or finding the minimum or maximum value in it.
 
diff --git a/docs/users-guide/04-asking-questions.md b/docs/users-guide/04-asking-questions.md
index 60f5725b26e495a119c4fa1965c4f5cb61c70c94..32e1efe80f0bcb2fe5a534f74d3fd3b24161196d 100644
--- a/docs/users-guide/04-asking-questions.md
+++ b/docs/users-guide/04-asking-questions.md
@@ -17,7 +17,7 @@ The first dropdown menu in the question builder is where you’ll choose the dat
 
 If you've [saved some questions](06-sharing-answers.html), in the Data menu you'll see the option to use one of your saved questions as source data. What this means in practice is that you can do things like use complex SQL queries to create new tables that can be used in a question just like any other table in your database.
 
-You can use any saved question as source data, provided you have [permission](../administration-guide/05-setting-permissions.html) to view that question. You can even use questions that were saved as a chart rather than a table. The only caveat is that you can't use a saved question which itself uses a saved question as source data. (That's more inception than Metabase can handle!)
+You can use any saved question as source data, provided you have [permission](../administration-guide/05-setting-permissions.html) to view that question. You can even use questions that were saved as a chart rather than a table.
 
 ### Filters
 ---
@@ -124,7 +124,7 @@ Say we had a table of baseball games, each row representing a single game, and w
 
 The words in the quotes are the names of the fields in our table. If you start typing in this box, Metabase will show you fields in the current table that match what you’ve typed, and you can select from this list to autocomplete the field name.
 
-Right now, you can only use the following math operators in your formulas: +, –, * (multiplication), and / (division). You can also use parentheses to clarify the order of operations.
+Right now, you can only use the following math operators in your formulas: `+`, `–`, `*` (multiplication), and `/` (division). You can also use parentheses to clarify the order of operations.
 
 Once you’ve written your formula and given your new field a name, select `Raw Data` for your view, and click the `Get Answer` button to see your new field appended to your current table. It’ll be on the far right of the table. **Note that this new field is NOT permanently added to this table.** It will only be kept if you save a question that uses it.
 
diff --git a/frontend/src/metabase-lib/lib/Dimension.js b/frontend/src/metabase-lib/lib/Dimension.js
index 4ba742591e2f7cc7f9a3fff9a0f8d0155b485cfa..39ec3621b811dcb5d0f8805949f99352bbd51dc2 100644
--- a/frontend/src/metabase-lib/lib/Dimension.js
+++ b/frontend/src/metabase-lib/lib/Dimension.js
@@ -42,6 +42,10 @@ export default class Dimension {
     _args: any;
     _metadata: ?Metadata;
 
+    // Display names provided by the backend
+    _subDisplayName: ?String;
+    _subTriggerDisplayName: ?String;
+
     /**
      * Dimension constructor
      */
@@ -108,17 +112,15 @@ export default class Dimension {
      */
     // TODO Atte Keinänen 5/21/17: Rename either this or the static method with the same name
     // Also making it clear in the method name that we're working with sub-dimensions would be good
-    dimensions(
-        DimensionTypes: typeof Dimension[] = DIMENSION_TYPES
-    ): Dimension[] {
+    dimensions(DimensionTypes?: typeof Dimension[]): Dimension[] {
         const dimensionOptions = this.field().dimension_options;
-        if (dimensionOptions) {
+        if (!DimensionTypes && dimensionOptions) {
             return dimensionOptions.map(option =>
                 this._dimensionForOption(option));
         } else {
             return [].concat(
-                ...DimensionTypes.map(DimensionType =>
-                    DimensionType.dimensions(this))
+                ...(DimensionTypes || [])
+                    .map(DimensionType => DimensionType.dimensions(this))
             );
         }
     }
@@ -155,8 +157,8 @@ export default class Dimension {
         }
         let dimension = Dimension.parseMBQL(mbql, this._metadata);
         if (option.name) {
-            dimension.subDisplayName = () => option.name;
-            dimension.subTriggerDisplayName = () => option.name;
+            dimension._subDisplayName = option.name;
+            dimension._subTriggerDisplayName = option.name;
         }
         return dimension;
     }
@@ -255,7 +257,7 @@ export default class Dimension {
      * @abstract
      */
     subDisplayName(): string {
-        return "";
+        return this._subDisplayName || "";
     }
 
     /**
@@ -263,7 +265,7 @@ export default class Dimension {
      * @abstract
      */
     subTriggerDisplayName(): string {
-        return "";
+        return this._subTriggerDisplayName || "";
     }
 
     /**
@@ -304,16 +306,26 @@ export class FieldDimension extends Dimension {
     }
 
     subDisplayName(): string {
-        if (this._parent) {
+        if (this._subDisplayName) {
+            return this._subDisplayName;
+        } else if (this._parent) {
+            // TODO Atte Keinänen 8/1/17: Is this used at all?
             // foreign key, show the field name
             return this.field().display_name;
-        } else if (this.field().isNumber()) {
-            return "Continuous (no binning)";
         } else {
+            // TODO Atte Keinänen 8/1/17: Is this used at all?
             return "Default";
         }
     }
 
+    subTriggerDisplayName(): string {
+        if (this.defaultDimension() instanceof BinnedDimension) {
+            return "Unbinned";
+        } else {
+            return "";
+        }
+    }
+
     icon() {
         return this.field().icon();
     }
@@ -465,11 +477,7 @@ export class BinnedDimension extends FieldDimension {
     }
 
     static dimensions(parent: Dimension): Dimension[] {
-        if (isFieldDimension(parent) && parent.field().isNumber()) {
-            return [5, 10, 25, 100].map(
-                bins => new BinnedDimension(parent, ["default", bins])
-            );
-        }
+        // Subdimensions are provided by the backend through the dimension_options field property
         return [];
     }
 
@@ -481,18 +489,20 @@ export class BinnedDimension extends FieldDimension {
         return this._parent.baseDimension();
     }
 
-    subDisplayName(): string {
-        if (this._args[0] === "default") {
-            return `Quantized into ${this._args[1]} ${inflect("bins", this._args[1])}`;
-        }
-        return JSON.stringify(this._args);
-    }
-
     subTriggerDisplayName(): string {
-        if (this._args[0] === "default") {
+        if (this._args[0] === "num-bins") {
             return `${this._args[1]} ${inflect("bins", this._args[1])}`;
+        } else if (this._args[0] === "bin-width") {
+            const binWidth = this._args[1];
+            const units = this.field().isCoordinate() ? "°" : "";
+            return `${binWidth}${units}`;
+        } else {
+            return "Auto binned";
         }
-        return "";
+    }
+
+    render() {
+        return [...super.render(), ": ", this.subTriggerDisplayName()];
     }
 }
 
@@ -541,6 +551,10 @@ export class AggregationDimension extends Dimension {
         return this._displayName;
     }
 
+    aggregationIndex(): number {
+        return this._args[0];
+    }
+
     mbql() {
         return ["aggregation", this._args[0]];
     }
diff --git a/frontend/src/metabase-lib/lib/Question.js b/frontend/src/metabase-lib/lib/Question.js
index e3be6ece333262afd68d63f48436afed2b66ba8e..e9169b0e3e4335c3ed9362de1ac9037188f5f0ae 100644
--- a/frontend/src/metabase-lib/lib/Question.js
+++ b/frontend/src/metabase-lib/lib/Question.js
@@ -224,11 +224,11 @@ export default class Question {
     breakout(b) {
         return this.setCard(breakout(this.card(), b));
     }
-    pivot(breakout, dimensions = []) {
+    pivot(breakouts = [], dimensions = []) {
         const tableMetadata = this.tableMetadata();
         return this.setCard(
             // $FlowFixMe: tableMetadata could be null
-            pivot(this.card(), breakout, tableMetadata, dimensions)
+            pivot(this.card(), tableMetadata, breakouts, dimensions)
         );
     }
     filter(operator, column, value) {
@@ -246,6 +246,25 @@ export default class Question {
     toUnderlyingData(): Question {
         return this.setDisplay("table");
     }
+
+    composeThisQuery(): ?Question {
+        const SAVED_QUESTIONS_FAUX_DATABASE = -1337;
+
+        if (this.id()) {
+            const card = {
+                display: "table",
+                dataset_query: {
+                    type: "query",
+                    database: SAVED_QUESTIONS_FAUX_DATABASE,
+                    query: {
+                        source_table: "card__" + this.id()
+                    }
+                }
+            };
+            return this.setCard(card);
+        }
+    }
+
     drillPK(field: Field, value: Value): ?Question {
         const query = this.query();
         if (query instanceof StructuredQuery) {
diff --git a/frontend/src/metabase-lib/lib/metadata/Base.js b/frontend/src/metabase-lib/lib/metadata/Base.js
index e6c72cd66f79aab2a8b8211b90762d610d18d07e..a4d7e9fda8146f3d694b1a512c623c99aab10753 100644
--- a/frontend/src/metabase-lib/lib/metadata/Base.js
+++ b/frontend/src/metabase-lib/lib/metadata/Base.js
@@ -1,7 +1,17 @@
 export default class Base {
+    _plainObject = null;
     constructor(object = {}) {
+        this._plainObject = object;
         for (const property in object) {
             this[property] = object[property];
         }
     }
+
+    /**
+     * Get the plain metadata object without hydrated fields.
+     * Useful for situations where you want to serialize the metadata object.
+     */
+    getPlainObject() {
+        return this._plainObject;
+    }
 }
diff --git a/frontend/src/metabase-lib/lib/metadata/Field.js b/frontend/src/metabase-lib/lib/metadata/Field.js
index ecbeb444e30eb269ded3e625fa6ef48ba052c402..26e81cea30dbf1c57b893183050a95ffddbfcf48 100644
--- a/frontend/src/metabase-lib/lib/metadata/Field.js
+++ b/frontend/src/metabase-lib/lib/metadata/Field.js
@@ -18,10 +18,13 @@ import {
     isMetric,
     isPK,
     isFK,
+    isCoordinate,
     getIconForField,
     getFieldType
 } from "metabase/lib/schema_metadata";
 
+import type { FieldValues } from "metabase/meta/types/Field";
+
 /**
  * Wrapper class for field metadata objects. Belongs to a Table.
  */
@@ -83,7 +86,11 @@ export default class Field extends Base {
         return isFK(this);
     }
 
-    fieldValues(): Array<string> {
+    isCoordinate() {
+        return isCoordinate(this);
+    }
+
+    fieldValues(): FieldValues {
         return getFieldValues(this._object);
     }
 
diff --git a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js
index 735f223554dc497bc691ace408f1b0979cab97bd..12b78b08e8466770c7cd3554dfc77699ba8a5c0c 100644
--- a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js
+++ b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js
@@ -33,6 +33,7 @@ import type {
 } from "metabase/meta/types/Metadata";
 
 import Dimension, {
+    FKDimension,
     ExpressionDimension,
     AggregationDimension
 } from "metabase-lib/lib/Dimension";
@@ -633,7 +634,7 @@ export default class StructuredQuery extends AtomicQuery {
             );
             for (const dimension of fkDimensions) {
                 const fkDimensions = dimension
-                    .dimensions()
+                    .dimensions([FKDimension])
                     .filter(dimensionFilter);
 
                 if (fkDimensions.length > 0) {
diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx
index ffdcb6bcdf7364f73406a100946a65240559d15d..3f425cb0b93ff280b105fe90b6aeace5eaa27769 100644
--- a/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx
+++ b/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx
@@ -4,9 +4,7 @@ import cx from "classnames";
 import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper.jsx";
 import DatabaseDetailsForm from "metabase/components/DatabaseDetailsForm.jsx";
 
-
 export default class DatabaseEditForms extends Component {
-
     static propTypes = {
         database: PropTypes.object,
         details: PropTypes.object,
@@ -18,7 +16,7 @@ export default class DatabaseEditForms extends Component {
     };
 
     render() {
-        let { database, details, hiddenFields, engines, formState: { formError, formSuccess } } = this.props;
+        let { database, details, hiddenFields, engines, formState: { formError, formSuccess, isSubmitting } } = this.props;
 
         let errors = {};
         return (
@@ -44,7 +42,8 @@ export default class DatabaseEditForms extends Component {
                               formSuccess={formSuccess}
                               hiddenFields={hiddenFields}
                               submitFn={(database) => this.props.save({ ...database, id: this.props.database.id }, database.details)}
-                              submitButtonText={'Save'}>
+                              submitButtonText={'Save'}
+                              submitting={isSubmitting}>
                           </DatabaseDetailsForm>
                           : null }
                     </div>
diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx b/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx
index c9ab23f2272ce899ad48c705bb6b801ea71d3f7b..2fd3a3d75fff6b37cd28d1f4cafebd00c21d4f0c 100644
--- a/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx
+++ b/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx
@@ -13,10 +13,12 @@ import DeleteDatabaseModal from "../components/DeleteDatabaseModal.jsx";
 
 import {
     getDatabasesSorted,
-    hasSampleDataset
+    hasSampleDataset,
+    getDeletes,
+    getDeletionError
 } from "../selectors";
 import * as databaseActions from "../database";
-
+import FormMessage from "metabase/components/form/FormMessage";
 
 const mapStateToProps = (state, props) => {
     return {
@@ -24,7 +26,8 @@ const mapStateToProps = (state, props) => {
         databases:            getDatabasesSorted(state),
         hasSampleDataset:     hasSampleDataset(state),
         engines:              MetabaseSettings.get('engines'),
-        deletes:              state.admin.databases.deletes
+        deletes:              getDeletes(state),
+        deletionError:        getDeletionError(state)
     }
 }
 
@@ -37,15 +40,23 @@ export default class DatabaseList extends Component {
     static propTypes = {
         databases: PropTypes.array,
         hasSampleDataset: PropTypes.bool,
-        engines: PropTypes.object
+        engines: PropTypes.object,
+        deletes: PropTypes.array,
+        deletionError: PropTypes.object
     };
 
     componentWillMount() {
         this.props.fetchDatabases();
     }
 
+    componentWillReceiveProps(newProps) {
+        if (!this.props.created && newProps.created) {
+            this.refs.createdDatabaseModal.open()
+        }
+    }
+
     render() {
-        let { databases, hasSampleDataset, created, engines } = this.props;
+        let { databases, hasSampleDataset, created, engines, deletionError } = this.props;
 
         return (
             <div className="wrapper">
@@ -53,6 +64,11 @@ export default class DatabaseList extends Component {
                     <Link to="/admin/databases/create" className="Button Button--primary float-right">Add database</Link>
                     <h2 className="PageTitle">Databases</h2>
                 </section>
+                { deletionError &&
+                    <section>
+                        <FormMessage formError={deletionError} />
+                    </section>
+                }
                 <section>
                     <table className="ContentTable">
                         <thead>
@@ -64,7 +80,7 @@ export default class DatabaseList extends Component {
                         </thead>
                         <tbody>
                             { databases ?
-                                databases.map(database => {
+                                [ databases.map(database => {
                                     const isDeleting = this.props.deletes.indexOf(database.id) !== -1
                                     return (
                                         <tr
@@ -98,7 +114,8 @@ export default class DatabaseList extends Component {
                                                 )
                                             }
                                         </tr>
-                                    )})
+                                    )}),
+                                ]
                             :
                                 <tr>
                                     <td colSpan={4}>
diff --git a/frontend/src/metabase/admin/databases/database.js b/frontend/src/metabase/admin/databases/database.js
index 84598f060221be8caf4df105798be651ea697ed2..f705c40d4b58aca2e96f24b5365e4c5e02486eb1 100644
--- a/frontend/src/metabase/admin/databases/database.js
+++ b/frontend/src/metabase/admin/databases/database.js
@@ -12,10 +12,17 @@ import { MetabaseApi } from "metabase/services";
 const RESET = "metabase/admin/databases/RESET";
 const SELECT_ENGINE = "metabase/admin/databases/SELECT_ENGINE";
 export const FETCH_DATABASES = "metabase/admin/databases/FETCH_DATABASES";
-const INITIALIZE_DATABASE = "metabase/admin/databases/INITIALIZE_DATABASE";
+export const INITIALIZE_DATABASE = "metabase/admin/databases/INITIALIZE_DATABASE";
 const ADD_SAMPLE_DATASET = "metabase/admin/databases/ADD_SAMPLE_DATASET";
-const SAVE_DATABASE = "metabase/admin/databases/SAVE_DATABASE";
+export const UPDATE_DATABASE = 'metabase/admin/databases/UPDATE_DATABASE'
+export const UPDATE_DATABASE_STARTED = 'metabase/admin/databases/UPDATE_DATABASE_STARTED'
+export const UPDATE_DATABASE_FAILED = 'metabase/admin/databases/UPDATE_DATABASE_FAILED'
+export const CREATE_DATABASE = 'metabase/admin/databases/CREATE_DATABASE'
+export const CREATE_DATABASE_STARTED = 'metabase/admin/databases/CREATE_DATABASE_STARTED'
+export const CREATE_DATABASE_FAILED = 'metabase/admin/databases/CREATE_DATABASE_FAILED'
 export const DELETE_DATABASE = "metabase/admin/databases/DELETE_DATABASE";
+export const DELETE_DATABASE_STARTED = 'metabase/admin/databases/DELETE_DATABASE_STARTED'
+export const DELETE_DATABASE_FAILED = "metabase/admin/databases/DELETE_DATABASE_FAILED";
 const SYNC_DATABASE = "metabase/admin/databases/SYNC_DATABASE";
 
 export const reset = createAction(RESET);
@@ -73,61 +80,69 @@ export const addSampleDataset = createThunkAction(ADD_SAMPLE_DATASET, function()
     };
 });
 
-// saveDatabase
-export const saveDatabase = createThunkAction(SAVE_DATABASE, function(database, details) {
-    return async function(dispatch, getState) {
-        let savedDatabase, formState;
-
+export const createDatabase = function (database) {
+    return async function (dispatch, getState) {
         try {
-            //$scope.$broadcast("form:reset");
-            database.details = details;
-            if (database.id) {
-                //$scope.$broadcast("form:api-success", "Successfully saved!");
-                savedDatabase = await MetabaseApi.db_update(database);
-                MetabaseAnalytics.trackEvent("Databases", "Update", database.engine);
-            } else {
-                //$scope.$broadcast("form:api-success", "Successfully created!");
-                //$scope.$emit("database:created", new_database);
-                savedDatabase = await MetabaseApi.db_create(database);
-                MetabaseAnalytics.trackEvent("Databases", "Create", database.engine);
-                dispatch(push('/admin/databases?created='+savedDatabase.id));
-            }
+            dispatch.action(CREATE_DATABASE_STARTED, { database })
+            const createdDatabase = await MetabaseApi.db_create(database);
+            MetabaseAnalytics.trackEvent("Databases", "Create", database.engine);
+
+            // refresh the database list here; otherwise there would be a gap between the "Adding..." status
+            // disappearing and the newly added database showing up in the list
+            await dispatch(fetchDatabases())
+            dispatch(push('/admin/databases?created=' + createdDatabase.id));
+            dispatch.action(CREATE_DATABASE, { database: createdDatabase })
+        } catch (error) {
+            console.error("error creating a database", error);
+            MetabaseAnalytics.trackEvent("Databases", "Create Failed", database.engine);
+            dispatch.action(CREATE_DATABASE_FAILED, { database, error })
+        }
+    };
+}
 
-            // this object format is what FormMessage expects:
-            formState = { formSuccess: { data: { message: "Successfully saved!" }}};
+export const updateDatabase = function(database) {
+    return async function(dispatch, getState) {
+        try {
+            dispatch.action(UPDATE_DATABASE_STARTED, { database })
+            const savedDatabase = await MetabaseApi.db_update(database);
+            MetabaseAnalytics.trackEvent("Databases", "Update", database.engine);
 
+            dispatch.action(UPDATE_DATABASE, { database: savedDatabase })
         } catch (error) {
-            //$scope.$broadcast("form:api-error", error);
-            console.error("error saving database", error);
-            MetabaseAnalytics.trackEvent("Databases", database.id ? "Update Failed" : "Create Failed", database.engine);
-            formState = { formError: error };
+            MetabaseAnalytics.trackEvent("Databases", "Update Failed", database.engine);
+            dispatch.action(UPDATE_DATABASE_FAILED, { error });
         }
+    };
+};
 
-        return {
-            database: savedDatabase,
-            formState
+// NOTE Atte Keinänen 7/26/17: Original monolithic saveDatabase was broken out to smaller actions
+// but the `saveDatabase` action creator is kept here so the interface for React components stays unchanged
+export const saveDatabase = function(database, details) {
+    return async function(dispatch, getState) {
+        database.details = details;
+        const isUnsavedDatabase = !database.id
+        if (isUnsavedDatabase) {
+            dispatch(createDatabase(database))
+        } else {
+            dispatch(updateDatabase(database))
         }
     };
-});
-
-const START_DELETE = 'metabase/admin/databases/START_DELETE'
-const startDelete = createAction(START_DELETE)
+};
 
-
-// deleteDatabase
-export const deleteDatabase = createThunkAction(DELETE_DATABASE, function(databaseId, redirect=true) {
+export const deleteDatabase = function(databaseId, isDetailView = true) {
     return async function(dispatch, getState) {
         try {
-            dispatch(startDelete(databaseId))
+            dispatch.action(DELETE_DATABASE_STARTED, { databaseId })
             dispatch(push('/admin/databases/'));
             await MetabaseApi.db_delete({"dbId": databaseId});
-            MetabaseAnalytics.trackEvent("Databases", "Delete", redirect ? "Using Detail" : "Using List");
-            return databaseId;
+            MetabaseAnalytics.trackEvent("Databases", "Delete", isDetailView ? "Using Detail" : "Using List");
+            dispatch.action(DELETE_DATABASE, { databaseId })
         } catch(error) {
             console.log('error deleting database', error);
+            dispatch.action(DELETE_DATABASE_FAILED, { databaseId, error })
         }
     };
-});
+}
 
 // syncDatabase
 export const syncDatabase = createThunkAction(SYNC_DATABASE, function(databaseId) {
@@ -148,35 +163,44 @@ export const syncDatabase = createThunkAction(SYNC_DATABASE, function(databaseId
 const databases = handleActions({
     [FETCH_DATABASES]: { next: (state, { payload }) => payload },
     [ADD_SAMPLE_DATASET]: { next: (state, { payload }) => payload ? [...state, payload] : state },
-    [DELETE_DATABASE]: { next: (state, { payload }) => payload ? _.reject(state, (d) => d.id === payload) : state }
+    [DELETE_DATABASE]: (state, { payload: { databaseId} }) =>
+        databaseId ? _.reject(state, (d) => d.id === databaseId) : state
 }, null);
 
 const editingDatabase = handleActions({
     [RESET]: { next: () => null },
     [INITIALIZE_DATABASE]: { next: (state, { payload }) => payload },
-    [SAVE_DATABASE]: { next: (state, { payload }) => payload.database || state },
+    [UPDATE_DATABASE]: { next: (state, { payload }) => payload.database || state },
     [DELETE_DATABASE]: { next: (state, { payload }) => null },
     [SELECT_ENGINE]: { next: (state, { payload }) => ({...state, engine: payload }) }
 }, null);
 
 const deletes = handleActions({
-    [START_DELETE]: {
-        next: (state, { payload }) => state.concat([payload])
-    },
-    [DELETE_DATABASE]: {
-        next: (state, { payload }) => state.splice(state.indexOf(payload), 1)
-    }
+    [DELETE_DATABASE_STARTED]: (state, { payload: { databaseId } }) => state.concat([databaseId]),
+    [DELETE_DATABASE_FAILED]: (state, { payload: { databaseId, error } }) => state.filter((dbId) => dbId !== databaseId),
+    [DELETE_DATABASE]: (state, { payload: { databaseId } }) => state.filter((dbId) => dbId !== databaseId)
 }, []);
 
-const DEFAULT_FORM_STATE = { formSuccess: null, formError: null };
+const deletionError = handleActions({
+    [DELETE_DATABASE_FAILED]: (state, { payload: { error } }) => error,
+}, null)
+
+const DEFAULT_FORM_STATE = { formSuccess: null, formError: null, isSubmitting: false };
 const formState = handleActions({
     [RESET]: { next: () => DEFAULT_FORM_STATE },
-    [SAVE_DATABASE]: { next: (state, { payload }) => payload.formState }
+    [CREATE_DATABASE_STARTED]: () => ({ isSubmitting: true }),
+    // not necessarily needed as the page is immediately redirected after db creation
+    [CREATE_DATABASE]: () => ({ formSuccess: { data: { message: "Successfully created!" } } }),
+    [CREATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ formError: error }),
+    [UPDATE_DATABASE_STARTED]: () => ({ isSubmitting: true }),
+    [UPDATE_DATABASE]: () => ({ formSuccess: { data: { message: "Successfully saved!" } } }),
+    [UPDATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ formError: error }),
 }, DEFAULT_FORM_STATE);
 
 export default combineReducers({
     databases,
     editingDatabase,
+    deletionError,
     formState,
     deletes
 });
diff --git a/frontend/src/metabase/admin/databases/selectors.js b/frontend/src/metabase/admin/databases/selectors.js
index e9371fb171f2463bf258134b5f7c2272a5f26e5d..a632b5692041c056abe528986352d4ac17310f4e 100644
--- a/frontend/src/metabase/admin/databases/selectors.js
+++ b/frontend/src/metabase/admin/databases/selectors.js
@@ -21,3 +21,6 @@ export const hasSampleDataset = createSelector(
 // Database Edit
 export const getEditingDatabase   = state => state.admin.databases.editingDatabase;
 export const getFormState         = state => state.admin.databases.formState;
+
+export const getDeletes           = state => state.admin.databases.deletes;
+export const getDeletionError     = state => state.admin.databases.deletionError;
diff --git a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx
index a1283fd11080ced28ad6dc43f862172320ec0e77..9d0fcedcc776b64a4a5dcd3af43696953572b938 100644
--- a/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx
+++ b/frontend/src/metabase/admin/datamodel/components/database/ColumnItem.jsx
@@ -1,8 +1,10 @@
 import React, { Component } from "react";
 import PropTypes from "prop-types";
+import { Link, withRouter } from "react-router";
 
 import Input from "metabase/components/Input.jsx";
 import Select from "metabase/components/Select.jsx";
+import Icon from "metabase/components/Icon";
 
 import * as MetabaseCore from "metabase/lib/core";
 import { titleize, humanize } from "metabase/lib/formatting";
@@ -10,14 +12,17 @@ import { isNumericBaseType } from "metabase/lib/schema_metadata";
 import { TYPE, isa, isFK } from "metabase/lib/types";
 
 import _  from "underscore";
+import cx from "classnames";
 
+import type { Field } from "metabase/meta/types/Field"
+import MetabaseAnalytics from "metabase/lib/analytics";
+
+@withRouter
 export default class Column extends Component {
     constructor(props, context) {
         super(props, context);
         this.onDescriptionChange = this.onDescriptionChange.bind(this);
         this.onNameChange = this.onNameChange.bind(this);
-        this.onSpecialTypeChange = this.onSpecialTypeChange.bind(this);
-        this.onTargetChange = this.onTargetChange.bind(this);
         this.onVisibilityChange = this.onVisibilityChange.bind(this);
     }
 
@@ -25,8 +30,6 @@ export default class Column extends Component {
         field: PropTypes.object,
         idfields: PropTypes.array.isRequired,
         updateField: PropTypes.func.isRequired,
-        updateFieldSpecialType: PropTypes.func.isRequired,
-        updateFieldTarget: PropTypes.func.isRequired
     };
 
     updateProperty(name, value) {
@@ -51,70 +54,154 @@ export default class Column extends Component {
         this.updateProperty("visibility_type", type.id);
     }
 
-    onSpecialTypeChange(special_type) {
-        this.props.field.special_type = special_type.id;
-        this.props.updateFieldSpecialType(this.props.field);
+    render() {
+        const { field, idfields, updateField } = this.props;
+
+        return (
+            <li className="mt1 mb3 flex">
+                <div className="flex flex-column flex-full">
+                    <div>
+                        <Input style={{minWidth: 420}} className="AdminInput TableEditor-field-name float-left bordered inline-block rounded text-bold" type="text" value={this.props.field.display_name || ""} onBlurChange={this.onNameChange}/>
+                        <div className="clearfix">
+                            <div className="flex flex-full">
+                                <div className="flex-full px1">
+                                    <FieldVisibilityPicker
+                                        className="block"
+                                        field={field}
+                                        updateField={updateField}
+                                    />
+                                </div>
+                                <div className="flex-full px1">
+                                    <SpecialTypeAndTargetPicker
+                                        className="block"
+                                        field={field}
+                                        updateField={updateField}
+                                        idfields={idfields}
+                                    />
+                                </div>
+                            </div>
+                        </div>
+                    </div>
+                    <div className="MetadataTable-title flex flex-column flex-full bordered rounded mt1 mr1">
+                        <Input className="AdminInput TableEditor-field-description" type="text" value={this.props.field.description || ""} onBlurChange={this.onDescriptionChange} placeholder="No column description yet" />
+                    </div>
+                </div>
+                <Link to={`${this.props.location.pathname}/${this.props.field.id}`} className="text-brand-hover mx2 mt1">
+                    <Icon name="gear" />
+                </Link>
+            </li>
+        )
+    }
+}
+
+// FieldVisibilityPicker and SpecialTypeSelect are also used in FieldApp
+
+export class FieldVisibilityPicker extends Component {
+    props: {
+        field: Field,
+        updateField: (Field) => void,
+        className?: string
     }
 
-    onTargetChange(target_field) {
-        this.props.field.fk_target_field_id = target_field.id;
-        this.props.updateFieldTarget(this.props.field);
+    onVisibilityChange = (visibilityType) => {
+        const { field } = this.props
+        field.visibility_type = visibilityType.id;
+        this.props.updateField(field);
     }
 
     render() {
-        var targetSelect;
-        if (isFK(this.props.field.special_type)) {
-            targetSelect = (
-                <Select
-                    className="TableEditor-field-target block"
-                    placeholder="Select a target"
-                    value={this.props.field.fk_target_field_id && _.find(this.props.idfields, (field) => field.id === this.props.field.fk_target_field_id)}
-                    options={this.props.idfields}
-                    optionNameFn={(field) => field.table.schema && field.table.schema !== "public" ? titleize(humanize(field.table.schema))+"."+field.displayName : field.displayName}
-                    onChange={this.onTargetChange}
-                />
-            );
+        const { field, className } = this.props;
+
+        return (
+            <Select
+                className={cx("TableEditor-field-visibility block", className)}
+                placeholder="Select a field visibility"
+                value={_.find(MetabaseCore.field_visibility_types, (type) => { return type.id === field.visibility_type })}
+                options={MetabaseCore.field_visibility_types}
+                onChange={this.onVisibilityChange}
+                triggerClasses={this.props.triggerClasses}
+            />
+        )
+    }
+}
+
+export class SpecialTypeAndTargetPicker extends Component {
+    props: {
+        field: Field,
+        updateField: (Field) => void,
+        className?: string,
+        selectSeparator?: React$Element<any>
+    }
+
+    onSpecialTypeChange = async (special_type) => {
+        const { field, updateField } = this.props;
+        field.special_type = special_type.id;
+
+        // If we are changing the field from a FK to something else, we should delete any FKs present
+        if (field.target && field.target.id != null && isFK(field.special_type)) {
+            // we have something that used to be an FK and is now not an FK
+            // clean up after ourselves
+            field.target = null;
+            field.fk_target_field_id = null;
         }
 
+        await updateField(field);
+
+        MetabaseAnalytics.trackEvent("Data Model", "Update Field Special-Type", field.special_type);
+    }
+
+    onTargetChange = async (target_field) => {
+        const { field, updateField } = this.props;
+        field.fk_target_field_id = target_field.id;
+
+        await updateField(field);
+
+        MetabaseAnalytics.trackEvent("Data Model", "Update Field Target");
+    }
+
+    render() {
+        const { field, idfields, className, selectSeparator } = this.props;
+
         let specialTypes = MetabaseCore.field_special_types.slice(0);
         specialTypes.push({'id': null, 'name': 'No special type', 'section': 'Other'});
         // if we don't have a numeric base-type then prevent the options for unix timestamp conversion (#823)
-        if (!isNumericBaseType(this.props.field)) {
+        if (!isNumericBaseType(field)) {
             specialTypes = specialTypes.filter((f) => !isa(f.id, TYPE.UNIXTimestamp));
         }
 
+        const showFKTargetSelect = isFK(field.special_type);
+
+        // If all FK target fields are in the same schema (like `PUBLIC` for sample dataset)
+        // or if there are no schemas at all, omit the schema name
+        const includeSchemaName = _.uniq(idfields.map((idField) => idField.table.schema)).length > 1
+
         return (
-            <li className="mt1 mb3">
-                <div>
-                    <Input style={{minWidth: 420}} className="AdminInput TableEditor-field-name float-left bordered inline-block rounded text-bold" type="text" value={this.props.field.display_name || ""} onBlurChange={this.onNameChange}/>
-                    <div className="clearfix">
-                        <div className="flex flex-full">
-                            <div className="flex-full px1">
-                                <Select
-                                    className="TableEditor-field-visibility block"
-                                    placeholder="Select a field visibility"
-                                    value={_.find(MetabaseCore.field_visibility_types, (type) => type.id === this.props.field.visibility_type)}
-                                    options={MetabaseCore.field_visibility_types}
-                                    onChange={this.onVisibilityChange}
-                                />
-                            </div>
-                            <div className="flex-full px1">
-                                <Select
-                                    className="TableEditor-field-special-type block"
-                                    placeholder="Select a special type"
-                                    value={_.find(MetabaseCore.field_special_types, (type) => type.id === this.props.field.special_type)}
-                                    options={specialTypes}
-                                    onChange={this.onSpecialTypeChange}
-                                />
-                                {targetSelect}
-                            </div>
-                        </div>
-                    </div>
-                </div>
-                <div className="MetadataTable-title flex flex-column flex-full bordered rounded mt1 mr1">
-                    <Input className="AdminInput TableEditor-field-description" type="text" value={this.props.field.description || ""} onBlurChange={this.onDescriptionChange} placeholder="No column description yet" />
-                </div>
-            </li>
+            <div>
+                <Select
+                    className={cx("TableEditor-field-special-type", className)}
+                    placeholder="Select a special type"
+                    value={_.find(MetabaseCore.field_special_types, (type) => type.id === field.special_type)}
+                    options={specialTypes}
+                    onChange={this.onSpecialTypeChange}
+                    triggerClasses={this.props.triggerClasses}
+                />
+                { showFKTargetSelect && selectSeparator }
+                { showFKTargetSelect && <Select
+                    className={cx("TableEditor-field-target", className)}
+                    triggerClasses={this.props.triggerClasses}
+                    placeholder="Select a target"
+                    value={field.fk_target_field_id && _.find(idfields, (idField) => idField.id === field.fk_target_field_id)}
+                    options={idfields}
+                    optionNameFn={
+                        (idField) => includeSchemaName
+                            ? titleize(humanize(idField.table.schema)) + "." + idField.displayName
+                            : idField.displayName
+                    }
+                    onChange={this.onTargetChange}
+                /> }
+            </div>
         )
     }
 }
+
+
diff --git a/frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx b/frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx
index 306b46a5cc5c22347e90394618fa397a8f21c9ae..c2ec39151ab44b0f55fd94f4990565e2058ce450 100644
--- a/frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx
+++ b/frontend/src/metabase/admin/datamodel/components/database/ColumnsList.jsx
@@ -7,9 +7,7 @@ export default class ColumnsList extends Component {
     static propTypes = {
         tableMetadata: PropTypes.object,
         idfields: PropTypes.array,
-        updateField: PropTypes.func.isRequired,
-        updateFieldSpecialType: PropTypes.func.isRequired,
-        updateFieldTarget: PropTypes.func.isRequired
+        updateField: PropTypes.func.isRequired
     };
 
     render() {
@@ -31,8 +29,6 @@ export default class ColumnsList extends Component {
                             field={field}
                             idfields={this.props.idfields}
                             updateField={this.props.updateField}
-                            updateFieldSpecialType={this.props.updateFieldSpecialType}
-                            updateFieldTarget={this.props.updateFieldTarget}
                         />
                     )}
                 </ol>
diff --git a/frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx b/frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx
index 190f29442097dabc5623bc672a3d99a65301b12f..7125d55237e973f58aa7ade4651e18f7b8bb0641 100644
--- a/frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx
+++ b/frontend/src/metabase/admin/datamodel/components/database/MetadataTable.jsx
@@ -23,9 +23,7 @@ export default class MetadataTable extends Component {
         tableMetadata: PropTypes.object,
         idfields: PropTypes.array.isRequired,
         updateTable: PropTypes.func.isRequired,
-        updateField: PropTypes.func.isRequired,
-        updateFieldSpecialType: PropTypes.func.isRequired,
-        updateFieldTarget: PropTypes.func.isRequired
+        updateField: PropTypes.func.isRequired
     };
 
     isHidden() {
@@ -111,8 +109,6 @@ export default class MetadataTable extends Component {
                         tableMetadata={tableMetadata}
                         idfields={this.props.idfields}
                         updateField={this.props.updateField}
-                        updateFieldSpecialType={this.props.updateFieldSpecialType}
-                        updateFieldTarget={this.props.updateFieldTarget}
                     />
                 </div>
             </div>
diff --git a/frontend/src/metabase/admin/datamodel/containers/FieldApp.jsx b/frontend/src/metabase/admin/datamodel/containers/FieldApp.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..fe5c891c2440bd830c32a74026910987a74f3492
--- /dev/null
+++ b/frontend/src/metabase/admin/datamodel/containers/FieldApp.jsx
@@ -0,0 +1,624 @@
+/**
+ * Settings editor for a single database field. Lets you change field type, visibility and display values / remappings.
+ *
+ * TODO Atte Keinänen 7/6/17: This uses the standard metadata API; we should migrate also other parts of admin section
+ */
+
+import React, { Component } from 'react'
+import { Link } from 'react-router'
+import { connect } from "react-redux";
+import _ from "underscore";
+import cx from "classnames";
+
+import Icon from 'metabase/components/Icon'
+import Input from 'metabase/components/Input'
+import Select from 'metabase/components/Select'
+import SaveStatus from "metabase/components/SaveStatus";
+import Breadcrumbs from "metabase/components/Breadcrumbs";
+import ButtonWithStatus from "metabase/components/ButtonWithStatus";
+import MetabaseAnalytics from "metabase/lib/analytics";
+
+import { getMetadata } from "metabase/selectors/metadata";
+import * as metadataActions from "metabase/redux/metadata";
+import * as datamodelActions from "../datamodel"
+
+import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper";
+import SelectButton from "metabase/components/SelectButton";
+import PopoverWithTrigger from "metabase/components/PopoverWithTrigger";
+import FieldList from "metabase/query_builder/components/FieldList";
+import {
+    FieldVisibilityPicker,
+    SpecialTypeAndTargetPicker
+} from "metabase/admin/datamodel/components/database/ColumnItem";
+import { getDatabaseIdfields } from "metabase/admin/datamodel/selectors";
+import Metadata from "metabase-lib/lib/metadata/Metadata";
+import Question from "metabase-lib/lib/Question";
+import { DatetimeFieldDimension } from "metabase-lib/lib/Dimension";
+
+const SelectClasses = 'h3 bordered border-dark shadowed p2 inline-block flex align-center rounded text-bold'
+
+const mapStateToProps = (state, props) => {
+    return {
+        databaseId: parseInt(props.params.databaseId),
+        tableId: parseInt(props.params.tableId),
+        fieldId: parseInt(props.params.fieldId),
+        metadata: getMetadata(state),
+        idfields: getDatabaseIdfields(state)
+    }
+}
+
+const mapDispatchToProps = {
+    fetchDatabaseMetadata: metadataActions.fetchDatabaseMetadata,
+    fetchTableMetadata: metadataActions.fetchTableMetadata,
+    updateField: metadataActions.updateField,
+    updateFieldValues: metadataActions.updateFieldValues,
+    updateFieldDimension: metadataActions.updateFieldDimension,
+    deleteFieldDimension: metadataActions.deleteFieldDimension,
+    fetchDatabaseIdfields: datamodelActions.fetchDatabaseIdfields
+}
+
+@connect(mapStateToProps, mapDispatchToProps)
+export default class FieldApp extends Component {
+    saveStatus: null
+
+    props: {
+        databaseId: number,
+        tableId: number,
+        fieldId: number,
+        metadata: Metadata,
+        idfields: Object[],
+
+        fetchDatabaseMetadata: (number) => Promise<void>,
+        fetchTableMetadata: (number) => Promise<void>,
+        updateField: (any) => Promise<void>,
+        updateFieldValues: (any) => Promise<void>,
+        updateFieldDimension: (any) => Promise<void>,
+        deleteFieldDimension: (any) => Promise<void>,
+        fetchDatabaseIdfields: (number) => Promise<void>
+    }
+
+    async componentWillMount() {
+        const {databaseId, tableId, fetchDatabaseMetadata, fetchTableMetadata, fetchDatabaseIdfields} = this.props;
+
+        // A complete database metadata is needed in case that foreign key is changed
+        // and then we need to show FK remapping options for a new table
+        await fetchDatabaseMetadata(databaseId);
+
+        // Only fetchTableMetadata hydrates `dimension` in the field object
+        // Force reload to ensure that we are not showing stale information
+        await fetchTableMetadata(tableId, true);
+
+        // TODO Atte Keinänen 7/10/17: Migrate this to redux/metadata
+        await fetchDatabaseIdfields(databaseId);
+    }
+
+    linkWithSaveStatus = (saveMethod) => {
+        const self = this;
+        return async (...params) => {
+            self.saveStatus && self.saveStatus.setSaving();
+            await saveMethod(...params);
+            self.saveStatus && self.saveStatus.setSaved();
+        }
+    }
+
+    onUpdateField = this.linkWithSaveStatus(this.props.updateField)
+    onUpdateFieldProperties = this.linkWithSaveStatus(async (fieldProps) => {
+        const { metadata, fieldId } = this.props;
+        const field = metadata.fields[fieldId];
+
+        if (field) {
+            // The `table` and `target` properties are part of the fully connected metadata graph; drop them
+            // because they make conversion to JSON impossible due to the cyclical data structure
+            await this.props.updateField({ ...field.getPlainObject(), ...fieldProps });
+        } else {
+            console.warn("Updating field properties in fields settings failed because of missing field metadata")
+        }
+    })
+    onUpdateFieldValues = this.linkWithSaveStatus(this.props.updateFieldValues)
+    onUpdateFieldDimension = this.linkWithSaveStatus(this.props.updateFieldDimension)
+    onDeleteFieldDimension = this.linkWithSaveStatus(this.props.deleteFieldDimension)
+
+    render () {
+        const {
+            metadata,
+            fieldId,
+            databaseId,
+            tableId,
+            idfields,
+            fetchTableMetadata
+        } = this.props;
+
+        const db = metadata.databases[databaseId]
+        const field = metadata.fields[fieldId]
+        const table = metadata.tables[tableId]
+
+        const isLoading = !field || !table || !idfields
+
+        return (
+            <LoadingAndErrorWrapper loading={isLoading} error={null} noWrapper>
+                { () =>
+                    <div className="relative">
+                        <div className="wrapper wrapper--trim">
+                            <BackButton databaseId={databaseId} tableId={tableId} />
+                            <div className="my4 py1 ml-auto mr-auto">
+                                <Breadcrumbs
+                                    crumbs={[
+                                        [db.name, `/admin/datamodel/database/${db.id}`],
+                                        [table.display_name, `/admin/datamodel/database/${db.id}/table/${table.id}`],
+                                        `${field.display_name} – Field Settings`,
+                                    ]}
+                                />
+                            </div>
+                            <div className="absolute top right mt4 mr4">
+                                <SaveStatus ref={(ref) => this.saveStatus = ref}/>
+                            </div>
+
+                            <Section>
+                                <FieldHeader
+                                    field={field}
+                                    updateFieldProperties={this.onUpdateFieldProperties}
+                                    updateFieldDimension={this.onUpdateFieldDimension}
+                                />
+                            </Section>
+
+                            <Section>
+                                <SectionHeader title="Visibility"
+                                               description="Where this field will appear throughout Metabase"/>
+                                <FieldVisibilityPicker
+                                    triggerClasses={SelectClasses}
+                                    field={field.getPlainObject()}
+                                    updateField={this.onUpdateField}
+                                />
+                            </Section>
+
+                            <Section>
+                                <SectionHeader title="Type" />
+                                <SpecialTypeAndTargetPicker
+                                    triggerClasses={SelectClasses}
+                                    field={field.getPlainObject()}
+                                    updateField={this.onUpdateField}
+                                    idfields={idfields}
+                                    selectSeparator={<SelectSeparator />}
+                                />
+                            </Section>
+
+                            <Section>
+                                <FieldRemapping
+                                    field={field}
+                                    table={table}
+                                    fields={metadata.fields}
+                                    updateFieldProperties={this.onUpdateFieldProperties}
+                                    updateFieldValues={this.onUpdateFieldValues}
+                                    updateFieldDimension={this.onUpdateFieldDimension}
+                                    deleteFieldDimension={this.onDeleteFieldDimension}
+                                    fetchTableMetadata={fetchTableMetadata}
+                                />
+                            </Section>
+                        </div>
+                    </div>
+                }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+// TODO: Should this invoke goBack() instead?
+// Not sure if it's possible to do that neatly with the Link component
+export const BackButton = ({ databaseId, tableId }) =>
+    <Link
+        to={`/admin/datamodel/database/${databaseId}/table/${tableId}`}
+        className="circle text-white p2 mt3 ml3 flex align-center justify-center  absolute top left"
+        style={{ backgroundColor: '#8091AB' }}
+    >
+        <Icon name="backArrow" />
+    </Link>
+
+const SelectSeparator = () =>
+    <Icon
+        name="chevronright"
+        size={12}
+        className="mx2 text-grey-3"
+    />
+
+export class FieldHeader extends Component {
+    onNameChange = (e) => {
+        this.updateNameDebounced(e.target.value);
+    }
+    onDescriptionChange = (e) => {
+        this.updateDescriptionDebounced(e.target.value)
+    }
+
+    // Separate update methods because of throttling the input
+    updateNameDebounced = _.debounce(async (name) => {
+        const { field, updateFieldProperties, updateFieldDimension } = this.props;
+
+        // Update the dimension name if it exists
+        // TODO: Have a separate input field for the dimension name?
+        if (!_.isEmpty(field.dimensions)) {
+            await updateFieldDimension(field.id, {
+                type: field.dimensions.type,
+                human_readable_field_id: field.dimensions.human_readable_field_id,
+                name
+            })
+        }
+
+        // TODO: How should empty or too-long strings be treated? See how this is done in Column
+        updateFieldProperties({ display_name: name })
+    }, 300)
+
+    updateDescriptionDebounced = _.debounce((description) => {
+        const { updateFieldProperties } = this.props
+        updateFieldProperties({ description })
+    }, 300);
+
+
+    render () {
+        return (
+            <div>
+                <Input
+                    className="h1 AdminInput bordered rounded border-dark block mb1"
+                    value={this.props.field.display_name}
+                    onChange={this.onNameChange}
+                    placeholder={this.props.field.name}
+                />
+                <Input
+                    className="text AdminInput bordered input text-measure block full"
+                    value={this.props.field.description}
+                    onChange={this.onDescriptionChange}
+                    placeholder="No description for this field yet"
+                />
+            </div>
+        )
+    }
+}
+
+// consider renaming this component to something more descriptive
+export class ValueRemappings extends Component {
+    constructor(props, context) {
+        super(props, context);
+
+        const editingRemappings = new Map([...props.remappings]
+            .map(([original, mappedOrUndefined]) => {
+                // For now, use the original value as the "default custom mapping", because the current backend
+                // implementation requires that every original value has a corresponding mapping
+
+                // Additionally, the defensive `.toString` ensures that the mapped value will definitely be a string
+                const mappedString =
+                    mappedOrUndefined !== undefined ? mappedOrUndefined.toString() : original.toString();
+
+                return [original, mappedString]
+            })
+        )
+
+        const containsUnsetMappings = [...props.remappings].some(([_, mappedOrUndefined]) => {
+            return mappedOrUndefined === undefined;
+        })
+        if (containsUnsetMappings) {
+            // Save the initial values to make sure that we aren't left in a potentially broken state where
+            // the dimension type is "internal" but we don't have any values in metabase_fieldvalues
+            this.props.updateRemappings(editingRemappings);
+        }
+
+        this.state = {
+            editingRemappings
+        }
+    }
+
+    onSetRemapping(original, newMapped) {
+        this.setState({
+            editingRemappings: new Map([
+                ...this.state.editingRemappings,
+                [original, newMapped]
+            ])
+        });
+    }
+
+    onSaveClick = () => {
+        MetabaseAnalytics.trackEvent("Data Model", "Update Custom Remappings");
+        // Returns the promise so that ButtonWithStatus can show the saving status
+        return this.props.updateRemappings(this.state.editingRemappings);
+    }
+
+    customValuesAreNonEmpty = () => {
+        return Array.from(this.state.editingRemappings.values())
+            .every((value) => value !== "")
+    }
+
+    render () {
+        const { editingRemappings } = this.state;
+
+        return (
+            <div className="bordered rounded py2 px4 border-dark">
+                <div className="flex align-center my1 pb2 border-bottom">
+                    <h3>Original value</h3>
+                    <h3 className="ml-auto">Mapped value</h3>
+                </div>
+                <ol>
+                    { [...editingRemappings].map(([original, mapped]) =>
+                        <li className="mb1">
+                            <FieldValueMapping
+                                original={original}
+                                mapped={mapped}
+                                setMapping={(newMapped) => this.onSetRemapping(original, newMapped) }
+                            />
+                        </li>
+                    )}
+                </ol>
+                <div className="flex align-center">
+                    <ButtonWithStatus
+                        className="ml-auto"
+                        disabled={!this.customValuesAreNonEmpty()}
+                        onClickOperation={this.onSaveClick}
+                    >
+                        Save
+                    </ButtonWithStatus>
+                </div>
+            </div>
+        )
+    }
+}
+
+export class FieldValueMapping extends Component {
+    onInputChange = (e) => {
+        this.props.setMapping(e.target.value)
+    }
+
+    render () {
+        const { original, mapped } = this.props
+        return (
+            <div className="flex align-center">
+                <h3>{original}</h3>
+                <Input
+                    className="AdminInput input ml-auto"
+                    value={mapped}
+                    onChange={this.onInputChange}
+                    placeholder={"Enter value"}
+                />
+            </div>
+        )
+    }
+}
+
+const Section = ({ children }) => <section className="my3">{children}</section>
+
+const SectionHeader = ({ title, description }) =>
+    <div className="border-bottom py2 mb2">
+        <h2 className="text-italic">{title}</h2>
+        { description && <p className="mb0 text-grey-4 mt1 text-paragraph text-measure">{description}</p> }
+    </div>
+
+const MAP_OPTIONS = {
+    original: { type: "original", name: 'Use original value' },
+    foreign:  { type: "foreign", name: 'Use foreign key' },
+    custom:   { type: "custom", name: 'Custom mapping' }
+}
+
+export class FieldRemapping extends Component {
+    state = {
+        isChoosingInitialFkTarget: false,
+        dismissedInitialFkTargetPopover: false
+    }
+
+    constructor(props, context) {
+        super(props, context);
+    }
+
+    getMappingTypeForField = (field) => {
+        if (this.state.isChoosingInitialFkTarget) return MAP_OPTIONS.foreign;
+
+        if (_.isEmpty(field.dimensions)) return MAP_OPTIONS.original;
+        if (field.dimensions.type === "external") return MAP_OPTIONS.foreign;
+        if (field.dimensions.type === "internal") return MAP_OPTIONS.custom;
+
+        throw new Error("Unrecognized mapping type");
+    }
+
+    getAvailableMappingTypes = () => {
+        const { field } = this.props;
+
+        const hasForeignKeys = field.special_type === "type/FK" && this.getForeignKeys().length > 0;
+
+        // Only show the "custom" option if we have some values that can be mapped to user-defined custom values
+        // (for a field without user-defined remappings, every key of `field.remapping` has value `undefined`)
+        const hasMappableNumeralValues =
+            field.remapping.size > 0 &&
+            [...field.remapping.keys()].every((key) => typeof key === "number" );
+
+        return [
+            MAP_OPTIONS.original,
+            ...(hasForeignKeys ? [MAP_OPTIONS.foreign] : []),
+            ...(hasMappableNumeralValues > 0 ? [MAP_OPTIONS.custom] : [])
+        ]
+    }
+
+    getFKTargetTableEntityNameOrNull = () => {
+        const fks = this.getForeignKeys()
+        const fkTargetFields = fks[0] && fks[0].dimensions.map((dim) => dim.field());
+
+        if (fkTargetFields) {
+            // TODO Atte Keinänen 7/11/17: Should there be `isName(field)` in Field.js?
+            const nameField = fkTargetFields.find((field) => field.special_type === "type/Name")
+            return nameField ? nameField.id : null;
+        } else {
+            throw new Error("Current field isn't a foreign key or FK target table metadata is missing")
+        }
+    }
+
+    clearEditingStates = () => {
+        this.setState({ isChoosingInitialFkTarget: false, dismissedInitialFkTargetPopover: false });
+    }
+
+    onSetMappingType = async (mappingType) => {
+        const { table, field, fetchTableMetadata, updateFieldDimension, deleteFieldDimension } = this.props;
+
+        this.clearEditingStates();
+
+
+        if (mappingType.type === "original") {
+            MetabaseAnalytics.trackEvent("Data Model", "Change Remapping Type", "No Remapping");
+            await deleteFieldDimension(field.id)
+            this.setState({ hasChanged: false })
+        } else if (mappingType.type === "foreign") {
+            // Try to find an entity name field in the target table and, if it exists, choose it as the remapping target field
+            const entityNameFieldId = this.getFKTargetTableEntityNameOrNull();
+
+            if (entityNameFieldId) {
+                MetabaseAnalytics.trackEvent("Data Model", "Change Remapping Type", "Foreign Key");
+                await updateFieldDimension(field.id, {
+                    type: "external",
+                    name: field.display_name,
+                    human_readable_field_id: entityNameFieldId
+                })
+            } else {
+                // Enter a special state where we are choosing an initial value for FK target
+                this.setState({
+                    hasChanged: true,
+                    isChoosingInitialFkTarget: true
+                });
+            }
+
+        } else if (mappingType.type === "custom") {
+            MetabaseAnalytics.trackEvent("Data Model", "Change Remapping Type", "Custom Remappings");
+            await updateFieldDimension(field.id, {
+                type: "internal",
+                name: field.display_name,
+                human_readable_field_id: null
+            })
+            this.setState({ hasChanged: true })
+        } else {
+            throw new Error("Unrecognized mapping type");
+        }
+
+        // TODO Atte Keinänen 7/11/17: It's a pretty heavy approach to reload the whole table after a single field
+        // has been updated; would be nicer to just fetch a single field. MetabaseApi.field_get seems to exist for that
+        await fetchTableMetadata(table.id, true);
+    }
+
+    onForeignKeyFieldChange = async (foreignKeyClause) => {
+        const { table, field, fetchTableMetadata, updateFieldDimension } = this.props;
+
+        this.clearEditingStates();
+
+        // TODO Atte Keinänen 7/10/17: Use Dimension class when migrating to metabase-lib
+        if (foreignKeyClause.length === 3 && foreignKeyClause[0] === "fk->") {
+            MetabaseAnalytics.trackEvent("Data Model", "Update FK Remapping Target");
+            await updateFieldDimension(field.id, {
+                type: "external",
+                name: field.display_name,
+                human_readable_field_id: foreignKeyClause[2]
+            })
+
+            await fetchTableMetadata(table.id, true);
+
+            this.refs.fkPopover.close()
+        } else {
+            throw new Error("The selected field isn't a foreign key")
+        }
+
+    }
+
+    onUpdateRemappings = (remappings) => {
+        const { field, updateFieldValues } = this.props;
+        return updateFieldValues(field.id, Array.from(remappings));
+    }
+
+    // TODO Atte Keinänen 7/11/17: Should we have stricter criteria for valid remapping targets?
+    isValidFKRemappingTarget = (dimension) => !(dimension.defaultDimension() instanceof DatetimeFieldDimension)
+
+    getForeignKeys = () => {
+        const { table, field } = this.props;
+
+        // this method has a little odd structure due to using fieldOptions(); basically filteredFKs should
+        // always be an array with a single value
+        const metadata = table.metadata;
+        const fieldOptions = Question.create({ metadata, databaseId: table.db.id, tableId: table.id }).query().fieldOptions();
+        const unfilteredFks = fieldOptions.fks
+        const filteredFKs = unfilteredFks.filter(fk => fk.field.id === field.id);
+
+        return filteredFKs.map(filteredFK => ({
+            field: filteredFK.field,
+            dimension: filteredFK.dimension,
+            dimensions: filteredFK.dimensions.filter(this.isValidFKRemappingTarget)
+        }));
+    }
+
+    onFkPopoverDismiss = () => {
+        const { isChoosingInitialFkTarget } = this.state;
+
+        if (isChoosingInitialFkTarget) {
+            this.setState({ dismissedInitialFkTargetPopover: true })
+        }
+    }
+
+    render () {
+        const { field, table, fields} = this.props;
+        const { isChoosingInitialFkTarget, hasChanged, dismissedInitialFkTargetPopover } = this.state;
+
+        const mappingType = this.getMappingTypeForField(field)
+        const isFKMapping = mappingType === MAP_OPTIONS.foreign;
+        const hasFKMappingValue = isFKMapping && field.dimensions.human_readable_field_id !== null;
+        const fkMappingField = hasFKMappingValue && fields[field.dimensions.human_readable_field_id];
+
+        return (
+            <div>
+                <SectionHeader
+                    title='Display values'
+                    description="Choose to show the original value from the database, or have this field display associated or custom information."
+                />
+                <Select
+                    triggerClasses={SelectClasses}
+                    value={mappingType}
+                    onChange={this.onSetMappingType}
+                    options={this.getAvailableMappingTypes()}
+                />
+                { mappingType === MAP_OPTIONS.foreign && [
+                    <SelectSeparator key="foreignKeySeparator" />,
+                    <PopoverWithTrigger
+                        ref="fkPopover"
+                        triggerElement={
+                            <SelectButton
+                                hasValue={hasFKMappingValue}
+                                className={cx(
+                                    "flex inline-block no-decoration h3 p2 shadowed",
+                                    {
+                                        "border-error": dismissedInitialFkTargetPopover,
+                                        "border-dark": !dismissedInitialFkTargetPopover
+                                    }
+                                )}
+                            >
+                                {fkMappingField ? fkMappingField.display_name : <span className="text-grey-1">Choose a field</span>}
+                            </SelectButton>
+                        }
+                        isInitiallyOpen={isChoosingInitialFkTarget}
+                        onClose={this.onFkPopoverDismiss}
+                    >
+                        <FieldList
+                            className="text-purple"
+                            field={fkMappingField}
+                            fieldOptions={{ count: 0, dimensions: [], fks: this.getForeignKeys() }}
+                            tableMetadata={table}
+                            onFieldChange={this.onForeignKeyFieldChange}
+                            hideSectionHeader
+                        />
+                    </PopoverWithTrigger>,
+                    dismissedInitialFkTargetPopover && <div className="text-danger my2">Please select a column to use for display.</div>,
+                    hasChanged && hasFKMappingValue && <RemappingNamingTip />
+                ]}
+                { mappingType === MAP_OPTIONS.custom && (
+                    <div className="mt3">
+                        { hasChanged && <RemappingNamingTip /> }
+                        <ValueRemappings
+                            remappings={field && field.remapping}
+                            updateRemappings={this.onUpdateRemappings}
+                        />
+                    </div>
+                )}
+            </div>
+        )
+    }
+}
+
+const RemappingNamingTip = () =>
+    <div className="bordered rounded p1 mt1 mb2 border-brand">
+        <span className="text-brand text-bold">Tip:</span> You might want to update the field name to make sure it still makes sense based on your remapping choices.
+    </div>
diff --git a/frontend/src/metabase/admin/datamodel/containers/MetadataEditorApp.jsx b/frontend/src/metabase/admin/datamodel/containers/MetadataEditorApp.jsx
index cfba3ebbf3a95950eed261532a2437ad8cab0914..29862bed258baedf282593aa92e3f1c9058bc910 100644
--- a/frontend/src/metabase/admin/datamodel/containers/MetadataEditorApp.jsx
+++ b/frontend/src/metabase/admin/datamodel/containers/MetadataEditorApp.jsx
@@ -57,9 +57,7 @@ export default class MetadataEditor extends Component {
         idfields: PropTypes.array.isRequired,
         editingTable: PropTypes.number,
         updateTable: PropTypes.func.isRequired,
-        updateField: PropTypes.func.isRequired,
-        updateFieldSpecialType: PropTypes.func.isRequired,
-        updateFieldTarget: PropTypes.func.isRequired
+        updateField: PropTypes.func.isRequired
     };
 
     componentWillMount() {
@@ -85,8 +83,6 @@ export default class MetadataEditor extends Component {
                         idfields={this.props.idfields}
                         updateTable={(table) => this.props.updateTable(table)}
                         updateField={(field) => this.props.updateField(field)}
-                        updateFieldSpecialType={(field) => this.props.updateFieldSpecialType(field)}
-                        updateFieldTarget={(field) => this.props.updateFieldTarget(field)}
                         onRetireSegment={this.props.onRetireSegment}
                         onRetireMetric={this.props.onRetireMetric}
                     />
diff --git a/frontend/src/metabase/admin/datamodel/datamodel.js b/frontend/src/metabase/admin/datamodel/datamodel.js
index 43f3576c3e8e287d379795628a7a8b2349581af1..dec914c348600a48df918dc1c2f3a8c92ad5397a 100644
--- a/frontend/src/metabase/admin/datamodel/datamodel.js
+++ b/frontend/src/metabase/admin/datamodel/datamodel.js
@@ -5,7 +5,6 @@ import { push } from "react-router-redux";
 
 import MetabaseAnalytics from "metabase/lib/analytics";
 import { loadTableAndForeignKeys } from "metabase/lib/table";
-import { isFK } from "metabase/lib/types";
 
 import { MetabaseApi, SegmentApi, MetricApi, RevisionsApi } from "metabase/services";
 
@@ -145,37 +144,6 @@ export const updateField = createThunkAction(UPDATE_FIELD, function(field) {
     };
 });
 
-// updateFieldSpecialType
-export const UPDATE_FIELD_SPECIAL_TYPE = "metabase/admin/datamodel/UPDATE_FIELD_SPECIAL_TYPE";
-export const updateFieldSpecialType = createThunkAction(UPDATE_FIELD_SPECIAL_TYPE, function(field) {
-    return function(dispatch, getState) {
-
-        // If we are changing the field from a FK to something else, we should delete any FKs present
-        if (field.target && field.target.id != null && isFK(field.special_type)) {
-            // we have something that used to be an FK and is now not an FK
-            // clean up after ourselves
-            field.target = null;
-            field.fk_target_field_id = null;
-        }
-
-        // save the field
-        dispatch(updateField(field));
-
-        MetabaseAnalytics.trackEvent("Data Model", "Update Field Special-Type", field.special_type);
-    };
-});
-
-// updateFieldTarget
-export const UPDATE_FIELD_TARGET = "metabase/admin/datamodel/UPDATE_FIELD_TARGET";
-export const updateFieldTarget = createThunkAction(UPDATE_FIELD_TARGET, function(field) {
-    return function(dispatch, getState) {
-        // This function notes a change in the target of the target of a foreign key
-        dispatch(updateField(field));
-
-        MetabaseAnalytics.trackEvent("Data Model", "Update Field Target");
-    };
-});
-
 // retireSegment
 export const RETIRE_SEGMENT = "metabase/admin/datamodel/RETIRE_SEGMENT";
 export const onRetireSegment = createThunkAction(RETIRE_SEGMENT, function(segment) {
diff --git a/frontend/src/metabase/admin/permissions/containers/DataPermissionsApp.jsx b/frontend/src/metabase/admin/permissions/containers/DataPermissionsApp.jsx
index 1d881f1df35ed6fa9556969eb950d302fcaf07ee..844226b50e0d12aa0386c1981532d73d46899bd1 100644
--- a/frontend/src/metabase/admin/permissions/containers/DataPermissionsApp.jsx
+++ b/frontend/src/metabase/admin/permissions/containers/DataPermissionsApp.jsx
@@ -4,12 +4,12 @@ import { connect } from "react-redux"
 import PermissionsApp from "./PermissionsApp.jsx";
 
 import { PermissionsApi } from "metabase/services";
-import { fetchDatabases } from "metabase/redux/metadata";
+import { fetchRealDatabases } from "metabase/redux/metadata";
 
-@connect(null, { fetchDatabases })
+@connect(null, { fetchRealDatabases })
 export default class DataPermissionsApp extends Component {
     componentWillMount() {
-        this.props.fetchDatabases();
+        this.props.fetchRealDatabases();
     }
     render() {
         return (
diff --git a/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx b/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
index 998b5a826fcb82b286a77f46a6fc00e46c739f4c..6adc903198c980c0e42352a9c0d225b5e912d6eb 100644
--- a/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
+++ b/frontend/src/metabase/admin/settings/components/SettingsSetting.jsx
@@ -1,5 +1,6 @@
 import React, { Component } from "react";
 import PropTypes from "prop-types";
+import { assocIn } from "icepick";
 
 import SettingHeader from "./SettingHeader.jsx";
 
@@ -19,7 +20,16 @@ const SETTING_WIDGET_MAP = {
     "boolean":  SettingToggle
 };
 
+const updatePlaceholderForEnvironmentVars = (props) => {
+    if (props && props.setting && props.setting.is_env_setting){
+        return assocIn(props, ["setting", "placeholder"], "Using " + props.setting.env_name) 
+    }
+    return props
+}
+
 export default class SettingsSetting extends Component {
+    
+
     static propTypes = {
         setting: PropTypes.object.isRequired,
         updateSetting: PropTypes.func.isRequired,
@@ -40,7 +50,8 @@ export default class SettingsSetting extends Component {
                     <SettingHeader setting={setting} />
                 }
                 <div className="flex">
-                    <Widget {...this.props} />
+                    <Widget {...updatePlaceholderForEnvironmentVars(this.props)} 
+                    />
                 </div>
                 { errorMessage &&
                     <div className="text-error text-bold pt1">{errorMessage}</div>
diff --git a/frontend/src/metabase/admin/settings/selectors.js b/frontend/src/metabase/admin/settings/selectors.js
index 84c0957333d250f6961b4ed63804cfc888979d9b..c44aceaa723f71ea959f17a6266a65b9e44f979b 100644
--- a/frontend/src/metabase/admin/settings/selectors.js
+++ b/frontend/src/metabase/admin/settings/selectors.js
@@ -60,6 +60,11 @@ const SECTIONS = [
                 key: "enable-advanced-humanization",
                 display_name: "Friendly Table and Field Names",
                 type: "boolean"
+            },
+            {
+                key: "enable-nested-queries",
+                display_name: "Enable Nested Queries",
+                type: "boolean"
             }
         ]
     },
diff --git a/frontend/src/metabase/components/AccordianList.jsx b/frontend/src/metabase/components/AccordianList.jsx
index bd3bc970c9726135e9972c08e2ba4b71e6f8d47c..103832d3c3a33b1f0a67018694a0df541883c43f 100644
--- a/frontend/src/metabase/components/AccordianList.jsx
+++ b/frontend/src/metabase/components/AccordianList.jsx
@@ -65,7 +65,7 @@ export default class AccordianList extends Component {
     componentDidMount() {
         // when the component is mounted and an item is selected then scroll to it
         const element = this.refs.selected && ReactDOM.findDOMNode(this.refs.selected);
-        if (element && !elementIsInView(element) && element.scrollIntoView) {
+        if (element && element.scrollIntoView && !elementIsInView(element)) {
             element.scrollIntoView();
         }
     }
@@ -175,9 +175,9 @@ export default class AccordianList extends Component {
             searchable && (typeof searchable !== "function" || searchable(sections[sectionIndex]));
 
         return (
-            <div id={id} className={this.props.className} style={{ width: '300px', ...style }}>
+            <div id={id} className={this.props.className} style={{ minWidth: '300px', ...style }}>
                 {sections.map((section, sectionIndex) =>
-                    <section key={sectionIndex} className={cx("List-section", { "List-section--open": sectionIsOpen(sectionIndex) })}>
+                    <section key={sectionIndex} className={cx("List-section", section.className, { "List-section--open": sectionIsOpen(sectionIndex) })}>
                         { section.name && alwaysExpanded ?
                             (!hideSingleSectionTitle || sections.length > 1 || alwaysTogglable) &&
                                 <div className="px2 pt2 h6 text-grey-2 text-uppercase text-bold">
diff --git a/frontend/src/metabase/components/ButtonWithStatus.jsx b/frontend/src/metabase/components/ButtonWithStatus.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..6b00d74988356ebedf6f84a6e66b373ca881e009
--- /dev/null
+++ b/frontend/src/metabase/components/ButtonWithStatus.jsx
@@ -0,0 +1,61 @@
+import React, { Component } from "react";
+import cx from "classnames";
+
+let defaultTitleForState = {
+    default: "Save",
+    inProgress: "Saving...",
+    completed: "Saved!",
+    failed: "Saving failed."
+};
+
+
+// TODO Atte Keinänen 7/14/17: This could use Button component underneath and pass parameters to it
+// (Didn't want to generalize too much for the first version of this component)
+
+/**
+ * Renders a button that triggers a promise-returning `onClickOperation` when user clicks the button.
+ * When the button is clicked, `inProgress` text is shown, and when the promise resolves, `completed` text is shown.
+ */
+export default class ButtonWithStatus extends Component {
+    props: {
+        onClickOperation: (any) => Promise<void>,
+        titleForState?: { [state: string]: string },
+        disabled?: boolean,
+        className?: string
+    }
+
+    state = {
+        progressState: "default"
+    }
+
+    onClick = async () => {
+        this.setState({ progressState: "inProgress" });
+        try {
+            await this.props.onClickOperation();
+            this.setState({ progressState: "completed" });
+        } catch(e) {
+            console.warn('The operation triggered by click in `ButtonWithStatus` failed')
+            this.setState({ progressState: "failed" });
+            throw e;
+        } finally {
+            setTimeout(() => this.setState({ progressState: "default" }), 3000);
+        }
+    }
+
+    render() {
+        const { progressState } = this.state;
+        const titleForState =  this.props.titleForState || defaultTitleForState
+        const title = titleForState[progressState];
+        const disabled = this.props.disabled || progressState !== "default";
+
+        return (
+            <button
+                className={cx("Button", {"Button--primary": !disabled}, {"Button--success-new": progressState === "completed"}, this.props.className)}
+                disabled={disabled} onClick={this.onClick}
+            >
+                {title}
+            </button>
+        )
+    }
+}
+
diff --git a/frontend/src/metabase/components/DatabaseDetailsForm.jsx b/frontend/src/metabase/components/DatabaseDetailsForm.jsx
index 3adc7254bd39e3781c24c8a4a222c92e28608f24..2832f6937c4f61871982339f0920d348ea011f45 100644
--- a/frontend/src/metabase/components/DatabaseDetailsForm.jsx
+++ b/frontend/src/metabase/components/DatabaseDetailsForm.jsx
@@ -48,7 +48,8 @@ export default class DatabaseDetailsForm extends Component {
         formError: PropTypes.object,
         hiddenFields: PropTypes.object,
         submitButtonText: PropTypes.string.isRequired,
-        submitFn: PropTypes.func.isRequired
+        submitFn: PropTypes.func.isRequired,
+        submitting: PropTypes.boolean
     };
 
     validateForm() {
@@ -250,7 +251,7 @@ export default class DatabaseDetailsForm extends Component {
     }
 
     render() {
-        let { engine, engines, formError, formSuccess, hiddenFields, submitButtonText } = this.props;
+        let { engine, engines, formError, formSuccess, hiddenFields, submitButtonText, submitting } = this.props;
         let { valid } = this.state;
 
         let fields = [
@@ -278,8 +279,8 @@ export default class DatabaseDetailsForm extends Component {
                 </div>
 
                 <div className="Form-actions">
-                    <button className={cx("Button", {"Button--primary": valid})} disabled={!valid}>
-                        {submitButtonText}
+                    <button className={cx("Button", {"Button--primary": valid})} disabled={!valid || submitting}>
+                        {submitting ? "Saving..." : submitButtonText}
                     </button>
                     <FormMessage formError={formError} formSuccess={formSuccess}></FormMessage>
                 </div>
diff --git a/frontend/src/metabase/css/components/popover.css b/frontend/src/metabase/components/Popover.css
similarity index 92%
rename from frontend/src/metabase/css/components/popover.css
rename to frontend/src/metabase/components/Popover.css
index a9e54efbc6625f12ae320f98ce498ef8775f8ef1..586fe9453760762629c76aab568105d6f6fea5fd 100644
--- a/frontend/src/metabase/css/components/popover.css
+++ b/frontend/src/metabase/components/Popover.css
@@ -170,3 +170,25 @@
 	height: 6px;
 	pointer-events: none;
 }
+
+ /* transition classes */
+
+.Popover-appear,
+.Popover-enter {
+  opacity: 0.01;
+}
+
+.Popover-appear.Popover-appear-active,
+.Popover-enter.Popover-enter-active {
+  opacity: 1;
+  transition: opacity 100ms ease-in;
+}
+
+.Popover-leave {
+  opacity: 1;
+}
+
+.Popover-leave.Popover-leave-active {
+  opacity: 0.01;
+  transition: opacity 100ms ease-in;
+}
diff --git a/frontend/src/metabase/components/Popover.jsx b/frontend/src/metabase/components/Popover.jsx
index 5e63bd4056e4fd59817f19136be8ad9cd874047d..2ccacd534527ef6fcf4909c6047c68f1635d660c 100644
--- a/frontend/src/metabase/components/Popover.jsx
+++ b/frontend/src/metabase/components/Popover.jsx
@@ -10,6 +10,11 @@ import { constrainToScreen } from "metabase/lib/dom";
 
 import cx from "classnames";
 
+import "./Popover.css";
+
+const POPOVER_TRANSITION_ENTER = 100;
+const POPOVER_TRANSITION_LEAVE = 100;
+
 export default class Popover extends Component {
     constructor(props, context) {
         super(props, context);
@@ -57,23 +62,12 @@ export default class Popover extends Component {
         return this._popoverElement;
     }
 
-    _cleanupPopoverElement() {
-        if (this._popoverElement) {
-            ReactDOM.unmountComponentAtNode(this._popoverElement);
-            if (this._popoverElement.parentNode) {
-                this._popoverElement.parentNode.removeChild(this._popoverElement);
-            }
-            clearInterval(this._timer);
-            delete this._popoverElement, this._timer;
-        }
-    }
-
     componentDidMount() {
-        this._renderPopover();
+        this._renderPopover(this.props.isOpen);
     }
 
     componentDidUpdate() {
-        this._renderPopover();
+        this._renderPopover(this.props.isOpen);
     }
 
     componentWillUnmount() {
@@ -81,7 +75,17 @@ export default class Popover extends Component {
             this._tether.destroy();
             delete this._tether;
         }
-        this._cleanupPopoverElement();
+        if (this._popoverElement) {
+            this._renderPopover(false);
+            setTimeout(() => {
+                ReactDOM.unmountComponentAtNode(this._popoverElement);
+                if (this._popoverElement.parentNode) {
+                    this._popoverElement.parentNode.removeChild(this._popoverElement);
+                }
+                clearInterval(this._timer);
+                delete this._popoverElement, this._timer;
+            }, POPOVER_TRANSITION_LEAVE);
+        }
     }
 
     handleDismissal(...args) {
@@ -159,22 +163,24 @@ export default class Popover extends Component {
         return best;
     }
 
-    _renderPopover() {
-        if (this.props.isOpen) {
-            // popover is open, lets do this!
-            const popoverElement = this._getPopoverElement();
-            ReactDOM.unstable_renderSubtreeIntoContainer(this,
-                <ReactCSSTransitionGroup
-                    transitionName="Popover"
-                    transitionAppear={true}
-                    transitionAppearTimeout={250}
-                    transitionEnterTimeout={250}
-                    transitionLeaveTimeout={250}
-                >
-                    {this._popoverComponent()}
-                </ReactCSSTransitionGroup>
-                , popoverElement);
-
+    _renderPopover(isOpen) {
+        // popover is open, lets do this!
+        const popoverElement = this._getPopoverElement();
+        ReactDOM.unstable_renderSubtreeIntoContainer(this,
+            <ReactCSSTransitionGroup
+                transitionName="Popover"
+                transitionAppear
+                transitionEnter
+                transitionLeave
+                transitionAppearTimeout={POPOVER_TRANSITION_ENTER}
+                transitionEnterTimeout={POPOVER_TRANSITION_ENTER}
+                transitionLeaveTimeout={POPOVER_TRANSITION_LEAVE}
+            >
+                { isOpen ? this._popoverComponent() : null }
+            </ReactCSSTransitionGroup>
+        , popoverElement);
+
+        if (isOpen) {
             var tetherOptions = {};
 
             tetherOptions.element = popoverElement;
@@ -261,9 +267,6 @@ export default class Popover extends Component {
                     }
                 }
             }
-        } else {
-            // if the popover isn't open then actively unmount our popover
-            this._cleanupPopoverElement();
         }
     }
 
@@ -277,11 +280,14 @@ export default class Popover extends Component {
  * Simply renders the popover body inline instead of mutating DOM root.
  */
 export const TestPopover = (props) =>
-    props.isOpen ?
+    (props.isOpen === undefined || props.isOpen) ?
         <div
             id={props.id}
             className={cx("TestPopover TestPopoverBody", props.className)}
             style={props.style}
+            // because popover is normally directly attached to body element, other elements should not need
+            // to care about clicks that happen inside the popover
+            onClick={ (e) => { e.stopPropagation(); } }
         >
             { typeof props.children === "function" ?
                 props.children()
@@ -289,4 +295,4 @@ export const TestPopover = (props) =>
                 props.children
             }
         </div>
-        : null
\ No newline at end of file
+        : null
diff --git a/frontend/src/metabase/components/Triggerable.jsx b/frontend/src/metabase/components/Triggerable.jsx
index 703c3460cb7ffb05d52d1458fad588fb962cbf5b..4da08c27d6193962ef62e09c2a9f8eb5fad3fef9 100644
--- a/frontend/src/metabase/components/Triggerable.jsx
+++ b/frontend/src/metabase/components/Triggerable.jsx
@@ -45,6 +45,11 @@ export default ComposedComponent => class extends Component {
         if (e && e.target && ReactDOM.findDOMNode(this.refs.trigger).contains(e.target)) {
             return;
         }
+
+        if (this.props.onClose) {
+            this.props.onClose(e)
+        }
+
         this.close();
     }
 
diff --git a/frontend/src/metabase/components/form/FormMessage.jsx b/frontend/src/metabase/components/form/FormMessage.jsx
index 09f2c4ca15ea682dd34c66a7f62f2484c2d55637..78dc438384c39edcc4655bd6987bda6e0bf63912 100644
--- a/frontend/src/metabase/components/form/FormMessage.jsx
+++ b/frontend/src/metabase/components/form/FormMessage.jsx
@@ -1,9 +1,10 @@
 import React, { Component } from "react";
 import cx from "classnames";
 
+export const SERVER_ERROR_MESSAGE = "Server error encountered";
+export const UNKNOWN_ERROR_MESSAGE = "Unknown error encountered";
 
 export default class FormMessage extends Component {
-
     render() {
         let { className, formError, formSuccess, message } = this.props;
 
@@ -12,9 +13,9 @@ export default class FormMessage extends Component {
                 if (formError.data && formError.data.message) {
                     message = formError.data.message;
                 } else if (formError.status >= 400) {
-                    message = "Server error encountered";
+                    message = SERVER_ERROR_MESSAGE;
                 } else {
-                    message = "Unknown error encountered";
+                    message = UNKNOWN_ERROR_MESSAGE;
                 }
             } else if (formSuccess && formSuccess.data.message) {
                 message = formSuccess.data.message;
diff --git a/frontend/src/metabase/containers/SaveQuestionModal.jsx b/frontend/src/metabase/containers/SaveQuestionModal.jsx
index eead67bd786821d92c6ade677651d1d2835ddaec..3b4cf1976e80bcd2d25523e4e72b144d2c352843 100644
--- a/frontend/src/metabase/containers/SaveQuestionModal.jsx
+++ b/frontend/src/metabase/containers/SaveQuestionModal.jsx
@@ -94,7 +94,9 @@ export default class SaveQuestionModal extends Component {
                 description: details.saveType === "overwrite" ?
                     originalCard.description :
                     details.description ? details.description.trim() : null,
-                collection_id: details.collection_id
+                collection_id: details.saveType === "overwrite" ?
+                    originalCard.collection_id :
+                    details.collection_id
             };
 
             if (details.saveType === "create") {
diff --git a/frontend/src/metabase/css/core/colors.css b/frontend/src/metabase/css/core/colors.css
index 35b6b8478f438cd0e3f9da051ac37d23bf72c349..d2bf5f4231df209796bc2d8512fb0f9b2992b052 100644
--- a/frontend/src/metabase/css/core/colors.css
+++ b/frontend/src/metabase/css/core/colors.css
@@ -21,7 +21,6 @@
   --gold-color: #F9D45C;
   --orange-color: #F9A354;
   --purple-color: #A989C5;
-  --purple-light-color: #C5ABDB;
   --green-color: #9CC177;
   --dark-color: #4C545B;
   --error-color: #EF8C8C;
@@ -46,11 +45,6 @@
     color: var(--brand-color);
 }
 
-.text-brand-saturated,
-.text-brand-saturated-hover:hover {
-    color: var(--brand-saturated-color);
-}
-
 .text-brand-darken,
 .text-brand-darken-hover:hover {
     color: color(var(--brand-color) shade(20%));
@@ -86,16 +80,6 @@
   background-color: #FCE8E8
 }
 
-/* heads up */
-
-.text-headsup {
-  color: var(--headsup-color);
-}
-
-.bg-headsup {
-  background-color: var(--headsup-color);
-}
-
 /* warning */
 
 .text-warning {
@@ -117,11 +101,6 @@
     color: var(--purple-color);
 }
 
-.text-purple-light,
-.text-purple-light-hover:hover {
-    color: var(--purple-light-color);
-}
-
 .text-green,
 .text-green-hover:hover {
     color: var(--green-color);
@@ -137,7 +116,6 @@
 
 .bg-gold { background-color: var(--gold-color); }
 .bg-purple { background-color: var(--purple-color); }
-.bg-purple-light { background-color: var(--purple-light-color); }
 .bg-green { background-color: var(--green-color); }
 
 /* alt */
diff --git a/frontend/src/metabase/css/core/link.css b/frontend/src/metabase/css/core/link.css
index e8c7b19254c691cef269bc5b9eba9158149fd2a3..53a073b2989fcf7f456374fff4e8048ee5648e82 100644
--- a/frontend/src/metabase/css/core/link.css
+++ b/frontend/src/metabase/css/core/link.css
@@ -7,6 +7,7 @@
 }
 
 .link {
+  cursor: pointer;
   text-decoration: none;
   color: var(--default-link-color);
 }
diff --git a/frontend/src/metabase/css/dashboard.css b/frontend/src/metabase/css/dashboard.css
index 87df84da7032f6a7ec0f6d48a14ad707fa2b5c58..2caa6e78974cfb6318b7868d9b8534968cac58c6 100644
--- a/frontend/src/metabase/css/dashboard.css
+++ b/frontend/src/metabase/css/dashboard.css
@@ -233,6 +233,7 @@
     bottom: 0;
     right: 0;
     cursor: nwse-resize;
+    z-index: 1; /* ensure the handle is above the card contents */
 }
 
 .Dash--editing .DashCard .react-resizable-handle:after {
diff --git a/frontend/src/metabase/css/index.css b/frontend/src/metabase/css/index.css
index 4c0d3ae572648a559b8a3b853d52e63b9bc5a93e..95ee56f34e5a58cfb35ebc7acac7d39ba1e5011a 100644
--- a/frontend/src/metabase/css/index.css
+++ b/frontend/src/metabase/css/index.css
@@ -9,7 +9,6 @@
 @import './components/icons.css';
 @import './components/list.css';
 @import './components/modal.css';
-@import './components/popover.css';
 @import './components/select.css';
 @import './components/table.css';
 
@@ -22,4 +21,3 @@
 @import './query_builder.css';
 @import './setup.css';
 @import './tutorial.css';
-
diff --git a/frontend/src/metabase/dashboard/dashboard.js b/frontend/src/metabase/dashboard/dashboard.js
index 24aebceb82f09b61cc71f0925de46df7259400ad..4b4d7c777e349e9fd7ef5751dc4388b5454d7397 100644
--- a/frontend/src/metabase/dashboard/dashboard.js
+++ b/frontend/src/metabase/dashboard/dashboard.js
@@ -369,8 +369,8 @@ export const fetchDashboard = createThunkAction(FETCH_DASHBOARD, function(dashId
                 .each((dbId) => dispatch(fetchDatabaseMetadata(dbId)));
         }
 
-        if (dashboard.param_values) {
-            dispatch(addParamValues(dashboard.param_values));
+        if (result.param_values) {
+            dispatch(addParamValues(result.param_values));
         }
 
         return {
@@ -516,9 +516,17 @@ export const navigateToNewCardFromDashboard = createThunkAction(
             const {dashboardId, dashboards, parameterValues} = getState().dashboard;
             const dashboard = dashboards[dashboardId];
             const cardIsDirty = !_.isEqual(previousCard.dataset_query, nextCard.dataset_query);
+            const cardAfterClick = getCardAfterVisualizationClick(nextCard, previousCard);
+
+            // clicking graph title with a filter applied loses display type and visualization settings; see #5278
+            const cardWithVizSettings = {
+                ...cardAfterClick,
+                display: cardAfterClick.display || previousCard.display,
+                visualization_settings: cardAfterClick.visualization_settings || previousCard.visualization_settings
+            }
 
             const url = questionUrlWithParameters(
-                getCardAfterVisualizationClick(nextCard, previousCard),
+                cardWithVizSettings,
                 metadata,
                 dashboard.parameters,
                 parameterValues,
diff --git a/frontend/src/metabase/icon_paths.js b/frontend/src/metabase/icon_paths.js
index 9c658befe881179fca8f13b1917ae7c912cafcaa..5a2fd4f245bad7d67eb954ea41942416d0197d93 100644
--- a/frontend/src/metabase/icon_paths.js
+++ b/frontend/src/metabase/icon_paths.js
@@ -53,6 +53,9 @@ export var ICON_PATHS = {
     },
     cursor_move: 'M14.8235294,14.8235294 L14.8235294,6.58823529 L17.1764706,6.58823529 L17.1764706,14.8235294 L25.4117647,14.8235294 L25.4117647,17.1764706 L17.1764706,17.1764706 L17.1764706,25.4117647 L14.8235294,25.4117647 L14.8235294,17.1764706 L6.58823529,17.1764706 L6.58823529,14.8235294 L14.8235294,14.8235294 L14.8235294,14.8235294 Z M16,0 L20.1176471,6.58823529 L11.8823529,6.58823529 L16,0 Z M11.8823529,25.4117647 L20.1176471,25.4117647 L16,32 L11.8823529,25.4117647 Z M32,16 L25.4117647,20.1176471 L25.4117647,11.8823529 L32,16 Z M6.58823529,11.8823529 L6.58823529,20.1176471 L0,16 L6.58823529,11.8823529 Z',
     cursor_resize: 'M17.4017952,6.81355995 L15.0488541,6.81355995 L15.0488541,25.6370894 L17.4017952,25.6370894 L17.4017952,6.81355995 Z M16.2253247,0.225324657 L20.3429717,6.81355995 L12.1076776,6.81355995 L16.2253247,0.225324657 Z M12.1076776,25.6370894 L20.3429717,25.6370894 L16.2253247,32.2253247 L12.1076776,25.6370894 Z',
+    costapproximate: 'M27 19a3 3 0 1 1 0-6 3 3 0 0 1 0 6zM16 8a3 3 0 1 1 0-6 3 3 0 0 1 0 6zm0 22a3 3 0 1 1 0-6 3 3 0 0 1 0 6zM5 19a3 3 0 1 1 0-6 3 3 0 0 1 0 6z',
+    costexact: 'M27 19a3 3 0 1 1 0-6 3 3 0 0 1 0 6zM16 8a3 3 0 1 1 0-6 3 3 0 0 1 0 6zm0 22a3 3 0 1 1 0-6 3 3 0 0 1 0 6zM5 19a3 3 0 1 1 0-6 3 3 0 0 1 0 6zm11 0a3 3 0 1 1 0-6 3 3 0 0 1 0 6z',
+    costextended: 'M27,19 C25.3431458,19 24,17.6568542 24,16 C24,14.3431458 25.3431458,13 27,13 C28.6568542,13 30,14.3431458 30,16 C30,17.6568542 28.6568542,19 27,19 Z M16,8 C14.3431458,8 13,6.65685425 13,5 C13,3.34314575 14.3431458,2 16,2 C17.6568542,2 19,3.34314575 19,5 C19,6.65685425 17.6568542,8 16,8 Z M16,30 C14.3431458,30 13,28.6568542 13,27 C13,25.3431458 14.3431458,24 16,24 C17.6568542,24 19,25.3431458 19,27 C19,28.6568542 17.6568542,30 16,30 Z M5,19 C3.34314575,19 2,17.6568542 2,16 C2,14.3431458 3.34314575,13 5,13 C6.65685425,13 8,14.3431458 8,16 C8,17.6568542 6.65685425,19 5,19 Z M16,19 C14.3431458,19 13,17.6568542 13,16 C13,14.3431458 14.3431458,13 16,13 C17.6568542,13 19,14.3431458 19,16 C19,17.6568542 17.6568542,19 16,19 Z M10,12 C8.8954305,12 8,11.1045695 8,10 C8,8.8954305 8.8954305,8 10,8 C11.1045695,8 12,8.8954305 12,10 C12,11.1045695 11.1045695,12 10,12 Z M22,12 C20.8954305,12 20,11.1045695 20,10 C20,8.8954305 20.8954305,8 22,8 C23.1045695,8 24,8.8954305 24,10 C24,11.1045695 23.1045695,12 22,12 Z M22,24 C20.8954305,24 20,23.1045695 20,22 C20,20.8954305 20.8954305,20 22,20 C23.1045695,20 24,20.8954305 24,22 C24,23.1045695 23.1045695,24 22,24 Z M10,24 C8.8954305,24 8,23.1045695 8,22 C8,20.8954305 8.8954305,20 10,20 C11.1045695,20 12,20.8954305 12,22 C12,23.1045695 11.1045695,24 10,24 Z',
     database: 'M1.18285296e-08,10.5127919 C-1.47856568e-08,7.95412848 1.18285298e-08,4.57337284 1.18285298e-08,4.57337284 C1.18285298e-08,4.57337284 1.58371041,5.75351864e-10 15.6571342,0 C29.730558,-5.7535027e-10 31.8900148,4.13849684 31.8900148,4.57337284 L31.8900148,10.4843058 C31.8900148,10.4843058 30.4448001,15.1365942 16.4659751,15.1365944 C2.48715012,15.1365947 2.14244494e-08,11.4353349 1.18285296e-08,10.5127919 Z M0.305419478,21.1290071 C0.305419478,21.1290071 0.0405133833,21.2033291 0.0405133833,21.8492606 L0.0405133833,27.3032816 C0.0405133833,27.3032816 1.46515486,31.941655 15.9641228,31.941655 C30.4630908,31.941655 32,27.3446712 32,27.3446712 C32,27.3446712 32,21.7986104 32,21.7986105 C32,21.2073557 31.6620557,21.0987647 31.6620557,21.0987647 C31.6620557,21.0987647 29.7146434,25.22314 16.0318829,25.22314 C2.34912233,25.22314 0.305419478,21.1290071 0.305419478,21.1290071 Z M0.305419478,12.656577 C0.305419478,12.656577 0.0405133833,12.730899 0.0405133833,13.3768305 L0.0405133833,18.8308514 C0.0405133833,18.8308514 1.46515486,23.4692249 15.9641228,23.4692249 C30.4630908,23.4692249 32,18.8722411 32,18.8722411 C32,18.8722411 32,13.3261803 32,13.3261803 C32,12.7349256 31.6620557,12.6263346 31.6620557,12.6263346 C31.6620557,12.6263346 29.7146434,16.7507099 16.0318829,16.7507099 C2.34912233,16.7507099 0.305419478,12.656577 0.305419478,12.656577 Z',
     dashboard: 'M32,29 L32,4 L32,0 L0,0 L0,8 L28,8 L28,28 L4,28 L4,8 L0,8 L0,29.5 L0,32 L32,32 L32,29 Z M7.27272727,18.9090909 L17.4545455,18.9090909 L17.4545455,23.2727273 L7.27272727,23.2727273 L7.27272727,18.9090909 Z M7.27272727,12.0909091 L24.7272727,12.0909091 L24.7272727,16.4545455 L7.27272727,16.4545455 L7.27272727,12.0909091 Z M20.3636364,18.9090909 L24.7272727,18.9090909 L24.7272727,23.2727273 L20.3636364,23.2727273 L20.3636364,18.9090909 Z',
     dashboards: 'M17,5.49100518 L17,10.5089948 C17,10.7801695 17.2276528,11 17.5096495,11 L26.4903505,11 C26.7718221,11 27,10.7721195 27,10.5089948 L27,5.49100518 C27,5.21983051 26.7723472,5 26.4903505,5 L17.5096495,5 C17.2281779,5 17,5.22788048 17,5.49100518 Z M18.5017326,14 C18.225722,14 18,13.77328 18,13.4982674 L18,26.5017326 C18,26.225722 18.22672,26 18.5017326,26 L5.49826741,26 C5.77427798,26 6,26.22672 6,26.5017326 L6,13.4982674 C6,13.774278 5.77327997,14 5.49826741,14 L18.5017326,14 Z M14.4903505,6 C14.2278953,6 14,5.78028538 14,5.49100518 L14,10.5089948 C14,10.2167107 14.2224208,10 14.4903505,10 L5.50964952,10 C5.77210473,10 6,10.2197146 6,10.5089948 L6,5.49100518 C6,5.78328929 5.77757924,6 5.50964952,6 L14.4903505,6 Z M26.5089948,22 C26.2251201,22 26,21.7774008 26,21.4910052 L26,26.5089948 C26,26.2251201 26.2225992,26 26.5089948,26 L21.4910052,26 C21.7748799,26 22,26.2225992 22,26.5089948 L22,21.4910052 C22,21.7748799 21.7774008,22 21.4910052,22 L26.5089948,22 Z M26.5089948,14 C26.2251201,14 26,13.7774008 26,13.4910052 L26,18.5089948 C26,18.2251201 26.2225992,18 26.5089948,18 L21.4910052,18 C21.7748799,18 22,18.2225992 22,18.5089948 L22,13.4910052 C22,13.7748799 21.7774008,14 21.4910052,14 L26.5089948,14 Z M26.4903505,6 C26.2278953,6 26,5.78028538 26,5.49100518 L26,10.5089948 C26,10.2167107 26.2224208,10 26.4903505,10 L17.5096495,10 C17.7721047,10 18,10.2197146 18,10.5089948 L18,5.49100518 C18,5.78328929 17.7775792,6 17.5096495,6 L26.4903505,6 Z M5,13.4982674 L5,26.5017326 C5,26.7769181 5.21990657,27 5.49826741,27 L18.5017326,27 C18.7769181,27 19,26.7800934 19,26.5017326 L19,13.4982674 C19,13.2230819 18.7800934,13 18.5017326,13 L5.49826741,13 C5.22308192,13 5,13.2199066 5,13.4982674 Z M5,5.49100518 L5,10.5089948 C5,10.7801695 5.22765279,11 5.50964952,11 L14.4903505,11 C14.7718221,11 15,10.7721195 15,10.5089948 L15,5.49100518 C15,5.21983051 14.7723472,5 14.4903505,5 L5.50964952,5 C5.22817786,5 5,5.22788048 5,5.49100518 Z M21,21.4910052 L21,26.5089948 
C21,26.7801695 21.2278805,27 21.4910052,27 L26.5089948,27 C26.7801695,27 27,26.7721195 27,26.5089948 L27,21.4910052 C27,21.2198305 26.7721195,21 26.5089948,21 L21.4910052,21 C21.2198305,21 21,21.2278805 21,21.4910052 Z M21,13.4910052 L21,18.5089948 C21,18.7801695 21.2278805,19 21.4910052,19 L26.5089948,19 C26.7801695,19 27,18.7721195 27,18.5089948 L27,13.4910052 C27,13.2198305 26.7721195,13 26.5089948,13 L21.4910052,13 C21.2198305,13 21,13.2278805 21,13.4910052 Z',
diff --git a/frontend/src/metabase/lib/core.js b/frontend/src/metabase/lib/core.js
index 9ac2d007b51931f268e821cd02537ff7da52d39f..5d86dca90704e7707ada4e8b4fd9d7cfb4563828 100644
--- a/frontend/src/metabase/lib/core.js
+++ b/frontend/src/metabase/lib/core.js
@@ -39,6 +39,10 @@ export const field_special_types = [{
     'id': TYPE.Email,
     'name': 'Email',
     'section': 'Common'
+}, {
+    'id': TYPE.Enum,
+    'name': 'Enum',
+    'section': 'Common'
 }, {
     'id': TYPE.ImageURL,
     'name': 'Image URL',
diff --git a/frontend/src/metabase/lib/dataset.js b/frontend/src/metabase/lib/dataset.js
index c5e56bf62088ae03f3cf17a4f883669ad49a9fef..41faf1fcb145df73993e2849e6e308c92352539b 100644
--- a/frontend/src/metabase/lib/dataset.js
+++ b/frontend/src/metabase/lib/dataset.js
@@ -1,4 +1,19 @@
 import _ from "underscore";
 
+import type { Value, Column, DatasetData } from "metabase/meta/types/Dataset";
+
 // Many aggregations result in [[null]] if there are no rows to aggregate after filters
-export const datasetContainsNoResults = (data) => data.rows.length === 0 || _.isEqual(data.rows, [[null]])
+export const datasetContainsNoResults = (data: DatasetData): boolean =>
+    data.rows.length === 0 || _.isEqual(data.rows, [[null]]);
+
+/**
+ * @returns min and max for a value in a column
+ */
+export const rangeForValue = (
+    value: Value,
+    column: Column
+): ?[number, number] => {
+    if (column && column.binning_info && column.binning_info.bin_width) {
+        return [value, value + column.binning_info.bin_width];
+    }
+};
diff --git a/frontend/src/metabase/lib/formatting.js b/frontend/src/metabase/lib/formatting.js
index b1e978f32c361021d466a19fc19a7ac29a2d6c37..7f409d705c976a7423c0650e25ad438e6145a4f6 100644
--- a/frontend/src/metabase/lib/formatting.js
+++ b/frontend/src/metabase/lib/formatting.js
@@ -8,11 +8,15 @@ import React from "react";
 
 import ExternalLink from "metabase/components/ExternalLink.jsx";
 
-import { isDate, isNumber, isCoordinate } from "metabase/lib/schema_metadata";
+import { isDate, isNumber, isCoordinate, isLatitude, isLongitude } from "metabase/lib/schema_metadata";
 import { isa, TYPE } from "metabase/lib/types";
 import { parseTimestamp } from "metabase/lib/time";
+import { rangeForValue } from "metabase/lib/dataset";
+import { getFriendlyName } from "metabase/visualizations/lib/utils";
+import { decimalCount } from "metabase/visualizations/lib/numeric";
 
 import type { Column, Value } from "metabase/meta/types/Dataset";
+import type { Field } from "metabase/meta/types/Field";
 import type { DatetimeUnit } from "metabase/meta/types/Query";
 import type { Moment } from "metabase/meta/types";
 
@@ -29,6 +33,12 @@ const PRECISION_NUMBER_FORMATTER      = d3.format(".2r");
 const FIXED_NUMBER_FORMATTER          = d3.format(",.f");
 const FIXED_NUMBER_FORMATTER_NO_COMMA = d3.format(".f");
 const DECIMAL_DEGREES_FORMATTER       = d3.format(".08f");
+const BINNING_DEGREES_FORMATTER       = (value, binWidth) => {
+    return d3.format(`.0${decimalCount(binWidth)}f`)(value)
+}
+
+// use en dashes, for Maz
+const RANGE_SEPARATOR = ` – `;
 
 export function formatNumber(number: number, options: FormattingOptions = {}) {
     options = { comma: true, ...options};
@@ -60,6 +70,25 @@ export function formatNumber(number: number, options: FormattingOptions = {}) {
     }
 }
 
+export function formatCoordinate(value: number, options: FormattingOptions = {}) {
+    const binWidth = options.column && options.column.binning_info && options.column.binning_info.bin_width;
+    let direction = "";
+    if (isLatitude(options.column)) {
+        direction = " " + (value < 0 ? "S" : "N");
+        value = Math.abs(value);
+    } else if (isLongitude(options.column)) {
+        direction = " " + (value < 0 ? "W" : "E");
+        value = Math.abs(value);
+    }
+
+    const formattedValue = binWidth ? BINNING_DEGREES_FORMATTER(value, binWidth) : DECIMAL_DEGREES_FORMATTER(value)
+    return formattedValue + "°" + direction;
+}
+
+export function formatRange(range: [number, number], formatter: (value: number) => string, options: FormattingOptions = {}) {
+    return range.map(value => formatter(value, options)).join(RANGE_SEPARATOR);
+}
+
 function formatMajorMinor(major, minor, options = {}) {
     options = {
         jsx: false,
@@ -89,18 +118,16 @@ export function formatTimeRangeWithUnit(value: Value, unit: DatetimeUnit, option
     // Tooltips should show full month name, but condense "MMMM D, YYYY - MMMM D, YYYY" to "MMMM D - D, YYYY" etc
     const monthFormat = options.type === "tooltip" ? "MMMM" : "MMM";
     const condensed = options.type === "tooltip";
-    // use en dashes, for Maz
-    const separator = ` – `;
 
     const start = m.clone().startOf(unit);
     const end = m.clone().endOf(unit);
     if (start.isValid() && end.isValid()) {
         if (!condensed || start.year() !== end.year()) {
-            return start.format(`${monthFormat} D, YYYY`) + separator + end.format(`${monthFormat} D, YYYY`);
+            return start.format(`${monthFormat} D, YYYY`) + RANGE_SEPARATOR + end.format(`${monthFormat} D, YYYY`);
         } else if (start.month() !== end.month()) {
-            return start.format(`${monthFormat} D`) + separator + end.format(`${monthFormat} D, YYYY`);
+            return start.format(`${monthFormat} D`) + RANGE_SEPARATOR + end.format(`${monthFormat} D, YYYY`);
         } else {
-            return start.format(`${monthFormat} D`) + separator + end.format(`D, YYYY`);
+            return start.format(`${monthFormat} D`) + RANGE_SEPARATOR + end.format(`D, YYYY`);
         }
     } else {
         return formatWeek(m, options);
@@ -199,11 +226,37 @@ function formatStringFallback(value: Value, options: FormattingOptions = {}) {
 
 export function formatValue(value: Value, options: FormattingOptions = {}) {
     let column = options.column;
+
     options = {
         jsx: false,
         comma: isNumber(column),
         ...options
     };
+
+    // "column" may also be a field object
+    // $FlowFixMe: remapping is a special field added by Visualization.jsx or getMetadata selector
+    if (column && column.remapping && column.remapping.size > 0) {
+        // $FlowFixMe
+        const remappedValueSample = column.remapping.values().next().value
+
+        // Even if the column only has a list of analyzed values without remappings, those values
+        // are keys in `remapping` array with value `undefined`
+        const hasSetRemappings = remappedValueSample !== undefined
+        if (hasSetRemappings) {
+            // $FlowFixMe
+            if (column.remapping.has(value)) {
+                // $FlowFixMe
+                return column.remapping.get(value);
+            }
+
+            const remappedValueIsString = typeof remappedValueSample === "string"
+            if (remappedValueIsString) {
+                // A simple way to hide intermediate ticks for a numeral value that has been remapped to a string
+                return null;
+            }
+        }
+    }
+
     if (value == undefined) {
         return null;
     } else if (column && isa(column.special_type, TYPE.URL)) {
@@ -217,10 +270,14 @@ export function formatValue(value: Value, options: FormattingOptions = {}) {
     } else if (typeof value === "string") {
         return formatStringFallback(value, options);
     } else if (typeof value === "number") {
-        if (isCoordinate(column)) {
-            return DECIMAL_DEGREES_FORMATTER(value);
+        const formatter = isCoordinate(column) ?
+            formatCoordinate :
+            formatNumber;
+        const range = rangeForValue(value, options.column);
+        if (range) {
+            return formatRange(range, formatter, options);
         } else {
-            return formatNumber(value, options);
+            return formatter(value, options);
         }
     } else if (typeof value === "object") {
         // no extra whitespace for table cells
@@ -230,6 +287,31 @@ export function formatValue(value: Value, options: FormattingOptions = {}) {
     }
 }
 
+export function formatColumn(column: Column): string {
+    if (!column) {
+        return "";
+    } else if (column.remapped_to_column != null) {
+        // $FlowFixMe: remapped_to_column is a special field added by Visualization.jsx
+        return formatColumn(column.remapped_to_column)
+    } else {
+        let columnTitle = getFriendlyName(column);
+        if (column.unit && column.unit !== "default") {
+            columnTitle += ": " + capitalize(column.unit.replace(/-/g, " "))
+        }
+        return columnTitle;
+    }
+}
+
+export function formatField(field: Field): string {
+    if (!field) {
+        return "";
+    } else if (field.dimensions && field.dimensions.name) {
+        return field.dimensions.name;
+    } else {
+        return field.display_name || field.name;
+    }
+}
+
 // $FlowFixMe
 export function singularize(...args) {
     return inflection.singularize(...args);
diff --git a/frontend/src/metabase/lib/query.js b/frontend/src/metabase/lib/query.js
index 89e40563b36d5b8a40f51c3dc1655d70495cff4a..804e8df02c34e7f5973be71ca1302f8cd9730859 100644
--- a/frontend/src/metabase/lib/query.js
+++ b/frontend/src/metabase/lib/query.js
@@ -7,12 +7,13 @@ import Utils from "metabase/lib/utils";
 import { getOperators } from "metabase/lib/schema_metadata";
 import { createLookupByProperty } from "metabase/lib/table";
 import { isFK, TYPE } from "metabase/lib/types";
-import { stripId } from "metabase/lib/formatting";
+import { stripId, formatField } from "metabase/lib/formatting";
 import { format as formatExpression } from "metabase/lib/expressions/formatter";
 
 import * as Table from "./query/table";
 
 import * as Q from "./query/query";
+import * as F from "./query/field";
 import { mbql, mbqlEq } from "./query/util";
 
 export const NEW_QUERY_TEMPLATES = {
@@ -323,6 +324,8 @@ var Query = {
         return Array.isArray(field) && mbqlEq(field[0], "datetime-field");
     },
 
+    isBinningStrategy: F.isBinningStrategy,
+
     isExpressionField(field) {
         return Array.isArray(field) && field.length === 2 && mbqlEq(field[0], "expression");
     },
@@ -371,6 +374,8 @@ var Query = {
             return Query.getFieldTargetId(field[2]);
         } else if (Query.isDatetimeField(field)) {
             return Query.getFieldTargetId(field[1]);
+        } else if (Query.isBinningStrategy(field)) {
+            return Query.getFieldTargetId(field[1]);
         } else if (Query.isFieldLiteral(field)) {
             return field;
         }
@@ -392,6 +397,8 @@ var Query = {
                 ...Query.getFieldTarget(field[1], tableDef, path),
                 unit: Query.getDatetimeUnit(field)
             };
+        } else if (Query.isBinningStrategy(field)) {
+            return Query.getFieldTarget(field[1], tableDef, path);
         } else if (Query.isExpressionField(field)) {
             // hmmm, since this is a dynamic field we'll need to build this here
             let fieldDef = {
@@ -435,7 +442,7 @@ var Query = {
     },
 
     getFieldPathName(fieldId, tableDef) {
-        return Query.getFieldPath(fieldId, tableDef).map(f => f && f.display_name).join(": ")
+        return Query.getFieldPath(fieldId, tableDef).map(formatField).join(": ")
     },
 
     getDatetimeUnit(field) {
diff --git a/frontend/src/metabase/lib/query/field.js b/frontend/src/metabase/lib/query/field.js
index ecf111155814c75064d803e054b277e6d4d5039b..266bfee8133a094c46561bd724000a983f87cc40 100644
--- a/frontend/src/metabase/lib/query/field.js
+++ b/frontend/src/metabase/lib/query/field.js
@@ -2,7 +2,7 @@
 import { mbqlEq } from "./util";
 
 import type { Field as FieldReference } from "metabase/meta/types/Query";
-import type { Field, FieldId } from "metabase/meta/types/Field";
+import type { Field, FieldId, FieldValues } from "metabase/meta/types/Field";
 
 // gets the target field ID (recursively) from any type of field, including raw field ID, fk->, and datetime-field cast.
 export function getFieldTargetId(field: FieldReference): ?FieldId {
@@ -18,6 +18,9 @@ export function getFieldTargetId(field: FieldReference): ?FieldId {
     } else if (isDatetimeField(field)) {
         // $FlowFixMe
         return getFieldTargetId(field[1]);
+    } else if (isBinningStrategy(field)) {
+        // $FlowFixMe
+        return getFieldTargetId(field[1]);
     } else if (isFieldLiteral(field)) {
         return field;
     }
@@ -40,6 +43,10 @@ export function isDatetimeField(field: FieldReference): boolean {
     return Array.isArray(field) && mbqlEq(field[0], "datetime-field");
 }
 
+export function isBinningStrategy(field: FieldReference): boolean {
+    return Array.isArray(field) && mbqlEq(field[0], "binning-strategy");
+}
+
 export function isFieldLiteral(field: FieldReference): boolean {
     return Array.isArray(field) && field.length === 3 && mbqlEq(field[0], "field-literal");
 }
@@ -52,15 +59,38 @@ export function isAggregateField(field: FieldReference): boolean {
     return Array.isArray(field) && mbqlEq(field[0], "aggregation");
 }
 
+import _ from "underscore";
+
 // Metadata field "values" type is inconsistent
 // https://github.com/metabase/metabase/issues/3417
-export function getFieldValues(field: ?Field): any[] {
+export function getFieldValues(field: ?Field): FieldValues {
     const values = field && field.values;
     if (Array.isArray(values)) {
-        return values;
+        if (values.length === 0 || Array.isArray(values[0])) {
+            return values;
+        } else {
+            // console.warn("deprecated field values array!", values);
+            return values.map(value => [value]);
+        }
     } else if (values && Array.isArray(values.values)) {
-        return values.values;
+        // console.warn("deprecated field values object!", values);
+
+        if (Array.isArray(values.human_readable_values)) {
+            return _.zip(values.values, values.human_readable_values);
+        } else if (Array.isArray(values.values)) {
+            // TODO Atte Keinänen 7/12/17: I don't honestly know why we can have a field in `values` property.
+            return getFieldValues(values);
+        } else {
+            // console.warn("missing field values", field);
+            return [];
+        }
     } else {
+        // console.warn("missing field values", field);
         return [];
     }
 }
+
+export function getHumanReadableValue(value: any, fieldValues?: FieldValues = []) {
+    const fieldValue = _.findWhere(fieldValues, { [0]: value });
+    return fieldValue && fieldValue.length === 2 ? fieldValue[1] : String(value);
+}
diff --git a/frontend/src/metabase/lib/schema_metadata.js b/frontend/src/metabase/lib/schema_metadata.js
index 75b6f9376c52f2d890fdf367377b59d9eaeef73c..ca632802a2eb9f951e29c53810211ad122567ef5 100644
--- a/frontend/src/metabase/lib/schema_metadata.js
+++ b/frontend/src/metabase/lib/schema_metadata.js
@@ -1,6 +1,7 @@
 import _ from "underscore";
 
 import { isa, isFK as isTypeFK, isPK as isTypePK, TYPE } from "metabase/lib/types";
+import { getFieldValues, getHumanReadableValue } from "metabase/lib/query/field";
 
 // primary field types used for picking operators, etc
 export const NUMBER = "NUMBER";
@@ -125,6 +126,8 @@ export const isCoordinate   = (field) => isa(field && field.special_type, TYPE.C
 export const isLatitude     = (field) => isa(field && field.special_type, TYPE.Latitude);
 export const isLongitude    = (field) => isa(field && field.special_type, TYPE.Longitude);
 
+export const isID           = (field) => isFK(field) || isPK(field);
+
 // operator argument constructors:
 
 function freeformArgument(field, table) {
@@ -171,18 +174,17 @@ function equivalentArgument(field, table) {
     }
 
     if (isCategory(field)) {
-        if (table.field_values && field.id in table.field_values && table.field_values[field.id].length > 0) {
-            let validValues = [...table.field_values[field.id]];
-            // this sort function works for both numbers and strings:
-            validValues.sort((a, b) => a === b ? 0 : (a < b ? -1 : 1));
+        const values = getFieldValues(field)
+        if (values && values.length > 0) {
             return {
                 type: "select",
-                values: validValues
-                    .filter(value => value != null)
-                    .map(value => ({
+                values: values
+                    .filter(([value, displayValue]) => value != null)
+                    .map(([value, displayValue]) => ({
                         key: value,
-                        name: value
+                        name: getHumanReadableValue(value, values)
                     }))
+                    .sort((a, b) => a.key === b.key ? 0 : (a.key < b.key ? -1 : 1))
             };
         }
     }
@@ -478,6 +480,9 @@ export function getAggregator(short) {
     return _.findWhere(Aggregators, { short: short });
 }
 
+export const isCompatibleAggregatorForField = (aggregator, field) =>
+    aggregator.validFieldsFilters.every(filter => filter([field]).length === 1)
+
 export function getBreakouts(fields) {
     var result = populateFields(BreakoutAggregator, fields);
     result.fields = result.fields[0];
diff --git a/frontend/src/metabase/meta/types/Dataset.js b/frontend/src/metabase/meta/types/Dataset.js
index e7de3b29a88d1343b6e22525097c99a77608bf17..71637bdf852c860a2108a53ca957927ca339d3a9 100644
--- a/frontend/src/metabase/meta/types/Dataset.js
+++ b/frontend/src/metabase/meta/types/Dataset.js
@@ -7,6 +7,10 @@ import type { DatetimeUnit } from "./Query";
 
 export type ColumnName = string;
 
+export type BinningInfo = {
+    bin_width: number
+}
+
 // TODO: incomplete
 export type Column = {
     id: ?FieldId,
@@ -15,7 +19,8 @@ export type Column = {
     base_type: string,
     special_type: ?string,
     source?: "fields"|"aggregation"|"breakout",
-    unit?: DatetimeUnit
+    unit?: DatetimeUnit,
+    binning_info?: BinningInfo
 };
 
 export type Value = string|number|ISO8601Time|boolean|null|{};
diff --git a/frontend/src/metabase/meta/types/Field.js b/frontend/src/metabase/meta/types/Field.js
index f513dd18e92076e9c58afb12d9e64952ad19896d..53b48f640f716852d39e830818eefa9a5d166b19 100644
--- a/frontend/src/metabase/meta/types/Field.js
+++ b/frontend/src/metabase/meta/types/Field.js
@@ -2,6 +2,7 @@
 
 import type { ISO8601Time } from ".";
 import type { TableId } from "./Table";
+import type { Value } from "./Dataset";
 
 export type FieldId = number;
 
@@ -40,12 +41,16 @@ export type Field = {
     created_at:         ISO8601Time,
     updated_at:         ISO8601Time,
 
-    // Metadata field "values" type is inconsistent
-    // https://github.com/metabase/metabase/issues/3417
-    values: [] | FieldValues
+    values?:            FieldValues,
+    dimensions?:        FieldDimension
 };
 
-export type FieldValues = {
-    // incomplete
-    values: Array<string> | {}
+export type RawFieldValue = Value;
+export type HumanReadableFieldValue = string;
+
+export type FieldValue = [RawFieldValue] | [RawFieldValue, HumanReadableFieldValue];
+export type FieldValues = FieldValue[];
+
+export type FieldDimension = {
+    name: string
 }
diff --git a/frontend/src/metabase/meta/types/Query.js b/frontend/src/metabase/meta/types/Query.js
index 7be2639c27d0100bf081b69f12add8ce9ac05966..3b04f3d98de3a57394d259bd4a8f1460b8083eda 100644
--- a/frontend/src/metabase/meta/types/Query.js
+++ b/frontend/src/metabase/meta/types/Query.js
@@ -144,7 +144,8 @@ export type ConcreteField =
     LocalFieldReference |
     ForeignFieldReference |
     ExpressionReference |
-    DatetimeField;
+    DatetimeField |
+    BinnedField;
 
 export type LocalFieldReference =
     ["field-id", FieldId] |
@@ -163,6 +164,11 @@ export type DatetimeField =
     ["datetime-field", LocalFieldReference | ForeignFieldReference, DatetimeUnit] |
     ["datetime-field", LocalFieldReference | ForeignFieldReference, "as", DatetimeUnit]; // @deprecated: don't include the "as" element
 
+export type BinnedField =
+    ["binning-strategy", LocalFieldReference | ForeignFieldReference, "default"] | // default binning (as defined by backend)
+    ["binning-strategy", LocalFieldReference | ForeignFieldReference, "num-bins", number] | // number of bins
+    ["binning-strategy", LocalFieldReference | ForeignFieldReference, "bin-width", number]; // width of each bin
+
 export type AggregateField = ["aggregation", number];
 
 
diff --git a/frontend/src/metabase/meta/types/Segment.js b/frontend/src/metabase/meta/types/Segment.js
index 1074de8ab6c540e924e927d5ed01fb5effa2262d..37d1101ad7430491e1f41b97dd1c1b684d014a64 100644
--- a/frontend/src/metabase/meta/types/Segment.js
+++ b/frontend/src/metabase/meta/types/Segment.js
@@ -9,5 +9,6 @@ export type Segment = {
     name: string,
     id: SegmentId,
     table_id: TableId,
-    is_active: bool
+    is_active: bool,
+    description: string
 };
diff --git a/frontend/src/metabase/meta/types/Table.js b/frontend/src/metabase/meta/types/Table.js
index f436ad1c4dcd6ee07b1b6d260bcb7f46ac07dd3f..4b2be18cf8ce0962ec7818d41f83995db360dec6 100644
--- a/frontend/src/metabase/meta/types/Table.js
+++ b/frontend/src/metabase/meta/types/Table.js
@@ -1,7 +1,7 @@
 
 import type { ISO8601Time } from ".";
 
-import type { Field, FieldId } from "./Field";
+import type { Field } from "./Field";
 import type { Segment } from "./Segment";
 import type { Metric } from "./Metric";
 import type { DatabaseId } from "./Database";
@@ -11,11 +11,6 @@ export type SchemaName = string;
 
 type TableVisibilityType = string; // FIXME
 
-type FieldValue = any;
-type FieldValues = {
-    [id: FieldId]: FieldValue[]
-}
-
 // TODO: incomplete
 export type Table = {
     id:                      TableId,
@@ -37,8 +32,6 @@ export type Table = {
     segments:                Segment[],
     metrics:                 Metric[],
 
-    field_values:            FieldValues,
-
     rows:                    number,
 
     caveats:                 ?string,
diff --git a/frontend/src/metabase/meta/types/Visualization.js b/frontend/src/metabase/meta/types/Visualization.js
index 3e939d2de50bad825997db8f681bfb3d8e166647..a15f5ad0243ccee7dd10b45feb86d80ff1193fb0 100644
--- a/frontend/src/metabase/meta/types/Visualization.js
+++ b/frontend/src/metabase/meta/types/Visualization.js
@@ -2,6 +2,8 @@
 
 import type { DatasetData, Column } from "metabase/meta/types/Dataset";
 import type { Card, VisualizationSettings } from "metabase/meta/types/Card";
+import type { TableMetadata } from "metabase/meta/types/Metadata";
+import type { Field, FieldId } from "metabase/meta/types/Field";
 import Question from "metabase-lib/lib/Question";
 
 export type ActionCreator = (props: ClickActionProps) => ClickAction[]
@@ -89,5 +91,27 @@ export type VisualizationProps = {
     visualizationIsClickable: (?ClickObject) => boolean,
     onChangeCardAndRun: OnChangeCardAndRun,
 
-    onUpdateVisualizationSettings: ({ [key: string]: any }) => void
+    onUpdateVisualizationSettings: ({ [key: string]: any }) => void,
+
+    // object detail
+    tableMetadata: ?TableMetadata,
+    tableForeignKeys: ?ForeignKey[],
+    tableForeignKeyReferences: { [id: ForeignKeyId]: ForeignKeyCountInfo },
+    loadObjectDetailFKReferences: () => void,
+    followForeignKey: (fk: any) => void,
+}
+
+type ForeignKeyId = number;
+type ForeignKey = {
+    id: ForeignKeyId,
+    relationship: string,
+    origin: Field,
+    origin_id: FieldId,
+    destination: Field,
+    destination_id: FieldId,
 }
+
+type ForeignKeyCountInfo = {
+    status: number,
+    value: number,
+};
diff --git a/frontend/src/metabase/parameters/components/ParameterValueWidget.jsx b/frontend/src/metabase/parameters/components/ParameterValueWidget.jsx
index 6c9a4ba76db65b32cdccb8368310042c872ddec1..8ab0733679ce63b76cc13566999d748c895947b6 100644
--- a/frontend/src/metabase/parameters/components/ParameterValueWidget.jsx
+++ b/frontend/src/metabase/parameters/components/ParameterValueWidget.jsx
@@ -19,8 +19,9 @@ import TextWidget from "./widgets/TextWidget.jsx";
 import S from "./ParameterWidget.css";
 
 import cx from "classnames";
+import _ from "underscore"
 
-const WIDGETS = {
+const DATE_WIDGETS = {
     "date/single": DateSingleWidget,
     "date/range": DateRangeWidget,
     "date/relative": DateRelativeWidget,
@@ -67,10 +68,10 @@ export default class ParameterValueWidget extends Component {
     };
 
     static getWidget(parameter, values) {
-        if (values && values.length > 0) {
+        if (DATE_WIDGETS[parameter.type]) {
+            return DATE_WIDGETS[parameter.type];
+        } else if (values && values.length > 0) {
             return CategoryWidget;
-        } else if (WIDGETS[parameter.type]) {
-            return WIDGETS[parameter.type];
         } else {
             return TextWidget;
         }
@@ -86,7 +87,11 @@ export default class ParameterValueWidget extends Component {
     state = { isFocused: false };
 
     componentWillMount() {
-        this.updateFieldValues(this.props);
+        // In public dashboards we receive field values before mounting this component and
+        // without need to call `fetchFieldValues` separately
+        if (_.isEmpty(this.props.values)) {
+            this.updateFieldValues(this.props);
+        }
     }
 
     componentWillReceiveProps(nextProps) {
@@ -161,7 +166,7 @@ export default class ParameterValueWidget extends Component {
                     triggerElement={
                     <div ref="trigger" className={cx(S.parameter, className, { [S.selected]: hasValue })}>
                         { getParameterTypeIcon() }
-                        <div className="mr1 text-nowrap">{ hasValue ? Widget.format(value) : placeholderText }</div>
+                        <div className="mr1 text-nowrap">{ hasValue ? Widget.format(value, values) : placeholderText }</div>
                         { getWidgetStatusIcon() }
                     </div>
                 }
diff --git a/frontend/src/metabase/parameters/components/widgets/CategoryWidget.jsx b/frontend/src/metabase/parameters/components/widgets/CategoryWidget.jsx
index cd0ed8611693f42a081f469c89fc404fdbcbb814..3f07a25ce6b1e34b35cc706a685b5a7919cdd72b 100644
--- a/frontend/src/metabase/parameters/components/widgets/CategoryWidget.jsx
+++ b/frontend/src/metabase/parameters/components/widgets/CategoryWidget.jsx
@@ -6,8 +6,12 @@ import PropTypes from "prop-types";
 
 import { createMultiwordSearchRegex } from "metabase/lib/string";
 
+import { getHumanReadableValue } from "metabase/lib/query/field";
+
 import ListSearchField from "metabase/components/ListSearchField.jsx";
 
+import cx from "classnames";
+
 type Props = {
     value: any,
     values: any[],
@@ -52,19 +56,19 @@ export default class CategoryWidget extends Component {
         });
     }
 
-    static format(value) {
-        return value;
+    static format(value, fieldValues) {
+        return getHumanReadableValue(value, fieldValues);
     }
 
     render() {
-        let { values, setValue, onClose } = this.props;
+        let { value, values, setValue, onClose } = this.props;
 
         let filteredValues = [];
         let regex = this.state.searchRegex;
 
         if (regex) {
             for (const value of values) {
-                if (regex.test(value)) {
+                if (regex.test(value[0]) || regex.test(value[1])) {
                     filteredValues.push(value);
                 }
             }
@@ -85,13 +89,15 @@ export default class CategoryWidget extends Component {
                   </div>
                 }
                 <ul className="scroll-y scroll-show" style={{ maxHeight: 300 }}>
-                    {filteredValues.map(value =>
+                    {filteredValues.map(([rawValue, humanReadableValue]) =>
                         <li
-                            key={value}
-                            className="px2 py1 bg-brand-hover text-white-hover cursor-pointer"
-                            onClick={() => { setValue(value); onClose(); }}
+                            key={rawValue}
+                            className={cx("px2 py1 bg-brand-hover text-white-hover cursor-pointer", {
+                                "text-white bg-brand": rawValue === value
+                            })}
+                            onClick={() => { setValue(rawValue); onClose(); }}
                         >
-                            {value}
+                            {humanReadableValue || String(rawValue)}
                         </li>
                      )}
                 </ul>
diff --git a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx
index 132cbd1bac992129c37730f0b117f5d31164fd40..969d89bbf89f32fa121a6feca897a6f41ae4b9b0 100644
--- a/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx
+++ b/frontend/src/metabase/parameters/components/widgets/TextWidget.jsx
@@ -1,6 +1,8 @@
 /* eslint "react/prop-types": "warn" */
 import React, { Component } from "react";
+import ReactDOM from "react-dom";
 import PropTypes from "prop-types";
+import {forceRedraw} from "metabase/lib/dom";
 
 import { KEYCODE_ENTER, KEYCODE_ESCAPE } from "metabase/lib/keyboard";
 
@@ -33,7 +35,10 @@ export default class TextWidget extends Component {
 
     componentWillReceiveProps(nextProps) {
         if (this.props.value !== nextProps.value) {
-            this.setState({ value: nextProps.value });
+            this.setState({ value: nextProps.value }, () => {
+                // HACK: Address Safari rendering bug which causes https://github.com/metabase/metabase/issues/5335
+                forceRedraw(ReactDOM.findDOMNode(this));
+            });
         }
     }
 
diff --git a/frontend/src/metabase/pulse/components/CardPicker.jsx b/frontend/src/metabase/pulse/components/CardPicker.jsx
index 7d5d883862ed863853644fa679a9ca4893824539..ae4aadbe879448e064dfea9e2140ed4d8b693e08 100644
--- a/frontend/src/metabase/pulse/components/CardPicker.jsx
+++ b/frontend/src/metabase/pulse/components/CardPicker.jsx
@@ -10,18 +10,13 @@ import Query from "metabase/lib/query";
 import _ from "underscore";
 
 export default class CardPicker extends Component {
-    constructor(props, context) {
-        super(props, context);
-
-        this.state = {
-            isOpen: false,
-            inputValue: "",
-            inputWidth: 300,
-            collectionId: undefined,
-        };
+    state = {
+        isOpen: false,
+        inputValue: "",
+        inputWidth: 300,
+        collectionId: undefined,
+    };
 
-        _.bindAll(this, "onChange", "onInputChange", "onInputFocus", "onInputBlur");
-    }
 
     static propTypes = {
         cardList: PropTypes.array.isRequired,
@@ -32,15 +27,15 @@ export default class CardPicker extends Component {
         clearTimeout(this._timer);
     }
 
-    onInputChange(e) {
-        this.setState({ inputValue: e.target.value });
+    onInputChange = ({target}) => {
+        this.setState({ inputValue: target.value });
     }
 
-    onInputFocus() {
+    onInputFocus = () => {
         this.setState({ isOpen: true });
     }
 
-    onInputBlur() {
+    onInputBlur = () => {
         // Without a timeout here isOpen gets set to false when an item is clicked
         // which causes the click handler to not fire. For some reason this even
         // happens with a 100ms delay, but not 200ms?
@@ -54,7 +49,7 @@ export default class CardPicker extends Component {
         }, 250);
     }
 
-    onChange(id) {
+    onChange = (id) => {
         this.props.onChange(id);
         ReactDOM.findDOMNode(this.refs.input).blur();
     }
@@ -107,10 +102,11 @@ export default class CardPicker extends Component {
             .uniq(c => c && c.id)
             .filter(c => c)
             .sortBy("name")
+            // add "Everything else" as the last option for cards without a
+            // collection
+            .concat([{ id: null, name: "Everything else"}])
             .value();
 
-        collections.unshift({ id: null, name: "None" });
-
         let visibleCardList;
         if (inputValue) {
             let searchString = inputValue.toLowerCase();
@@ -162,11 +158,11 @@ export default class CardPicker extends Component {
                         </ul>
                     : collections ?
                         <CollectionList>
-                        {collections.map(collection =>
-                            <CollectionListItem collection={collection} onClick={(e) => {
-                                this.setState({ collectionId: collection.id, isClicking: true });
-                            }}/>
-                        )}
+                            {collections.map(collection =>
+                                <CollectionListItem collection={collection} onClick={(e) => {
+                                    this.setState({ collectionId: collection.id, isClicking: true });
+                                }}/>
+                            )}
                         </CollectionList>
                     : null }
                     </div>
diff --git a/frontend/src/metabase/pulse/components/PulseEditChannels.jsx b/frontend/src/metabase/pulse/components/PulseEditChannels.jsx
index 56363d46a080273da1a2387371a4a5c073569ebc..12aded69f39a92ed97f1ef125977bf08a6ff49b4 100644
--- a/frontend/src/metabase/pulse/components/PulseEditChannels.jsx
+++ b/frontend/src/metabase/pulse/components/PulseEditChannels.jsx
@@ -1,7 +1,6 @@
 /* eslint "react/prop-types": "warn" */
 import React, { Component } from "react";
 import PropTypes from "prop-types";
-
 import _ from "underscore";
 import { assoc, assocIn } from "icepick";
 
@@ -229,7 +228,7 @@ export default class PulseEditChannels extends Component {
                     <ul className="bg-grey-0 px3">{channels}</ul>
                 : channels.length > 0 && !channelSpec.configured ?
                     <div className="p4 text-centered">
-                        <h3>{channelSpec.name} needs to be set up by an administrator.</h3>
+                        <h3 className="mb2">{channelSpec.name} needs to be set up by an administrator.</h3>
                         <SetupMessage user={user} channels={[channelSpec.name]} />
                     </div>
                 : null
diff --git a/frontend/src/metabase/pulse/components/SetupMessage.jsx b/frontend/src/metabase/pulse/components/SetupMessage.jsx
index 416321fdfc6459670743075f335b6d0320e7c847..6b50514798e3baa15e1cdddff96c28bbe9d0636f 100644
--- a/frontend/src/metabase/pulse/components/SetupMessage.jsx
+++ b/frontend/src/metabase/pulse/components/SetupMessage.jsx
@@ -20,12 +20,10 @@ export default class SetupMessage extends Component {
         let content;
         if (user.is_superuser) {
             content = (
-                <div className="flex flex-column">
-                    <div className="ml-auto">
-                        {channels.map(c =>
-                            <Link to={"/admin/settings/"+c.toLowerCase()} key={c.toLowerCase()} className="Button Button--primary mr1" target={window.OSX ? null : "_blank"}>Configure {c}</Link>
-                        )}
-                    </div>
+                <div>
+                    {channels.map(c =>
+                        <Link to={"/admin/settings/"+c.toLowerCase()} key={c.toLowerCase()} className="Button Button--primary mr1" target={window.OSX ? null : "_blank"}>Configure {c}</Link>
+                    )}
                 </div>
             );
 
@@ -38,10 +36,6 @@ export default class SetupMessage extends Component {
                 </div>
             );
         }
-        return (
-            <div className="mx4 mb4">
-                {content}
-            </div>
-        );
+        return content;
     }
 }
diff --git a/frontend/src/metabase/pulse/components/SetupModal.jsx b/frontend/src/metabase/pulse/components/SetupModal.jsx
index c0a7edbf93a14aa7291ec26b38ed63968979d008..a32f78165fdb35a28f19bd46bf01fa4cf4e72ab8 100644
--- a/frontend/src/metabase/pulse/components/SetupModal.jsx
+++ b/frontend/src/metabase/pulse/components/SetupModal.jsx
@@ -17,7 +17,9 @@ export default class SetupModal extends Component {
                 onClose={this.props.onClose}
                 title={`To send pulses, ${ this.props.user.is_superuser ? "you'll need" : "an admin needs"} to set up email or Slack integration.`}
             >
-                <SetupMessage user={this.props.user} />
+                <div className="ml-auto mb4 mr4">
+                    <SetupMessage user={this.props.user} />
+                </div>
             </ModalContent>
         );
     }
diff --git a/frontend/src/metabase/qb/components/__support__/fixtures.js b/frontend/src/metabase/qb/components/__support__/fixtures.js
new file mode 100644
index 0000000000000000000000000000000000000000..d2ce186ac283ba61806ae3499ae71586274806dd
--- /dev/null
+++ b/frontend/src/metabase/qb/components/__support__/fixtures.js
@@ -0,0 +1,130 @@
+/* eslint-disable flowtype/require-valid-file-annotation */
+
+import { TYPE } from "metabase/lib/types";
+
+const FLOAT_FIELD = {
+    id: 1,
+    display_name: "Mock Float Field",
+    base_type: TYPE.Float
+};
+
+const CATEGORY_FIELD = {
+    id: 2,
+    display_name: "Mock Category Field",
+    base_type: TYPE.Text,
+    special_type: TYPE.Category
+};
+
+const DATE_FIELD = {
+    id: 3,
+    display_name: "Mock Date Field",
+    base_type: TYPE.DateTime
+};
+
+const PK_FIELD = {
+    id: 4,
+    display_name: "Mock PK Field",
+    base_type: TYPE.Integer,
+    special_type: TYPE.PK
+};
+
+const foreignTableMetadata = {
+    id: 20,
+    db_id: 100,
+    display_name: "Mock Foreign Table",
+    fields: []
+};
+
+const FK_FIELD = {
+    id: 5,
+    display_name: "Mock FK Field",
+    base_type: TYPE.Integer,
+    special_type: TYPE.FK,
+    target: {
+        id: 25,
+        table_id: foreignTableMetadata.id,
+        table: foreignTableMetadata
+    }
+};
+
+export const tableMetadata = {
+    id: 10,
+    db_id: 100,
+    display_name: "Mock Table",
+    fields: [FLOAT_FIELD, CATEGORY_FIELD, DATE_FIELD, PK_FIELD, FK_FIELD]
+};
+
+export const card = {
+    dataset_query: {
+        type: "query",
+        query: {
+            source_table: 10
+        }
+    }
+};
+
+export const nativeCard = {
+    dataset_query: {
+        type: "native",
+        native: {
+            query: "SELECT count(*) from ORDERS"
+        }
+    }
+};
+
+export const savedCard = {
+    id: 1,
+    dataset_query: {
+        type: "query",
+        query: {
+            source_table: 10
+        }
+    }
+};
+export const savedNativeCard = {
+    id: 2,
+    dataset_query: {
+        type: "native",
+        native: {
+            query: "SELECT count(*) from ORDERS"
+        }
+    }
+};
+
+export const clickedFloatHeader = {
+    column: {
+        ...FLOAT_FIELD,
+        source: "fields"
+    }
+};
+
+export const clickedCategoryHeader = {
+    column: {
+        ...CATEGORY_FIELD,
+        source: "fields"
+    }
+};
+
+export const clickedFloatValue = {
+    column: {
+        ...CATEGORY_FIELD,
+        source: "fields"
+    },
+    value: 1234
+};
+
+export const clickedPKValue = {
+    column: {
+        ...PK_FIELD,
+        source: "fields"
+    },
+    value: 42
+};
+
+export const clickedFKValue = {
+    column: {
+        ...FK_FIELD,
+        source: "fields"
+    },
+    value: 43
+};
diff --git a/frontend/src/metabase/qb/components/actions/CompoundQueryAction.jsx b/frontend/src/metabase/qb/components/actions/CompoundQueryAction.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..3125a354de3dee72eb6cf514d43146d6ebea5e5a
--- /dev/null
+++ b/frontend/src/metabase/qb/components/actions/CompoundQueryAction.jsx
@@ -0,0 +1,21 @@
+/* @flow */
+
+import type {
+    ClickAction,
+    ClickActionProps
+} from "metabase/meta/types/Visualization";
+
+export default ({ question }: ClickActionProps): ClickAction[] => {
+    console.log(question);
+    if (question.id()) {
+        return [
+            {
+                name: "nest-query",
+                title: "Analyze the results of this Query",
+                icon: "table",
+                question: () => question.composeThisQuery()
+            }
+        ];
+    }
+    return [];
+};
diff --git a/frontend/src/metabase/qb/components/actions/PivotByAction.jsx b/frontend/src/metabase/qb/components/actions/PivotByAction.jsx
index 6f79186c1216f550e4969cb31b54393f1aece064..20f3d8e7b2c14efba23b59a5a6a2a6861c4b0d49 100644
--- a/frontend/src/metabase/qb/components/actions/PivotByAction.jsx
+++ b/frontend/src/metabase/qb/components/actions/PivotByAction.jsx
@@ -66,7 +66,7 @@ export default (name: string, icon: string, fieldFilter: FieldFilter) =>
                         fieldOptions={breakoutOptions}
                         onCommitBreakout={breakout => {
                             const nextCard = question
-                                .pivot(breakout, dimensions)
+                                .pivot([breakout], dimensions)
                                 .card();
 
                             if (nextCard) {
diff --git a/frontend/src/metabase/qb/components/actions/SummarizeBySegmentMetricAction.jsx b/frontend/src/metabase/qb/components/actions/SummarizeBySegmentMetricAction.jsx
index a2fa9973ae7ee27e51f557bed3cda85f6d30d850..d94f101415c3fcb061cce4845d64e6270a1dc0dc 100644
--- a/frontend/src/metabase/qb/components/actions/SummarizeBySegmentMetricAction.jsx
+++ b/frontend/src/metabase/qb/components/actions/SummarizeBySegmentMetricAction.jsx
@@ -12,6 +12,16 @@ import type {
 } from "metabase/meta/types/Visualization";
 import type { TableMetadata } from "metabase/meta/types/Metadata";
 
+const omittedAggregations = ["rows", "cum_sum", "cum_count", "stddev"];
+const getAggregationOptionsForSummarize = query => {
+    return query
+        .table()
+        .aggregations()
+        .filter(
+            aggregation => !omittedAggregations.includes(aggregation.short)
+        );
+};
+
 export default ({ question }: ClickActionProps): ClickAction[] => {
     const query = question.query();
     if (!(query instanceof StructuredQuery)) {
@@ -33,7 +43,9 @@ export default ({ question }: ClickActionProps): ClickAction[] => {
                     query={query}
                     tableMetadata={tableMetadata}
                     customFields={query.expressions()}
-                    availableAggregations={query.table().aggregation_options}
+                    availableAggregations={getAggregationOptionsForSummarize(
+                        query
+                    )}
                     onCommitAggregation={aggregation => {
                         onChangeCardAndRun({
                             nextCard: question.summarize(aggregation).card()
@@ -41,6 +53,7 @@ export default ({ question }: ClickActionProps): ClickAction[] => {
                         onClose && onClose();
                     }}
                     onClose={onClose}
+                    showOnlyProvidedAggregations
                 />
             )
         }
diff --git a/frontend/src/metabase/qb/components/actions/XRaySegment.jsx b/frontend/src/metabase/qb/components/actions/XRaySegment.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..c157dce81e9e6b20b6c36a1faf95ddd0ac628639
--- /dev/null
+++ b/frontend/src/metabase/qb/components/actions/XRaySegment.jsx
@@ -0,0 +1,26 @@
+/* @flow */
+/*
+ * NOTE(@kdoh) 8/5/2017 - Disabling this file until we add Card XRay support
+ * import type {
+    ClickAction,
+    ClickActionProps
+} from "metabase/meta/types/Visualization";
+
+export default ({ card, tableMetadata }: ClickActionProps): ClickAction[] => {
+    console.log(card);
+    if (card.id) {
+        return [
+            {
+                name: "underlying-data",
+                title: "XRay this Card",
+                icon: "table",
+                url: () => {
+                    return "/xray/card/" + card.id;
+                }
+            }
+        ];
+    } else {
+        return [];
+    }
+};
+*/
diff --git a/frontend/src/metabase/qb/components/actions/index.js b/frontend/src/metabase/qb/components/actions/index.js
index bfbda41978d864fec0e45d3d4ac9cc32fc7baa8c..4ffc2879f3a722324b9ba3cef3a26d66036b0eaa 100644
--- a/frontend/src/metabase/qb/components/actions/index.js
+++ b/frontend/src/metabase/qb/components/actions/index.js
@@ -3,4 +3,8 @@
 import UnderlyingDataAction from "./UnderlyingDataAction";
 import UnderlyingRecordsAction from "./UnderlyingRecordsAction";
 
-export const DEFAULT_ACTIONS = [UnderlyingDataAction, UnderlyingRecordsAction];
+export const DEFAULT_ACTIONS = [
+    UnderlyingDataAction,
+    UnderlyingRecordsAction
+    // XRaySegment
+];
diff --git a/frontend/src/metabase/qb/components/drill/CountByColumnDrill.js b/frontend/src/metabase/qb/components/drill/CountByColumnDrill.js
index 0267d4389f9dd384d8ca484a3cf05665077b2cab..1238a4f115bbe02fe1a2f7ff55dcb043f76777a7 100644
--- a/frontend/src/metabase/qb/components/drill/CountByColumnDrill.js
+++ b/frontend/src/metabase/qb/components/drill/CountByColumnDrill.js
@@ -30,7 +30,7 @@ export default ({ question, clicked }: ClickActionProps): ClickAction[] => {
             question: () =>
                 question
                     .summarize(["count"])
-                    .pivot(getFieldRefFromColumn(column))
+                    .pivot([getFieldRefFromColumn(column)])
         }
     ];
 };
diff --git a/frontend/src/metabase/qb/components/drill/SummarizeColumnByTimeDrill.js b/frontend/src/metabase/qb/components/drill/SummarizeColumnByTimeDrill.js
index 57dac6e7c7d70d8f9b377555122958725caef78c..ad39e438eb97309af30287a115b334dccf6389b1 100644
--- a/frontend/src/metabase/qb/components/drill/SummarizeColumnByTimeDrill.js
+++ b/frontend/src/metabase/qb/components/drill/SummarizeColumnByTimeDrill.js
@@ -4,7 +4,11 @@ import React from "react";
 
 import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery";
 import { getFieldRefFromColumn } from "metabase/qb/lib/actions";
-import { isNumeric, isDate } from "metabase/lib/schema_metadata";
+import {
+    isDate,
+    getAggregator,
+    isCompatibleAggregatorForField
+} from "metabase/lib/schema_metadata";
 import { capitalize } from "metabase/lib/formatting";
 
 import type {
@@ -20,28 +24,34 @@ export default ({ question, clicked }: ClickActionProps): ClickAction[] => {
 
     const dateField = query.table().fields.filter(isDate)[0];
     if (
-        !dateField ||
-        !clicked ||
-        !clicked.column ||
-        clicked.value !== undefined ||
-        !isNumeric(clicked.column)
+        !dateField || !clicked || !clicked.column || clicked.value !== undefined
     ) {
         return [];
     }
     const { column } = clicked;
 
-    return ["sum", "count"].map(aggregation => ({
-        name: "summarize-by-time",
-        section: "sum",
-        title: <span>{capitalize(aggregation)} by time</span>,
-        question: () =>
-            question
-                .summarize([aggregation, getFieldRefFromColumn(column)])
-                .pivot([
-                    "datetime-field",
-                    getFieldRefFromColumn(dateField),
-                    "as",
-                    "day"
-                ])
-    }));
+    return ["sum", "count"]
+        .map(getAggregator)
+        .filter(aggregator =>
+            isCompatibleAggregatorForField(aggregator, column))
+        .map(aggregator => ({
+            name: "summarize-by-time",
+            section: "sum",
+            title: <span>{capitalize(aggregator.short)} by time</span>,
+            question: () =>
+                question
+                    .summarize(
+                        aggregator.requiresField
+                            ? [aggregator.short, getFieldRefFromColumn(column)]
+                            : [aggregator.short]
+                    )
+                    .pivot([
+                        [
+                            "datetime-field",
+                            getFieldRefFromColumn(dateField),
+                            "as",
+                            "day"
+                        ]
+                    ])
+        }));
 };
diff --git a/frontend/src/metabase/qb/components/drill/SummarizeColumnDrill.js b/frontend/src/metabase/qb/components/drill/SummarizeColumnDrill.js
index 3789339b0122c2d04f38323178607bd1d52449aa..921ee52ab544bb5d35476198d517865f16385e80 100644
--- a/frontend/src/metabase/qb/components/drill/SummarizeColumnDrill.js
+++ b/frontend/src/metabase/qb/components/drill/SummarizeColumnDrill.js
@@ -1,7 +1,10 @@
 /* @flow */
 
 import { getFieldRefFromColumn } from "metabase/qb/lib/actions";
-import { isNumeric } from "metabase/lib/schema_metadata";
+import {
+    getAggregator,
+    isCompatibleAggregatorForField
+} from "metabase/lib/schema_metadata";
 
 import type {
     ClickAction,
@@ -36,21 +39,34 @@ export default ({ question, clicked }: ClickActionProps): ClickAction[] => {
         !clicked ||
         !clicked.column ||
         clicked.value !== undefined ||
-        clicked.column.source !== "fields" ||
-        !isNumeric(clicked.column)
+        clicked.column.source !== "fields"
     ) {
+        // TODO Atte Keinänen 7/21/17: Does removing the `isSummable` condition here, and relying on the
+        // `isCompatibleAggregatorForField` check below instead, noticeably slow down the drill-through option calculations?
         return [];
     }
     const { column } = clicked;
 
-    // $FlowFixMe
-    return Object.entries(AGGREGATIONS).map(([aggregation, action]: [string, {
-        section: string,
-        title: string
-    }]) => ({
-        name: action.title.toLowerCase(),
-        ...action,
-        question: () =>
-            question.summarize([aggregation, getFieldRefFromColumn(column)])
-    }));
+    return (
+        Object.entries(AGGREGATIONS)
+            .map(([aggregationShort, action]) => [
+                getAggregator(aggregationShort),
+                action
+            ])
+            .filter(([aggregator]) =>
+                isCompatibleAggregatorForField(aggregator, column))
+            // $FlowFixMe
+            .map(([aggregator, action]: [any, {
+                section: string,
+                title: string
+            }]) => ({
+                name: action.title.toLowerCase(),
+                ...action,
+                question: () =>
+                    question.summarize([
+                        aggregator.short,
+                        getFieldRefFromColumn(column)
+                    ])
+            }))
+    );
 };
diff --git a/frontend/src/metabase/qb/components/drill/TimeseriesPivotDrill.jsx b/frontend/src/metabase/qb/components/drill/ZoomDrill.jsx
similarity index 76%
rename from frontend/src/metabase/qb/components/drill/TimeseriesPivotDrill.jsx
rename to frontend/src/metabase/qb/components/drill/ZoomDrill.jsx
index d31629fd848ccbd087edff42a04a5e55aaea534a..5e1e45280f3d1ebf6f193171697b45dce69c98d2 100644
--- a/frontend/src/metabase/qb/components/drill/TimeseriesPivotDrill.jsx
+++ b/frontend/src/metabase/qb/components/drill/ZoomDrill.jsx
@@ -9,7 +9,7 @@ import type {
 
 export default ({ question, clicked }: ClickActionProps): ClickAction[] => {
     const dimensions = (clicked && clicked.dimensions) || [];
-    const drilldown = drillDownForDimensions(dimensions);
+    const drilldown = drillDownForDimensions(dimensions, question.metadata());
     if (!drilldown) {
         return [];
     }
@@ -19,7 +19,7 @@ export default ({ question, clicked }: ClickActionProps): ClickAction[] => {
             name: "timeseries-zoom",
             section: "zoom",
             title: "Zoom in",
-            question: () => question.pivot(drilldown.breakout, dimensions)
+            question: () => question.pivot(drilldown.breakouts, dimensions)
         }
     ];
 };
diff --git a/frontend/src/metabase/qb/components/drill/index.js b/frontend/src/metabase/qb/components/drill/index.js
index e02e20d305a0b46fc56700c43d0ee162f0609a55..1a1fb85fb599ef66f732d8f47c12c1299e1e5b0f 100644
--- a/frontend/src/metabase/qb/components/drill/index.js
+++ b/frontend/src/metabase/qb/components/drill/index.js
@@ -4,8 +4,10 @@ import SortAction from "./SortAction";
 import ObjectDetailDrill from "./ObjectDetailDrill";
 import QuickFilterDrill from "./QuickFilterDrill";
 import UnderlyingRecordsDrill from "./UnderlyingRecordsDrill";
+import ZoomDrill from "./ZoomDrill";
 
 export const DEFAULT_DRILLS = [
+    ZoomDrill,
     SortAction,
     ObjectDetailDrill,
     QuickFilterDrill,
diff --git a/frontend/src/metabase/qb/components/gui/AggregationPopover.jsx b/frontend/src/metabase/qb/components/gui/AggregationPopover.jsx
index 7e5e0bf1b9fd81dd59c7189db6736f7c7cb25191..4e9a0bbd1508094d3e5539b807188d25d2ed8c38 100644
--- a/frontend/src/metabase/qb/components/gui/AggregationPopover.jsx
+++ b/frontend/src/metabase/qb/components/gui/AggregationPopover.jsx
@@ -12,7 +12,9 @@ type Props = {
     tableMetadata: TableMetadata,
     customFields: { [key: ExpressionName]: any },
     onCommitAggregation: (aggregation: Aggregation) => void,
-    onClose?: () => void
+    onClose?: () => void,
+    availableAggregations: [Aggregation],
+    showOnlyProvidedAggregations: boolean
 };
 
 const AggregationPopover = (props: Props) => (
diff --git a/frontend/src/metabase/qb/components/modes/NativeMode.jsx b/frontend/src/metabase/qb/components/modes/NativeMode.jsx
index debf0cea4f81a0313d1a391be1afe77d97096249..af121db1fef86ce0059d61eecccfb7ba1f46f323 100644
--- a/frontend/src/metabase/qb/components/modes/NativeMode.jsx
+++ b/frontend/src/metabase/qb/components/modes/NativeMode.jsx
@@ -1,10 +1,11 @@
 /* @flow */
 
 import type { QueryMode } from "metabase/meta/types/Visualization";
+import CompoundQueryAction from "../actions/CompoundQueryAction";
 
 const NativeMode: QueryMode = {
     name: "native",
-    actions: [],
+    actions: [CompoundQueryAction],
     drills: []
 };
 
diff --git a/frontend/src/metabase/qb/components/modes/ObjectMode.jsx b/frontend/src/metabase/qb/components/modes/ObjectMode.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..13f551d8faee2fafead49aaa89e1bcf88c86f254
--- /dev/null
+++ b/frontend/src/metabase/qb/components/modes/ObjectMode.jsx
@@ -0,0 +1,13 @@
+/* @flow */
+
+import ObjectDetailDrill from "../drill/ObjectDetailDrill";
+
+import type { QueryMode } from "metabase/meta/types/Visualization";
+
+const ObjectMode: QueryMode = {
+    name: "object",
+    actions: [],
+    drills: [ObjectDetailDrill]
+};
+
+export default ObjectMode;
diff --git a/frontend/src/metabase/qb/components/modes/SegmentMode.jsx b/frontend/src/metabase/qb/components/modes/SegmentMode.jsx
index 93eccd58ca0ffe8d754e568150816fd0ae7c65eb..4cb9150614fe14530841fd7d17c5e84cffe523e5 100644
--- a/frontend/src/metabase/qb/components/modes/SegmentMode.jsx
+++ b/frontend/src/metabase/qb/components/modes/SegmentMode.jsx
@@ -18,6 +18,7 @@ const SegmentMode: QueryMode = {
     name: "segment",
     actions: [
         ...DEFAULT_ACTIONS,
+        // XRaySegment,
         CommonMetricsAction,
         CountByTimeAction,
         SummarizeBySegmentMetricAction
diff --git a/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx b/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
index c3598848b5ccedae81008bfd6b77ceeacb96f6d8..9b749eafc9c9e0e40aca34ec4231e7424ee1b59b 100644
--- a/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
+++ b/frontend/src/metabase/qb/components/modes/TimeseriesMode.jsx
@@ -16,8 +16,6 @@ import PivotByLocationAction from "../actions/PivotByLocationAction";
 import PivotByCategoryDrill from "../drill/PivotByCategoryDrill";
 import PivotByLocationDrill from "../drill/PivotByLocationDrill";
 
-import TimeseriesPivotDrill from "../drill/TimeseriesPivotDrill";
-
 import type { QueryMode } from "metabase/meta/types/Visualization";
 import type {
     Card as CardObject,
@@ -48,12 +46,7 @@ export const TimeseriesModeFooter = (props: Props) => {
 const TimeseriesMode: QueryMode = {
     name: "timeseries",
     actions: [PivotByCategoryAction, PivotByLocationAction, ...DEFAULT_ACTIONS],
-    drills: [
-        TimeseriesPivotDrill,
-        PivotByCategoryDrill,
-        PivotByLocationDrill,
-        ...DEFAULT_DRILLS
-    ],
+    drills: [PivotByCategoryDrill, PivotByLocationDrill, ...DEFAULT_DRILLS],
     ModeFooter: TimeseriesModeFooter
 };
 
diff --git a/frontend/src/metabase/qb/lib/actions.js b/frontend/src/metabase/qb/lib/actions.js
index a6ea7fef9061cadfd1aaf7e0c1c53885f27d838e..2bd1c05b0e01663c33754a9b3611c751abbf75d5 100644
--- a/frontend/src/metabase/qb/lib/actions.js
+++ b/frontend/src/metabase/qb/lib/actions.js
@@ -1,6 +1,7 @@
 /* @flow weak */
 
 import moment from "moment";
+import _ from "underscore";
 
 import Q from "metabase/lib/query"; // legacy query lib
 import { fieldIdsEq } from "metabase/lib/query/util";
@@ -9,12 +10,22 @@ import * as Query from "metabase/lib/query/query";
 import * as Field from "metabase/lib/query/field";
 import * as Filter from "metabase/lib/query/filter";
 import { startNewCard } from "metabase/lib/card";
-import { isDate, isState, isCountry } from "metabase/lib/schema_metadata";
+import { rangeForValue } from "metabase/lib/dataset";
+import {
+    isDate,
+    isState,
+    isCountry,
+    isCoordinate
+} from "metabase/lib/schema_metadata";
 import Utils from "metabase/lib/utils";
 
 import type Table from "metabase-lib/lib/metadata/Table";
 import type { Card as CardObject } from "metabase/meta/types/Card";
-import type { StructuredQuery, FieldFilter } from "metabase/meta/types/Query";
+import type {
+    StructuredQuery,
+    FieldFilter,
+    Breakout
+} from "metabase/meta/types/Query";
 import type { DimensionValue } from "metabase/meta/types/Visualization";
 import { parseTimestamp } from "metabase/lib/time";
 
@@ -41,11 +52,11 @@ export const toUnderlyingRecords = (card: CardObject): ?CardObject => {
     }
 };
 
-export const getFieldRefFromColumn = col => {
+export const getFieldRefFromColumn = (col, fieldId = col.id) => {
     if (col.fk_field_id != null) {
-        return ["fk->", col.fk_field_id, col.id];
+        return ["fk->", col.fk_field_id, fieldId];
     } else {
-        return ["field-id", col.id];
+        return ["field-id", fieldId];
     }
 };
 
@@ -97,7 +108,17 @@ const drillFilter = (card, value, column) => {
             parseTimestamp(value, column.unit).toISOString()
         ];
     } else {
-        filter = ["=", getFieldRefFromColumn(column), value];
+        const range = rangeForValue(value, column);
+        if (range) {
+            filter = [
+                "BETWEEN",
+                getFieldRefFromColumn(column),
+                range[0],
+                range[1]
+            ];
+        } else {
+            filter = ["=", getFieldRefFromColumn(column), value];
+        }
     }
 
     return addOrUpdateFilter(card, filter);
@@ -163,41 +184,7 @@ const getNextUnit = unit => {
     return UNITS[Math.max(0, UNITS.indexOf(unit) - 1)];
 };
 
-export const drillDownForDimensions = dimensions => {
-    const timeDimensions = dimensions.filter(
-        dimension => dimension.column.unit
-    );
-    if (timeDimensions.length === 1) {
-        const column = timeDimensions[0].column;
-        let nextUnit = getNextUnit(column.unit);
-        if (nextUnit && nextUnit !== column.unit) {
-            return {
-                name: column.unit,
-                breakout: [
-                    "datetime-field",
-                    getFieldRefFromColumn(column),
-                    "as", // TODO - this is deprecated and should be removed. See https://github.com/metabase/metabase/wiki/Query-Language-'98#datetime-field
-                    nextUnit
-                ]
-            };
-        }
-    }
-};
-
-export const drillTimeseriesFilter = (card, value, column) => {
-    const newCard = drillFilter(card, value, column);
-
-    let nextUnit = UNITS[Math.max(0, UNITS.indexOf(column.unit) - 1)];
-
-    newCard.dataset_query.query.breakout[0] = [
-        "datetime-field",
-        card.dataset_query.query.breakout[0][1],
-        "as",
-        nextUnit
-    ];
-
-    return newCard;
-};
+export { drillDownForDimensions } from "./drilldown";
 
 export const drillUnderlyingRecords = (card, dimensions) => {
     for (const dimension of dimensions) {
@@ -310,15 +297,32 @@ export const updateDateTimeFilter = (card, column, start, end): CardObject => {
     }
 };
 
-export const updateNumericFilter = (card, column, start, end) => {
+export function updateLatLonFilter(
+    card,
+    latitudeColumn,
+    longitudeColumn,
+    bounds
+) {
+    return addOrUpdateFilter(card, [
+        "INSIDE",
+        latitudeColumn.id,
+        longitudeColumn.id,
+        bounds.getNorth(),
+        bounds.getWest(),
+        bounds.getSouth(),
+        bounds.getEast()
+    ]);
+}
+
+export function updateNumericFilter(card, column, start, end) {
     const fieldRef = getFieldRefFromColumn(column);
     return addOrUpdateFilter(card, ["BETWEEN", fieldRef, start, end]);
-};
+}
 
 export const pivot = (
     card: CardObject,
-    breakout,
     tableMetadata: Table,
+    breakouts: Breakout[] = [],
     dimensions: DimensionValue[] = []
 ): ?CardObject => {
     if (card.dataset_query.type !== "query") {
@@ -344,10 +348,12 @@ export const pivot = (
         }
     }
 
-    newCard.dataset_query.query = Query.addBreakout(
-        newCard.dataset_query.query,
-        breakout
-    );
+    for (const breakout of breakouts) {
+        newCard.dataset_query.query = Query.addBreakout(
+            newCard.dataset_query.query,
+            breakout
+        );
+    }
 
     guessVisualization(newCard, tableMetadata);
 
@@ -395,6 +401,13 @@ const guessVisualization = (card: CardObject, tableMetadata: Table) => {
         if (!VISUALIZATIONS_TWO_BREAKOUTS.has(card.display)) {
             if (isDate(breakoutFields[0])) {
                 card.display = "line";
+            } else if (_.all(breakoutFields, isCoordinate)) {
+                card.display = "map";
+                // NOTE Atte Keinänen 8/2/17: Heat/grid maps disabled in the first merged version of binning
+                // Currently show a pin map instead of heat map for double coordinate breakout
+                // This way the binning drill-through works in a somewhat acceptable way (although it is designed for heat maps)
+                card.visualization_settings["map.type"] = "pin";
+                // card.visualization_settings["map.type"] = "grid";
             } else {
                 card.display = "bar";
             }
diff --git a/frontend/src/metabase/qb/lib/drilldown.js b/frontend/src/metabase/qb/lib/drilldown.js
new file mode 100644
index 0000000000000000000000000000000000000000..a20612ef85547e8fb05161195fd4c21bf4c4a579
--- /dev/null
+++ b/frontend/src/metabase/qb/lib/drilldown.js
@@ -0,0 +1,244 @@
+/* @flow */
+
+import { isa, TYPE } from "metabase/lib/types";
+import {
+    isLatitude,
+    isLongitude,
+    isDate,
+    isAny
+} from "metabase/lib/schema_metadata";
+import { getFieldRefFromColumn } from "./actions";
+
+import _ from "underscore";
+import { getIn } from "icepick";
+
+// Helpers for defining drill-down progressions
+const CategoryDrillDown = type => [field => isa(field.special_type, type)];
+const DateTimeDrillDown = unit => [["datetime-field", isDate, unit]];
+const LatLonDrillDown = binWidth => [
+    ["binning-strategy", isLatitude, "bin-width", binWidth],
+    ["binning-strategy", isLongitude, "bin-width", binWidth]
+];
+
+/**
+ * Defines the built-in drill-down progressions
+ */
+const DEFAULT_DRILL_DOWN_PROGRESSIONS = [
+    // DateTime drill downs
+    [
+        DateTimeDrillDown("year"),
+        DateTimeDrillDown("quarter"),
+        DateTimeDrillDown("month"),
+        DateTimeDrillDown("week"),
+        DateTimeDrillDown("day"),
+        DateTimeDrillDown("hour"),
+        DateTimeDrillDown("minute")
+    ],
+    // Country => State => City
+    [
+        CategoryDrillDown(TYPE.Country),
+        CategoryDrillDown(TYPE.State)
+        // CategoryDrillDown(TYPE.City)
+    ],
+    // Country, State, or City => LatLon
+    [CategoryDrillDown(TYPE.Country), LatLonDrillDown(10)],
+    [CategoryDrillDown(TYPE.State), LatLonDrillDown(1)],
+    [CategoryDrillDown(TYPE.City), LatLonDrillDown(0.1)],
+    // LatLon drill downs
+    [
+        LatLonDrillDown(30),
+        LatLonDrillDown(10),
+        LatLonDrillDown(1),
+        LatLonDrillDown(0.1),
+        LatLonDrillDown(0.01)
+    ],
+    [
+        [
+            ["binning-strategy", isLatitude, "num-bins", () => true],
+            ["binning-strategy", isLongitude, "num-bins", () => true]
+        ],
+        LatLonDrillDown(1)
+    ],
+    // generic num-bins drill down
+    [
+        [["binning-strategy", isAny, "num-bins", () => true]],
+        [["binning-strategy", isAny, "default"]]
+    ],
+    // generic bin-width drill down
+    [
+        [["binning-strategy", isAny, "bin-width", () => true]],
+        [
+            [
+                "binning-strategy",
+                isAny,
+                "bin-width",
+                (previous: number) => previous / 10
+            ]
+        ]
+    ]
+];
+
+/**
+ * Returns the next drill down for the current dimension objects
+ */
+export function drillDownForDimensions(dimensions: any, metadata: any) {
+    const table = metadata && tableForDimensions(dimensions, metadata);
+
+    for (const drillProgression of DEFAULT_DRILL_DOWN_PROGRESSIONS) {
+        for (let index = 0; index < drillProgression.length - 1; index++) {
+            const currentDrillBreakoutTemplates = drillProgression[index];
+            const nextDrillBreakoutTemplates = drillProgression[index + 1];
+            if (
+                breakoutTemplatesMatchDimensions(
+                    currentDrillBreakoutTemplates,
+                    dimensions
+                )
+            ) {
+                const breakouts = breakoutsForBreakoutTemplates(
+                    nextDrillBreakoutTemplates,
+                    dimensions,
+                    table
+                );
+                if (breakouts) {
+                    return {
+                        breakouts: breakouts
+                    };
+                }
+            }
+        }
+    }
+    return null;
+}
+
+// Returns true if the supplied dimension object matches the supplied breakout template.
+function breakoutTemplateMatchesDimension(breakoutTemplate, dimension) {
+    const breakout = columnToBreakout(dimension.column);
+    if (Array.isArray(breakoutTemplate) !== Array.isArray(breakout)) {
+        return false;
+    }
+    if (Array.isArray(breakoutTemplate)) {
+        if (!breakoutTemplate[1](dimension.column)) {
+            return false;
+        }
+        for (let i = 2; i < breakoutTemplate.length; i++) {
+            if (typeof breakoutTemplate[i] === "function") {
+                // $FlowFixMe
+                if (!breakoutTemplate[i](breakout[i])) {
+                    return false;
+                }
+            } else {
+                // $FlowFixMe
+                if (breakoutTemplate[i] !== breakout[i]) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    } else {
+        return breakoutTemplate(dimension.column);
+    }
+}
+
+// Returns true if all breakout templates having a matching dimension object, but disregarding order
+function breakoutTemplatesMatchDimensions(breakoutTemplates, dimensions) {
+    dimensions = [...dimensions];
+    return _.all(breakoutTemplates, breakoutTemplate => {
+        const index = _.findIndex(dimensions, dimension =>
+            breakoutTemplateMatchesDimension(breakoutTemplate, dimension));
+        if (index >= 0) {
+            dimensions.splice(index, 1);
+            return true;
+        } else {
+            return false;
+        }
+    });
+}
+
+// Evaluates a breakout template, returning a completed breakout clause
+function breakoutForBreakoutTemplate(breakoutTemplate, dimensions, table) {
+    let fieldFilter = Array.isArray(breakoutTemplate)
+        ? breakoutTemplate[1]
+        : breakoutTemplate;
+    let dimensionColumns = dimensions.map(d => d.column);
+    let field = _.find(dimensionColumns, fieldFilter) ||
+        _.find(table.fields, fieldFilter);
+    if (!field) {
+        return null;
+    }
+    const fieldRef = getFieldRefFromColumn(dimensions[0].column, field.id);
+    if (Array.isArray(breakoutTemplate)) {
+        const prevDimension = _.find(dimensions, dimension =>
+            breakoutTemplateMatchesDimension(breakoutTemplate, dimension));
+        const breakout = [breakoutTemplate[0], fieldRef];
+        for (let i = 2; i < breakoutTemplate.length; i++) {
+            const arg = breakoutTemplate[i];
+            if (typeof arg === "function") {
+                if (!prevDimension) {
+                    return null;
+                }
+                const prevBreakout = columnToBreakout(prevDimension.column);
+                // $FlowFixMe
+                breakout.push(arg(prevBreakout[i]));
+            } else {
+                breakout.push(arg);
+            }
+        }
+        return breakout;
+    } else {
+        return fieldRef;
+    }
+}
+
+// Evaluates all the breakout templates of a drill
+function breakoutsForBreakoutTemplates(breakoutTemplates, dimensions, table) {
+    const breakouts = [];
+    for (const breakoutTemplate of breakoutTemplates) {
+        const breakout = breakoutForBreakoutTemplate(
+            breakoutTemplate,
+            dimensions,
+            table
+        );
+        if (!breakout) {
+            return null;
+        }
+        breakouts.push(breakout);
+    }
+    return breakouts;
+}
+
+// Guesses the breakout corresponding to the provided columm object
+function columnToBreakout(column) {
+    if (column.unit) {
+        return ["datetime-field", column.id, column.unit];
+    } else if (column.binning_info) {
+        let binningStrategy = column.binning_info.binning_strategy;
+
+        switch (binningStrategy) {
+            case "bin-width":
+                return [
+                    "binning-strategy",
+                    column.id,
+                    "bin-width",
+                    column.binning_info.bin_width
+                ];
+            case "num-bins":
+                return [
+                    "binning-strategy",
+                    column.id,
+                    "num-bins",
+                    column.binning_info.num_bins
+                ];
+            default:
+                return null;
+        }
+    } else {
+        return column.id;
+    }
+}
+
+// returns the table metadata for a dimension
+function tableForDimensions(dimensions, metadata) {
+    const fieldId = getIn(dimensions, [0, "column", "id"]);
+    const field = metadata.fields[fieldId];
+    return field && field.table;
+}
diff --git a/frontend/src/metabase/qb/lib/modes.js b/frontend/src/metabase/qb/lib/modes.js
index 244f253b95f36fbec16ad656166419b021ca94de..93eb6295d97b708cb767299c12b147c38e7c9339 100644
--- a/frontend/src/metabase/qb/lib/modes.js
+++ b/frontend/src/metabase/qb/lib/modes.js
@@ -1,10 +1,16 @@
 /* @flow weak */
 
-import Q from "metabase/lib/query"; // legacy query lib
-import { isDate, isAddress, isCategory } from "metabase/lib/schema_metadata";
+import Q_DEPRECATED from "metabase/lib/query"; // legacy query lib
+import {
+    isDate,
+    isAddress,
+    isCategory,
+    isPK
+} from "metabase/lib/schema_metadata";
 import * as Query from "metabase/lib/query/query";
 import * as Card from "metabase/meta/Card";
 
+import ObjectMode from "../components/modes/ObjectMode";
 import SegmentMode from "../components/modes/SegmentMode";
 import MetricMode from "../components/modes/MetricMode";
 import TimeseriesMode from "../components/modes/TimeseriesMode";
@@ -17,6 +23,8 @@ import type { Card as CardObject } from "metabase/meta/types/Card";
 import type { TableMetadata } from "metabase/meta/types/Metadata";
 import type { QueryMode } from "metabase/meta/types/Visualization";
 
+import _ from "underscore";
+
 export function getMode(
     card: CardObject,
     tableMetadata: ?TableMetadata
@@ -37,9 +45,30 @@ export function getMode(
 
         const aggregations = Query.getAggregations(query);
         const breakouts = Query.getBreakouts(query);
+        const filters = Query.getFilters(query);
 
         if (aggregations.length === 0 && breakouts.length === 0) {
-            return SegmentMode;
+            const isPKFilter = filter => {
+                if (
+                    tableMetadata && Array.isArray(filter) && filter[0] === "="
+                ) {
+                    const fieldId = Q_DEPRECATED.getFieldTargetId(filter[1]);
+                    const field = tableMetadata.fields_lookup[fieldId];
+                    if (
+                        field &&
+                        field.table.id === query.source_table &&
+                        isPK(field)
+                    ) {
+                        return true;
+                    }
+                }
+                return false;
+            };
+            if (_.any(filters, isPKFilter)) {
+                return ObjectMode;
+            } else {
+                return SegmentMode;
+            }
         }
         if (aggregations.length > 0 && breakouts.length === 0) {
             return MetricMode;
@@ -47,7 +76,8 @@ export function getMode(
         if (aggregations.length > 0 && breakouts.length > 0) {
             let breakoutFields = breakouts.map(
                 breakout =>
-                    (Q.getFieldTarget(breakout, tableMetadata) || {}).field
+                    (Q_DEPRECATED.getFieldTarget(breakout, tableMetadata) || {
+                    }).field
             );
             if (
                 (breakoutFields.length === 1 && isDate(breakoutFields[0])) ||
diff --git a/frontend/src/metabase/query_builder/actions.js b/frontend/src/metabase/query_builder/actions.js
index a8867412db9b7e3b42dfafcff32bcd7990348cb3..6b27e652f12497a213ef5961670fc64130f1e002 100644
--- a/frontend/src/metabase/query_builder/actions.js
+++ b/frontend/src/metabase/query_builder/actions.js
@@ -5,7 +5,6 @@ import React from 'react'
 import { createAction } from "redux-actions";
 import _ from "underscore";
 import { assocIn } from "icepick";
-import moment from "moment";
 
 import { createThunkAction } from "metabase/lib/redux";
 import { push, replace } from "react-router-redux";
@@ -15,7 +14,7 @@ import MetabaseAnalytics from "metabase/lib/analytics";
 import { loadCard, startNewCard, deserializeCardFromUrl, serializeCardForUrl, cleanCopyCard, urlForCardState } from "metabase/lib/card";
 import { formatSQL } from "metabase/lib/formatting";
 import Query, { createQuery } from "metabase/lib/query";
-import { isPK, isFK } from "metabase/lib/types";
+import { isPK } from "metabase/lib/types";
 import Utils from "metabase/lib/utils";
 import { getEngineNativeType, formatJsonQuery } from "metabase/lib/engine";
 import { defer } from "metabase/lib/promise";
@@ -864,18 +863,22 @@ export const removeQueryBreakout = createQueryAction(
     Query.removeBreakout,
     ["QueryBuilder", "Remove GroupBy"]
 );
+// Exported for integration tests
+export const ADD_QUERY_FILTER = "metabase/qb/ADD_QUERY_FILTER"
 export const addQueryFilter = createQueryAction(
-    "metabase/qb/ADD_QUERY_FILTER",
+    ADD_QUERY_FILTER,
     Query.addFilter,
     ["QueryBuilder", "Add Filter"]
 );
+export const UPDATE_QUERY_FILTER = "metabase/qb/UPDATE_QUERY_FILTER";
 export const updateQueryFilter = createQueryAction(
-    "metabase/qb/UPDATE_QUERY_FILTER",
+    UPDATE_QUERY_FILTER,
     Query.updateFilter,
     ["QueryBuilder", "Modify Filter"]
 );
+export const REMOVE_QUERY_FILTER = "metabase/qb/REMOVE_QUERY_FILTER";
 export const removeQueryFilter = createQueryAction(
-    "metabase/qb/REMOVE_QUERY_FILTER",
+    REMOVE_QUERY_FILTER,
     Query.removeFilter,
     ["QueryBuilder", "Remove Filter"]
 );
@@ -1040,74 +1043,6 @@ export const cancelQuery = createThunkAction(CANCEL_QUERY, () => {
     };
 });
 
-// cellClicked
-export const CELL_CLICKED = "metabase/qb/CELL_CLICKED";
-export const cellClicked = createThunkAction(CELL_CLICKED, (rowIndex, columnIndex, filter) => {
-    return async (dispatch, getState) => {
-        // TODO Atte Keinänen 6/1/17: Should use `queryResults` instead
-        const { qb: { card, queryResult } } = getState();
-        if (!queryResult) return false;
-
-        // lookup the coldef and cell value of the cell we are taking action on
-        var coldef          = queryResult.data.cols[columnIndex],
-            value           = queryResult.data.rows[rowIndex][columnIndex],
-            sourceTableID   = card.dataset_query.query.source_table,
-            isForeignColumn = coldef.table_id && coldef.table_id !== sourceTableID && coldef.fk_field_id,
-            fieldRefForm    = isForeignColumn ? ['fk->', coldef.fk_field_id, coldef.id] : ['field-id', coldef.id];
-
-        if (isPK(coldef.special_type)) {
-            // action is on a PK column
-            let newCard: Card = startNewCard("query", card.dataset_query.database);
-
-            newCard.dataset_query.query.source_table = coldef.table_id;
-            newCard.dataset_query.query.aggregation = ["rows"];
-            newCard.dataset_query.query.filter = ["AND", ["=", coldef.id, value]];
-
-            // run it
-            dispatch(setCardAndRun(newCard));
-
-            MetabaseAnalytics.trackEvent("QueryBuilder", "Table Cell Click", "PK");
-        } else if (isFK(coldef.special_type)) {
-            // action is on an FK column
-            let newCard = startNewCard("query", card.dataset_query.database);
-
-            newCard.dataset_query.query.source_table = coldef.target.table_id;
-            newCard.dataset_query.query.aggregation = ["rows"];
-            newCard.dataset_query.query.filter = ["AND", ["=", coldef.target.id, value]];
-
-            // run it
-            dispatch(setCardAndRun(newCard));
-
-            MetabaseAnalytics.trackEvent("QueryBuilder", "Table Cell Click", "FK");
-        } else {
-            // this is applying a filter by clicking on a cell value
-            let dataset_query = Utils.copy(card.dataset_query);
-
-            if (coldef.unit && coldef.unit != "default" && filter === "=") {
-                // this is someone using quick filters on a datetime value
-                let start = moment(value).format("YYYY-MM-DD");
-                let end = start;
-                switch(coldef.unit) {
-                    case "week": end = moment(value).add(1, "weeks").subtract(1, "days").format("YYYY-MM-DD"); break;
-                    case "month": end = moment(value).add(1, "months").subtract(1, "days").format("YYYY-MM-DD"); break;
-                    case "quarter": end = moment(value).add(1, "quarters").subtract(1, "days").format("YYYY-MM-DD"); break;
-                    case "year": start = moment(value, "YYYY").format("YYYY-MM-DD");
-                                 end = moment(value, "YYYY").add(1, "years").subtract(1, "days").format("YYYY-MM-DD"); break;
-                }
-                Query.addFilter(dataset_query.query, ["BETWEEN", fieldRefForm, start, end]);
-            } else {
-                // quick filtering on a normal value (string/number)
-                Query.addFilter(dataset_query.query, [filter, fieldRefForm, value]);
-            }
-
-            // update and run the query
-            dispatch(setDatasetQuery(dataset_query, true));
-
-            MetabaseAnalytics.trackEvent("QueryBuilder", "Table Cell Click", "Quick Filter");
-        }
-    };
-});
-
 export const FOLLOW_FOREIGN_KEY = "metabase/qb/FOLLOW_FOREIGN_KEY";
 export const followForeignKey = createThunkAction(FOLLOW_FOREIGN_KEY, (fk) => {
     return async (dispatch, getState) => {
@@ -1241,5 +1176,3 @@ export const reloadCardFn = reloadCard;
 export const onRestoreOriginalQuery = reloadCard;
 export const onUpdateVisualizationSettings = updateCardVisualizationSettings;
 export const onReplaceAllVisualizationSettings = replaceAllCardVisualizationSettings;
-export const cellClickedFn = cellClicked;
-export const followForeignKeyFn = followForeignKey;
diff --git a/frontend/src/metabase/query_builder/components/ActionsWidget.jsx b/frontend/src/metabase/query_builder/components/ActionsWidget.jsx
index bd660c022496b8e7ba88abcc94708603f397e407..7400451b2276026acb866d897d677f1cc54dab7c 100644
--- a/frontend/src/metabase/query_builder/components/ActionsWidget.jsx
+++ b/frontend/src/metabase/query_builder/components/ActionsWidget.jsx
@@ -23,8 +23,8 @@ type Props = {
 };
 
 type State = {
-    isVisible: boolean,
-    isOpen: boolean,
+    iconIsVisible: boolean,
+    popoverIsOpen: boolean,
     isClosing: boolean,
     selectedActionIndex: ?number,
 };
@@ -36,8 +36,8 @@ const POPOVER_WIDTH = 350;
 export default class ActionsWidget extends Component {
     props: Props;
     state: State = {
-        isVisible: false,
-        isOpen: false,
+        iconIsVisible: false,
+        popoverIsOpen: false,
         isClosing: false,
         selectedActionIndex: null
     };
@@ -51,23 +51,26 @@ export default class ActionsWidget extends Component {
     }
 
     handleMouseMoved = () => {
-        if (!this.state.isVisible) {
-            this.setState({ isVisible: true });
+        // Don't auto-show or auto-hide the icon if popover is open
+        if (this.state.popoverIsOpen) return;
+
+        if (!this.state.iconIsVisible) {
+            this.setState({ iconIsVisible: true });
         }
         this.handleMouseStoppedMoving();
     };
 
     handleMouseStoppedMoving = _.debounce(
         () => {
-            if (this.state.isVisible) {
-                this.setState({ isVisible: false });
+            if (this.state.iconIsVisible) {
+                this.setState({ iconIsVisible: false });
             }
         },
         1000
     );
 
     close = () => {
-        this.setState({ isClosing: true, isOpen: false, selectedActionIndex: null });
+        this.setState({ isClosing: true, popoverIsOpen: false, selectedActionIndex: null });
         // Needed because when closing the action widget by clicking compass, this is triggered first
         // on mousedown (by OnClickOutsideWrapper) and toggle is triggered on mouseup
         setTimeout(() => this.setState({ isClosing: false }), 500);
@@ -76,11 +79,11 @@ export default class ActionsWidget extends Component {
     toggle = () => {
         if (this.state.isClosing) return;
 
-        if (!this.state.isOpen) {
+        if (!this.state.popoverIsOpen) {
             MetabaseAnalytics.trackEvent("Actions", "Opened Action Menu");
         }
         this.setState({
-            isOpen: !this.state.isOpen,
+            popoverIsOpen: !this.state.popoverIsOpen,
             selectedActionIndex: null
         });
     };
@@ -112,7 +115,7 @@ export default class ActionsWidget extends Component {
     };
     render() {
         const { className, question } = this.props;
-        const { isOpen, isVisible, selectedActionIndex } = this.state;
+        const { popoverIsOpen, iconIsVisible, selectedActionIndex } = this.state;
 
         const mode = question.mode();
         const actions = mode ? mode.actions() : [];
@@ -132,7 +135,7 @@ export default class ActionsWidget extends Component {
                         width: CIRCLE_SIZE,
                         height: CIRCLE_SIZE,
                         transition: "opacity 300ms ease-in-out",
-                        opacity: isOpen || isVisible ? 1 : 0,
+                        opacity: popoverIsOpen || iconIsVisible ? 1 : 0,
                         boxShadow: "2px 2px 4px rgba(0, 0, 0, 0.2)"
                     }}
                     onClick={this.toggle}
@@ -142,14 +145,14 @@ export default class ActionsWidget extends Component {
                         className="text-white"
                         style={{
                             transition: "transform 500ms ease-in-out",
-                            transform: isOpen
+                            transform: popoverIsOpen
                                 ? "rotate(0deg)"
                                 : "rotate(720deg)"
                         }}
                         size={NEEDLE_SIZE}
                     />
                 </div>
-                {isOpen &&
+                {popoverIsOpen &&
                     <OnClickOutsideWrapper handleDismissal={() => {
                         MetabaseAnalytics.trackEvent("Actions", "Dismissed Action Menu");
                         this.close();
diff --git a/frontend/src/metabase/query_builder/components/AggregationPopover.jsx b/frontend/src/metabase/query_builder/components/AggregationPopover.jsx
index 3eb0ddcc460857e2a69543882148f8e0e8d15e70..a1827461df22034b0accb3debb88fde6d436e088 100644
--- a/frontend/src/metabase/query_builder/components/AggregationPopover.jsx
+++ b/frontend/src/metabase/query_builder/components/AggregationPopover.jsx
@@ -40,6 +40,8 @@ export default class AggregationPopover extends Component {
         datasetQuery: PropTypes.object,
         customFields: PropTypes.object,
         availableAggregations: PropTypes.array,
+        // Restricts the shown options to contents of `availableActions` only
+        showOnlyProvidedAggregations: PropTypes.boolean
     };
 
 
@@ -118,7 +120,7 @@ export default class AggregationPopover extends Component {
     }
 
     render() {
-        const { query, tableMetadata } = this.props;
+        const { query, tableMetadata, showOnlyProvidedAggregations } = this.props;
 
         const customFields = this.getCustomFields();
         const availableAggregations = this.getAvailableAggregations();
@@ -134,42 +136,45 @@ export default class AggregationPopover extends Component {
         }
 
         let sections = [];
+        let customExpressionIndex = null;
 
         if (availableAggregations.length > 0) {
             sections.push({
-                name: "Metabasics",
+                name: showOnlyProvidedAggregations ? null : "Metabasics",
                 items: availableAggregations.map(aggregation => ({
                     name: aggregation.name,
                     value: [aggregation.short].concat(aggregation.fields.map(field => null)),
                     isSelected: (agg) => !AggregationClause.isCustom(agg) && AggregationClause.getAggregation(agg) === aggregation.short,
                     aggregation: aggregation
                 })),
-                icon: "table2"
+                icon: showOnlyProvidedAggregations ? null : "table2"
             });
         }
 
-        // we only want to consider active metrics, with the ONE exception that if the currently selected aggregation is a
-        // retired metric then we include it in the list to maintain continuity
-        let metrics = tableMetadata.metrics && tableMetadata.metrics.filter((mtrc) => mtrc.is_active === true || (selectedAggregation && selectedAggregation.id === mtrc.id));
-        if (metrics && metrics.length > 0) {
-            sections.push({
-                name: METRICS_SECTION_NAME,
-                items: metrics.map(metric => ({
-                    name: metric.name,
-                    value: ["METRIC", metric.id],
-                    isSelected: (aggregation) => AggregationClause.getMetric(aggregation) === metric.id,
-                    metric: metric
-                })),
-                icon: "staroutline"
-            });
-        }
+        if (!showOnlyProvidedAggregations) {
+            // we only want to consider active metrics, with the ONE exception that if the currently selected aggregation is a
+            // retired metric then we include it in the list to maintain continuity
+            let metrics = tableMetadata.metrics && tableMetadata.metrics.filter((mtrc) => mtrc.is_active === true || (selectedAggregation && selectedAggregation.id === mtrc.id));
+            if (metrics && metrics.length > 0) {
+                sections.push({
+                    name: METRICS_SECTION_NAME,
+                    items: metrics.map(metric => ({
+                        name: metric.name,
+                        value: ["METRIC", metric.id],
+                        isSelected: (aggregation) => AggregationClause.getMetric(aggregation) === metric.id,
+                        metric: metric
+                    })),
+                    icon: "staroutline"
+                });
+            }
 
-        let customExpressionIndex = sections.length;
-        if (tableMetadata.db.features.indexOf("expression-aggregations") >= 0) {
-            sections.push({
-                name: CUSTOM_SECTION_NAME,
-                icon: "staroutline"
-            });
+            customExpressionIndex = sections.length;
+            if (tableMetadata.db.features.indexOf("expression-aggregations") >= 0) {
+                sections.push({
+                    name: CUSTOM_SECTION_NAME,
+                    icon: "staroutline"
+                });
+            }
         }
 
         if (sections.length === 1) {
@@ -204,7 +209,7 @@ export default class AggregationPopover extends Component {
                                 this.state.error.map(error =>
                                     <div className="text-error mb1" style={{ whiteSpace: "pre-wrap" }}>{error.message}</div>
                                 )
-                            :
+                                :
                                 <div className="text-error mb1">{this.state.error.message}</div>
                         )}
                         <input
@@ -226,7 +231,7 @@ export default class AggregationPopover extends Component {
         } else if (choosingField) {
             const [agg, fieldId] = aggregation;
             return (
-                <div style={{width: 300}}>
+                <div style={{minWidth: 300}}>
                     <div className="text-grey-3 p1 py2 border-bottom flex align-center">
                         <a className="cursor-pointer flex align-center" onClick={this.onClearAggregation}>
                             <Icon name="chevronleft" size={18}/>
diff --git a/frontend/src/metabase/query_builder/components/DataSelector.jsx b/frontend/src/metabase/query_builder/components/DataSelector.jsx
index 2e30b09fe27874bbc1f821b301d8122ced7a111a..d8abe50a6ff238bd4c193e0ac5abf6e56b7d942e 100644
--- a/frontend/src/metabase/query_builder/components/DataSelector.jsx
+++ b/frontend/src/metabase/query_builder/components/DataSelector.jsx
@@ -9,7 +9,6 @@ import { isQueryable } from 'metabase/lib/table';
 import { titleize, humanize } from 'metabase/lib/formatting';
 
 import _ from "underscore";
-import cx from "classnames";
 
 export default class DataSelector extends Component {
 
@@ -173,23 +172,13 @@ export default class DataSelector extends Component {
         const { databases, selectedSchema } = this.state;
 
         let sections = databases
-            .filter(database =>
-                // filter out the saved questions "db" so we can present it
-                // differently
-                !database.is_saved_questions
-            )
             .map(database => ({
                 name: database.name,
-                items: database.schemas.length > 1 ? database.schemas : []
+                items: database.schemas.length > 1 ? database.schemas : [],
+                className: database.is_saved_questions ? "bg-slate-extra-light" : null,
+                icon: database.is_saved_questions ? 'all' : 'database'
             }));
 
-        // do the opposite of what we just did and get a reference to the saved question "db"
-        // there will only ever be one of these hence [0]
-        const savedQuestionSection = databases.filter(db => db.is_saved_questions)[0]
-
-        // some of the change functions need the index in the databases array
-        const savedQuestionSectionIndex = databases.indexOf(savedQuestionSection)
-
         let openSection = selectedSchema && _.findIndex(databases, (db) => _.find(db.schemas, selectedSchema));
         if (openSection >= 0 && databases[openSection] && databases[openSection].schemas.length === 1) {
             openSection = -1;
@@ -205,23 +194,18 @@ export default class DataSelector extends Component {
                     onChange={this.onChangeSchema}
                     onChangeSection={this.onChangeDatabase}
                     itemIsSelected={(schema) => this.state.selectedSchema === schema}
-                    renderSectionIcon={() => <Icon className="Icon text-default" name="database" size={18} />}
+                    renderSectionIcon={item =>
+                        <Icon
+                            className="Icon text-default"
+                            name={item.icon}
+                            size={18}
+                        />
+                    }
                     renderItemIcon={() => <Icon name="folder" size={16} />}
                     initiallyOpenSection={openSection}
                     showItemArrows={true}
                     alwaysTogglable={true}
                 />
-                { savedQuestionSection && (
-                    <div
-                        className="List-section p2 cursor-pointer text-brand-hover bg-slate-extra-light"
-                        onClick={() => this.onChangeDatabase(savedQuestionSectionIndex)}
-                    >
-                        <div className="List-section-header flex align-center">
-                            <Icon className="Icon text-default mr2" size={18} name="all" />
-                            <h3 className="List-section-title">Saved questions</h3>
-                        </div>
-                    </div>
-                )}
             </div>
         );
     }
@@ -275,15 +259,13 @@ export default class DataSelector extends Component {
         const hasMultipleSources = hasMultipleDatabases || hasMultipleSchemas || hasSegments;
 
         let header = (
-            <span className="flex align-center">
-                <span className={cx("flex align-center text-brand-hover text-slate", { "cursor-pointer": hasMultipleSources })} onClick={hasMultipleSources && this.onBack}>
-                    { hasMultipleSources && <Icon name="chevronleft" size={18} /> }
+            <div className="flex flex-wrap align-center">
+                <span className="flex align-center text-brand-hover cursor-pointer" onClick={hasMultipleSources && this.onBack}>
+                    {hasMultipleSources && <Icon name="chevronleft" size={18} /> }
                     <span className="ml1">{schema.database.name}</span>
                 </span>
-                { schema.name &&
-                    <span><span className="mx1">-</span>{schema.name}</span>
-                }
-            </span>
+                { schema.name && <span className="ml1 text-slate">- {schema.name}</span>}
+            </div>
         );
 
         if (schema.tables.length === 0) {
diff --git a/frontend/src/metabase/query_builder/components/FieldList.jsx b/frontend/src/metabase/query_builder/components/FieldList.jsx
index 857cc7ae52ac6167451cb3a5674c36ed4abdafad..fffe81bc9a6100ad3817df5b356b5917676006db 100644
--- a/frontend/src/metabase/query_builder/components/FieldList.jsx
+++ b/frontend/src/metabase/query_builder/components/FieldList.jsx
@@ -10,7 +10,7 @@ import QueryDefinitionTooltip from "./QueryDefinitionTooltip.jsx";
 
 import { stripId, singularize } from "metabase/lib/formatting";
 
-import Dimension from "metabase-lib/lib/Dimension";
+import Dimension, { BinnedDimension } from "metabase-lib/lib/Dimension";
 
 import type { ConcreteField } from "metabase/meta/types/Query";
 import type Table from "metabase-lib/lib/metadata/Table";
@@ -20,7 +20,7 @@ export type AccordianListItem = {
 }
 
 export type AccordianListSection = {
-    name: string;
+    name: ?string;
     items: AccordianListItem[]
 }
 
@@ -36,7 +36,9 @@ type Props = {
     tableMetadata: Table,
 
     alwaysExpanded?: boolean,
-    enableSubDimensions?: boolean
+    enableSubDimensions?: boolean,
+
+    hideSectionHeader?: boolean
 }
 
 type State = {
@@ -54,7 +56,7 @@ export default class FieldList extends Component {
     }
 
     componentWillReceiveProps(newProps) {
-        let { tableMetadata, fieldOptions, segmentOptions } = newProps;
+        let { tableMetadata, fieldOptions, segmentOptions, hideSectionHeader } = newProps;
         let tableName = tableMetadata.display_name;
 
         let specialOptions = [];
@@ -73,17 +75,17 @@ export default class FieldList extends Component {
             }))
 
         let mainSection = {
-            name: singularize(tableName),
+            name: hideSectionHeader ? null : singularize(tableName),
             items: specialOptions.concat(getSectionItems(fieldOptions))
         };
 
         let fkSections = fieldOptions.fks.map(fkOptions => ({
-            name: stripId(fkOptions.field.display_name),
+            name: hideSectionHeader ? null : stripId(fkOptions.field.display_name),
             items: getSectionItems(fkOptions)
         }));
 
         let sections = []
-        if (mainSection.items.length > 0) {
+        if (mainSection.items.length > 0 ) {
             sections.push(mainSection);
         }
         sections.push(...fkSections);
@@ -181,14 +183,24 @@ export default class FieldList extends Component {
     }
 
     onChange = (item) => {
-        if (item.segment && this.props.onFilterChange) {
-            this.props.onFilterChange(item.value);
-        } else if (this.props.field != null && this.itemIsSelected(item)) {
+        const { field, enableSubDimensions, onFilterChange, onFieldChange} = this.props;
+        if (item.segment && onFilterChange) {
+            onFilterChange(item.value);
+        } else if (field != null && this.itemIsSelected(item)) {
             // ensure if we select the same item we don't reset datetime-field's unit
-            this.props.onFieldChange(this.props.field);
+            onFieldChange(field);
         } else {
             const dimension = item.dimension.defaultDimension() || item.dimension;
-            this.props.onFieldChange(dimension.mbql());
+            const shouldExcludeBinning = !enableSubDimensions && dimension instanceof BinnedDimension
+
+            if (shouldExcludeBinning) {
+                // If we don't let user choose the sub-dimension, we don't want to treat the field
+                // as a binned field (which would use the default binning)
+                // Let's unwrap the base field of the binned field instead
+                onFieldChange(dimension.baseDimension().mbql());
+            } else {
+                onFieldChange(dimension.mbql());
+            }
         }
     }
 
@@ -211,7 +223,7 @@ export default class FieldList extends Component {
 
 import cx from "classnames";
 
-const DimensionPicker = ({ className, dimension, dimensions, onChangeDimension }) => {
+export const DimensionPicker = ({ className, dimension, dimensions, onChangeDimension }) => {
     return (
         <ul className="px2 py1">
             { dimensions.map((d, index) =>
diff --git a/frontend/src/metabase/query_builder/components/FieldName.jsx b/frontend/src/metabase/query_builder/components/FieldName.jsx
index 52cfac407576f4a6b3ab6a15d9c60b4c676c136a..bd508a5974cca61a76842f627a220f3d3e2541d1 100644
--- a/frontend/src/metabase/query_builder/components/FieldName.jsx
+++ b/frontend/src/metabase/query_builder/components/FieldName.jsx
@@ -5,7 +5,7 @@ import Clearable from "./Clearable.jsx";
 
 import Query from "metabase/lib/query";
 
-import Dimension from "metabase-lib/lib/Dimension";
+import Dimension, { AggregationDimension } from "metabase-lib/lib/Dimension";
 
 import _ from "underscore";
 import cx from "classnames";
@@ -15,7 +15,8 @@ export default class FieldName extends Component {
         field: PropTypes.oneOfType([PropTypes.number, PropTypes.array]),
         onClick: PropTypes.func,
         removeField: PropTypes.func,
-        tableMetadata: PropTypes.object.isRequired
+        tableMetadata: PropTypes.object.isRequired,
+        query: PropTypes.object
     };
 
     static defaultProps = {
@@ -30,14 +31,20 @@ export default class FieldName extends Component {
     }
 
     render() {
-        let { field, tableMetadata, className } = this.props;
+        let { field, tableMetadata, query, className } = this.props;
 
         let parts = [];
 
         if (field) {
             const dimension = Dimension.parseMBQL(field, tableMetadata && tableMetadata.metadata);
             if (dimension) {
-                parts = dimension.render();
+                if (dimension instanceof AggregationDimension) {
+                    // Aggregation dimension doesn't know about its relation to the current query
+                    // so we have to infer the display name of aggregation here
+                    parts = <span key="field">{query.aggregations()[dimension.aggregationIndex()][0]}</span>
+                } else {
+                    parts = <span key="field">{dimension.render()}</span>;
+                }
             }
             // TODO Atte Keinänen 6/23/17: Move nested queries logic to Dimension subclasses
             // if the Field in question is a field literal, e.g. ["field-literal", <name>, <type>] just use name as-is
diff --git a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx
index b5532b94c8b21313e686cbe5b336dbb52764ccff..8dac1828f84217e29bdd2d2ed95ea3e8665b5985 100644
--- a/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx
+++ b/frontend/src/metabase/query_builder/components/QueryDownloadWidget.jsx
@@ -18,14 +18,14 @@ const EXPORT_FORMATS = ["csv", "xlsx", "json"];
 const QueryDownloadWidget = ({ className, card, result, uuid, token }) =>
     <PopoverWithTrigger
         triggerElement={
-            <Tooltip tooltip="Download">
+            <Tooltip tooltip="Download full results">
                 <Icon title="Download this data" name="downarrow" size={16} />
             </Tooltip>
         }
         triggerClasses={cx(className, "text-brand-hover")}
     >
         <div className="p2" style={{ maxWidth: 320 }}>
-            <h4>Download</h4>
+            <h4>Download full results</h4>
             { result.data.rows_truncated != null &&
                 <FieldSet className="my2 text-gold border-gold" legend="Warning">
                     <div className="my1">Your answer has a large number of rows so it could take awhile to download.</div>
diff --git a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
index ef5edfc86d474da4b684c1dc83b1dac128f70826..fb46c7fac5f2e49255777c2388e7b4c631e0eb3d 100644
--- a/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
+++ b/frontend/src/metabase/query_builder/components/QueryVisualization.jsx
@@ -123,7 +123,8 @@ export default class QueryVisualization extends Component {
             messages.push({
                 icon: "table2",
                 message: (
-                    <div>
+                    // class name is included for the sake of making targeting the element in tests easier
+                    <div className="ShownRowCount">
                         { result.data.rows_truncated != null ? ("Showing first ") : ("Showing ")}
                         <strong>{formatNumber(result.row_count)}</strong>
                         { " " + inflect("row", result.data.rows.length) }
diff --git a/frontend/src/metabase/query_builder/components/QueryVisualizationObjectDetailTable.jsx b/frontend/src/metabase/query_builder/components/QueryVisualizationObjectDetailTable.jsx
deleted file mode 100644
index de4b791b3fa3fd5a58184e4e1566958f0e08dd02..0000000000000000000000000000000000000000
--- a/frontend/src/metabase/query_builder/components/QueryVisualizationObjectDetailTable.jsx
+++ /dev/null
@@ -1,241 +0,0 @@
-import React, { Component } from "react";
-import PropTypes from "prop-types";
-
-import ExpandableString from './ExpandableString.jsx';
-import Icon from 'metabase/components/Icon.jsx';
-import IconBorder from 'metabase/components/IconBorder.jsx';
-import LoadingSpinner from 'metabase/components/LoadingSpinner.jsx';
-import { foreignKeyCountsByOriginTable } from 'metabase/lib/schema_metadata';
-import { TYPE, isa, isPK } from "metabase/lib/types";
-import { singularize, inflect } from 'inflection';
-import { formatValue } from "metabase/lib/formatting";
-import { isQueryable } from "metabase/lib/table";
-
-import cx from "classnames";
-
-export default class QueryVisualizationObjectDetailTable extends Component {
-    constructor(props, context) {
-        super(props, context);
-        this.cellClicked = this.cellClicked.bind(this);
-        this.clickedForeignKey = this.clickedForeignKey.bind(this);
-    }
-
-    static propTypes = {
-        data: PropTypes.object
-    };
-
-    componentDidMount() {
-        // load up FK references
-        this.props.loadObjectDetailFKReferences();
-    }
-
-    componentWillReceiveProps(nextProps) {
-        // if the card has changed then reload fk references
-        if (this.props.data != nextProps.data) {
-            this.props.loadObjectDetailFKReferences();
-        }
-    }
-
-    getIdValue() {
-        if (!this.props.data) return null;
-
-        for (var i=0; i < this.props.data.cols.length; i++) {
-            var coldef = this.props.data.cols[i];
-            if (isPK(coldef.special_type)) {
-                return this.props.data.rows[0][i];
-            }
-        }
-    }
-
-    rowGetter(rowIndex) {
-        // Remember that we are pivoting the data, so for row 5 we want to return an array with [coldef, value]
-        return [this.props.data.cols[rowIndex], this.props.data.rows[0][rowIndex]];
-    }
-
-    cellClicked(rowIndex, columnIndex) {
-        this.props.cellClickedFn(rowIndex, columnIndex);
-    }
-
-    cellRenderer(cellData, cellDataKey, rowData, rowIndex, columnData, width) {
-        // TODO: should we be casting all values toString()?
-        // Check out the expected format of each row above in the rowGetter() function
-        var row = this.rowGetter(rowIndex),
-            key = 'cl'+rowIndex+'_'+cellDataKey;
-
-        if (cellDataKey === 'field') {
-            var colValue = (row[0] !== null) ? (row[0].display_name.toString() || row[0].name.toString()) : null;
-            return (<div key={key}>{colValue}</div>);
-        } else {
-
-            var cellValue;
-            if (row[1] === null || row[1] === undefined || (typeof row[1] === "string" && row[1].length === 0)) {
-                cellValue = (<span className="text-grey-2">Empty</span>);
-            } else if (isa(row[0].special_type, TYPE.SerializedJSON)) {
-                let formattedJson = JSON.stringify(JSON.parse(row[1]), null, 2);
-                cellValue = (<pre className="ObjectJSON">{formattedJson}</pre>);
-            } else if (typeof row[1] === "object") {
-                let formattedJson = JSON.stringify(row[1], null, 2);
-                cellValue = (<pre className="ObjectJSON">{formattedJson}</pre>);
-            } else {
-                cellValue = formatValue(row[1], { column: row[0], jsx: true });
-                if (typeof cellValue === "string") {
-                    cellValue = (<ExpandableString str={cellValue} length={140}></ExpandableString>);
-                }
-            }
-
-            // NOTE: that the values to our function call look off, but that's because we are un-pivoting them
-            if (this.props.cellIsClickableFn(0, rowIndex)) {
-                return (<div key={key}><a className="link" onClick={this.cellClicked.bind(null, 0, rowIndex)}>{cellValue}</a></div>);
-            } else {
-                return (<div key={key}>{cellValue}</div>);
-            }
-        }
-    }
-
-    clickedForeignKey(fk) {
-        this.props.followForeignKeyFn(fk);
-    }
-
-    renderDetailsTable() {
-        var rows = [];
-        for (var i=0; i < this.props.data.cols.length; i++) {
-            var row = this.rowGetter(i),
-                keyCell = this.cellRenderer(row[0], 'field', row, i, 0),
-                valueCell = this.cellRenderer(row[1], 'value', row, i, 0);
-
-            rows[i] = (
-                <div className="Grid mb2" key={i}>
-                    <div className="Grid-cell">{keyCell}</div>
-                    <div style={{wordWrap: 'break-word'}} className="Grid-cell text-bold text-dark">{valueCell}</div>
-                </div>
-            );
-        }
-
-        return rows;
-    }
-
-    renderRelationships() {
-        if (!this.props.tableForeignKeys) return false;
-
-        var tableForeignKeys = this.props.tableForeignKeys.filter(function (fk) {
-            return isQueryable(fk.origin.table);
-        });
-
-        if (tableForeignKeys.length < 1) {
-            return (<p className="my4 text-centered">No relationships found.</p>);
-        }
-
-        const fkCountsByTable = foreignKeyCountsByOriginTable(tableForeignKeys);
-
-        var component = this;
-        var relationships = tableForeignKeys.sort(function(a, b) {
-            return a.origin.table.display_name.localeCompare(b.origin.table.display_name);
-        }).map(function(fk) {
-            var fkCount = (
-                <LoadingSpinner size={25} />
-            ),
-                fkCountValue = 0,
-                fkClickable = false;
-            if (component.props.tableForeignKeyReferences) {
-                var fkCountInfo = component.props.tableForeignKeyReferences[fk.origin.id];
-                if (fkCountInfo && fkCountInfo["status"] === 1) {
-                    fkCount = (<span>{fkCountInfo["value"]}</span>);
-
-                    if (fkCountInfo["value"]) {
-                        fkCountValue = fkCountInfo["value"];
-                        fkClickable = true;
-                    }
-                }
-            }
-            var chevron = (
-                <IconBorder className="flex-align-right">
-                    <Icon name='chevronright' size={10} />
-                </IconBorder>
-            );
-
-            var relationName = inflect(fk.origin.table.display_name, fkCountValue);
-            const via = (fkCountsByTable[fk.origin.table.id] > 1) ? (<span className="text-grey-3 text-normal"> via {fk.origin.display_name}</span>) : null;
-
-            var info = (
-                <div>
-                    <h2>{fkCount}</h2>
-                    <h5 className="block">{relationName}{via}</h5>
-                 </div>
-            );
-            var fkReference;
-            var referenceClasses = cx({
-                'flex': true,
-                'align-center': true,
-                'my2': true,
-                'pb2': true,
-                'border-bottom': true,
-                'text-brand-hover': fkClickable,
-                'cursor-pointer': fkClickable,
-                'text-dark': fkClickable,
-                'text-grey-3': !fkClickable
-            });
-
-            if (fkClickable) {
-                fkReference = (
-                    <div className={referenceClasses} key={fk.id} onClick={component.clickedForeignKey.bind(null, fk)}>
-                        {info}
-                        {chevron}
-                    </div>
-                );
-            } else {
-                fkReference = (
-                    <div className={referenceClasses} key={fk.id}>
-                        {info}
-                    </div>
-                );
-            }
-
-            return (
-                <li>
-                    {fkReference}
-                </li>
-            );
-        });
-
-        return (
-            <ul className="px4">
-                {relationships}
-            </ul>
-        );
-    }
-
-    render() {
-        if(!this.props.data) {
-            return false;
-        }
-
-        var tableName = (this.props.tableMetadata) ? singularize(this.props.tableMetadata.display_name) : "Unknown",
-            // TODO: once we nail down the "title" column of each table this should be something other than the id
-            idValue = this.getIdValue();
-
-        return (
-            <div className="ObjectDetail rounded mt2">
-                <div className="Grid ObjectDetail-headingGroup">
-                    <div className="Grid-cell ObjectDetail-infoMain px4 py3 ml2 arrow-right">
-                        <div className="text-brand text-bold">
-                            <span>{tableName}</span>
-                            <h1>{idValue}</h1>
-                        </div>
-                    </div>
-                    <div className="Grid-cell flex align-center Cell--1of3 bg-alt">
-                        <div className="p4 flex align-center text-bold text-grey-3">
-                            <Icon name="connections" size={17} />
-                            <div className="ml2">
-                                This <span className="text-dark">{tableName}</span> is connected to:
-                            </div>
-                        </div>
-                    </div>
-                </div>
-                <div className="Grid">
-                    <div className="Grid-cell ObjectDetail-infoMain p4">{this.renderDetailsTable()}</div>
-                    <div className="Grid-cell Cell--1of3 bg-alt">{this.renderRelationships()}</div>
-                </div>
-            </div>
-        );
-    }
-}
diff --git a/frontend/src/metabase/query_builder/components/VisualizationResult.jsx b/frontend/src/metabase/query_builder/components/VisualizationResult.jsx
index b5c77015f5404c509d739080b5cc19cef7acd789..3b05fa3b2b21e7b3000042d9c6196db96a48cb7b 100644
--- a/frontend/src/metabase/query_builder/components/VisualizationResult.jsx
+++ b/frontend/src/metabase/query_builder/components/VisualizationResult.jsx
@@ -1,7 +1,6 @@
 /* eslint "react/prop-types": "warn" */
 
 import React from "react";
-import QueryVisualizationObjectDetailTable from './QueryVisualizationObjectDetailTable.jsx';
 import VisualizationErrorMessage from './VisualizationErrorMessage';
 import Visualization from "metabase/visualizations/components/Visualization.jsx";
 import { datasetContainsNoResults } from "metabase/lib/dataset";
@@ -17,10 +16,7 @@ type Props = {
 }
 const VisualizationResult = ({question, isObjectDetail, lastRunDatasetQuery, navigateToNewCardInsideQB, result, results, ...props}: Props) => {
     const noResults = datasetContainsNoResults(result.data);
-
-    if (isObjectDetail) {
-        return <QueryVisualizationObjectDetailTable data={result.data} {...props} />
-    } else if (noResults) {
+    if (noResults) {
         // successful query but there were 0 rows returned with the result
         return <VisualizationErrorMessage
                   type='noRows'
@@ -39,6 +35,7 @@ const VisualizationResult = ({question, isObjectDetail, lastRunDatasetQuery, nav
         const series = question.atomicQueries().map((metricQuery, index) => ({
             card: {
                 ...question.card(),
+                display: isObjectDetail ? "object" : question.card().display,
                 dataset_query: lastRunDatasetQuery
             },
             data: results[index] && results[index].data
diff --git a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
index 8e6b07a5f14b754d79a49eec5ed14eb205550d6c..83136b629dec99a2d2a84a73acc30912538ff80f 100644
--- a/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
+++ b/frontend/src/metabase/query_builder/components/VisualizationSettings.jsx
@@ -69,7 +69,7 @@ export default class VisualizationSettings extends React.Component {
                                 key={index}
                                 className={cx('p2 flex align-center cursor-pointer bg-brand-hover text-white-hover', {
                                     'ChartType--selected': vizType === card.display,
-                                    'ChartType--notSensible': !(result && result.data && viz.isSensible(result.data.cols, result.data.rows)),
+                                    'ChartType--notSensible': !(result && result.data && viz.isSensible && viz.isSensible(result.data.cols, result.data.rows)),
                                     'hide': viz.hidden
                                 })}
                                 onClick={this.setDisplay.bind(null, vizType)}
diff --git a/frontend/src/metabase/query_builder/components/expressions/ExpressionWidget.jsx b/frontend/src/metabase/query_builder/components/expressions/ExpressionWidget.jsx
index 4f592ba64c357abdaa519082a16e44cae9033d3f..0469775e1d747fef1031af1c74da290150bb55b8 100644
--- a/frontend/src/metabase/query_builder/components/expressions/ExpressionWidget.jsx
+++ b/frontend/src/metabase/query_builder/components/expressions/ExpressionWidget.jsx
@@ -53,10 +53,10 @@ export default class ExpressionWidget extends Component {
                             onChange={(parsedExpression) => this.setState({expression: parsedExpression, error: null})}
                             onError={(errorMessage) => this.setState({error: errorMessage})}
                         />
-                        <p className="h5 text-grey-2">
+                      <p className="h5 text-grey-5">
                             Think of this as being kind of like writing a formula in a spreadsheet program: you can use numbers, fields in this table,
-                            mathematical symbols like +, and some functions.  So you could type, Subtotal - Cost.
-                            <a className="link" href="http://www.metabase.com/docs/latest/users-guide/03-asking-questions.html#creating-a-custom-field">Learn more</a>
+                            mathematical symbols like +, and some functions.  So you could type something like Subtotal &minus; Cost.
+                            &nbsp;<a className="link" target="_blank" href="http://www.metabase.com/docs/latest/users-guide/04-asking-questions.html#creating-a-custom-field">Learn more</a>
                         </p>
                     </div>
 
diff --git a/frontend/src/metabase/query_builder/components/filters/FilterPopover.jsx b/frontend/src/metabase/query_builder/components/filters/FilterPopover.jsx
index a703284f624c6d7845d9957ec9baccadfde58395..7b52ff345fcb800f8c911de6d2aa8b2e2392d36a 100644
--- a/frontend/src/metabase/query_builder/components/filters/FilterPopover.jsx
+++ b/frontend/src/metabase/query_builder/components/filters/FilterPopover.jsx
@@ -14,7 +14,7 @@ import Icon from "metabase/components/Icon.jsx";
 
 import Query from "metabase/lib/query";
 import { isDate } from "metabase/lib/schema_metadata";
-import { singularize } from "metabase/lib/formatting";
+import { formatField, singularize } from "metabase/lib/formatting";
 
 import cx from "classnames";
 
@@ -101,8 +101,10 @@ export default class FilterPopover extends Component {
 
     setValue(index: number, value: any) {
         let { filter } = this.state;
-        filter[index + 2] = value;
-        this.setState({ filter: filter });
+        // $FlowFixMe Flow doesn't like spread operator
+        let newFilter: FieldFilter = [...filter]
+        newFilter[index + 2] = value;
+        this.setState({ filter: newFilter });
     }
 
     setValues = (values: any[]) => {
@@ -261,12 +263,12 @@ export default class FilterPopover extends Component {
                     minWidth: 300
                 }}>
                     <div className="FilterPopover-header text-grey-3 p1 mt1 flex align-center">
-                        <a className="cursor-pointer flex align-center" onClick={this.clearField}>
+                        <a className="cursor-pointer text-purple-hover transition-color flex align-center" onClick={this.clearField}>
                             <Icon name="chevronleft" size={18}/>
                             <h3 className="inline-block">{singularize(table.display_name)}</h3>
                         </a>
                         <h3 className="mx1">-</h3>
-                        <h3 className="text-default">{field.display_name}</h3>
+                        <h3 className="text-default">{formatField(field)}</h3>
                     </div>
                     { isDate(field) ?
                         <DatePicker
diff --git a/frontend/src/metabase/query_builder/components/filters/FilterWidget.jsx b/frontend/src/metabase/query_builder/components/filters/FilterWidget.jsx
index 57321c79ca6d764b8cebdb027139f3f06a80694d..559c54a0ee4da46d3e1ceb2dd72fa9be54f495c5 100644
--- a/frontend/src/metabase/query_builder/components/filters/FilterWidget.jsx
+++ b/frontend/src/metabase/query_builder/components/filters/FilterWidget.jsx
@@ -8,6 +8,7 @@ import Popover from "metabase/components/Popover.jsx";
 import FilterPopover from "./FilterPopover.jsx";
 
 import { generateTimeFilterValuesDescriptions } from "metabase/lib/query_time";
+import { formatValue } from "metabase/lib/formatting";
 
 import cx from "classnames";
 import _ from "underscore";
@@ -62,13 +63,17 @@ export default class FilterWidget extends Component {
 
         const operator = dimension.operator(op);
 
+        let formattedValues;
         // $FlowFixMe: not understanding maxDisplayValues is provided by defaultProps
         if (operator && operator.multi && values.length > maxDisplayValues) {
-            values = [values.length + " selections"];
-        }
-
-        if (dimension.field().isDate()) {
-            values = generateTimeFilterValuesDescriptions(filter);
+            formattedValues = [values.length + " selections"];
+        } else if (dimension.field().isDate()) {
+            formattedValues = generateTimeFilterValuesDescriptions(filter);
+        } else {
+            // TODO Atte Keinänen 7/16/17: Move formatValue to metabase-lib
+            formattedValues = values.filter(value => value !== undefined).map(value =>
+                formatValue(value, { column: dimension.field() })
+            )
         }
 
         return (
@@ -87,16 +92,13 @@ export default class FilterWidget extends Component {
                         <a className="QueryOption flex align-center">{operator && operator.moreVerboseName}</a>
                     </div>
                 </div>
-                { values.length > 0 && (
+                { formattedValues.length > 0 && (
                     <div className="flex align-center flex-wrap">
-                        {values.map((value, valueIndex) => {
-                            var valueString = value != null ? value.toString() : null;
-                            return value != undefined && (
-                                <div key={valueIndex} className="Filter-section Filter-section-value">
-                                    <span className="QueryOption">{valueString}</span>
-                                </div>
-                            );
-                        })}
+                        {formattedValues.map((formattedValue, valueIndex) =>
+                            <div key={valueIndex} className="Filter-section Filter-section-value">
+                                <span className="QueryOption">{formattedValue}</span>
+                            </div>
+                        )}
                     </div>
                 )}
             </div>
diff --git a/frontend/src/metabase/query_builder/components/filters/OperatorSelector.jsx b/frontend/src/metabase/query_builder/components/filters/OperatorSelector.jsx
index de0242e100d42785acc3bcdd6190ef520769556c..4ac57c31ce1902806dc6521817ed0c5a770cf3a7 100644
--- a/frontend/src/metabase/query_builder/components/filters/OperatorSelector.jsx
+++ b/frontend/src/metabase/query_builder/components/filters/OperatorSelector.jsx
@@ -74,7 +74,7 @@ export default class OperatorSelector extends Component {
                     </button>
                 )}
                 { !expanded && expandedOperators.length > 0 ?
-                    <div className="text-grey-3 cursor-pointer" onClick={this.expandOperators}>
+                    <div className="text-grey-3 text-purple-hover transition-color cursor-pointer" onClick={this.expandOperators}>
                         <Icon className="px1" name="chevrondown" size={14} />
                         More Options
                     </div>
diff --git a/frontend/src/metabase/query_builder/components/filters/pickers/SelectPicker.jsx b/frontend/src/metabase/query_builder/components/filters/pickers/SelectPicker.jsx
index b325ab9cafcce16af8c00b0d0f3819d80ac2ea79..44392273b7d1f1b45aabe06e82ec0038018ea4bd 100644
--- a/frontend/src/metabase/query_builder/components/filters/pickers/SelectPicker.jsx
+++ b/frontend/src/metabase/query_builder/components/filters/pickers/SelectPicker.jsx
@@ -91,10 +91,7 @@ export default class SelectPicker extends Component {
     render() {
         let { values, options, placeholder, multi } = this.props;
 
-        let checked = {};
-        for (let value of values) {
-            checked[value] = true;
-        }
+        let checked = new Set(values);
 
         let validOptions = [];
         let regex = this.state.searchRegex;
@@ -130,8 +127,11 @@ export default class SelectPicker extends Component {
                        <ul>
                            {validOptions.map((option, index) =>
                                <li key={index}>
-                                   <label className="flex align-center cursor-pointer p1" onClick={() => this.selectValue(option.key, !checked[option.key])}>
-                                       <CheckBox checked={checked[option.key]} />
+                                   <label className="flex align-center cursor-pointer p1" onClick={() => this.selectValue(option.key, !checked.has(option.key))}>
+                                       <CheckBox
+                                           checked={checked.has(option.key)}
+                                           color='purple'
+                                       />
                                        <h4 className="ml1">{this.nameForOption(option)}</h4>
                                    </label>
                                </li>
@@ -145,7 +145,7 @@ export default class SelectPicker extends Component {
                                        style={{ height: "95px" }}
                                        className={cx("full rounded bordered border-purple text-centered text-bold", {
                                                "text-purple bg-white": values[0] !== option.key,
-                                               "text-white bg-purple-light": values[0] === option.key
+                                               "text-white bg-purple": values[0] === option.key
                                            })}
                                        onClick={() => this.selectValue(option.key, true)}
                                    >
diff --git a/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx b/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx
index 32fcfafda4ef1ae2044cb3ab84b1c80f1c55df67..1dd5ec9d967b320ef901df06b3792c15d381204e 100644
--- a/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx
+++ b/frontend/src/metabase/query_builder/containers/QueryBuilder.jsx
@@ -8,7 +8,6 @@ import cx from "classnames";
 import _ from "underscore";
 
 import { loadTableAndForeignKeys } from "metabase/lib/table";
-import { isPK, isFK } from "metabase/lib/types";
 
 import QueryBuilderTutorial from "metabase/tutorial/QueryBuilderTutorial.jsx";
 
@@ -61,17 +60,6 @@ import { MetabaseApi } from "metabase/services";
 import NativeQuery from "metabase-lib/lib/queries/NativeQuery";
 import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery";
 
-function cellIsClickable(queryResult, rowIndex, columnIndex) {
-    if (!queryResult) return false;
-
-    // lookup the coldef and cell value of the cell we are curious about
-    var coldef = queryResult.data.cols[columnIndex];
-
-    if (!coldef || !coldef.special_type) return false;
-
-    return (coldef.table_id != null && (isPK(coldef.special_type) || (isFK(coldef.special_type) && coldef.target)));
-}
-
 function autocompleteResults(card, prefix) {
     let databaseId = card && card.dataset_query && card.dataset_query.database;
     let apiCall = MetabaseApi.db_autocomplete_suggestions({
@@ -128,7 +116,6 @@ const mapStateToProps = (state, props) => {
 
         loadTableAndForeignKeysFn: loadTableAndForeignKeys,
         autocompleteResultsFn:     (prefix) => autocompleteResults(state.qb.card, prefix),
-        cellIsClickableFn:         (rowIndex, columnIndex) => cellIsClickable(state.qb.queryResult, rowIndex, columnIndex)
     }
 }
 
diff --git a/frontend/src/metabase/query_builder/selectors.js b/frontend/src/metabase/query_builder/selectors.js
index 73f4b60a21a8504364ba5658b4904e80230c0f37..be79650aa595ee7c52ec3899572e23a2e431935e 100644
--- a/frontend/src/metabase/query_builder/selectors.js
+++ b/frontend/src/metabase/query_builder/selectors.js
@@ -5,9 +5,6 @@ import _ from "underscore";
 import { getParametersWithExtras } from "metabase/meta/Card";
 
 import { isCardDirty } from "metabase/lib/card";
-import { parseFieldTargetId } from "metabase/lib/query_time";
-import { isPK } from "metabase/lib/types";
-import Query from "metabase/lib/query";
 import Utils from "metabase/lib/utils";
 
 import Question from "metabase-lib/lib/Question";
@@ -92,57 +89,6 @@ export const getDatabaseFields = createSelector(
     (databaseId, databaseFields) => databaseFields[databaseId]
 );
 
-export const getIsObjectDetail = createSelector(
-    [getQueryResult],
-    (queryResult) => {
-        if (!queryResult || !queryResult.json_query) {
-            return false;
-        }
-
-        const data = queryResult.data,
-              dataset_query = queryResult.json_query;
-
-        let response = false;
-
-        // NOTE: we specifically use only the query result here because we don't want the state of the
-        //       visualization being shown (Object Details) to change as the query/card changes.
-
-        // "rows" type query w/ an '=' filter against the PK column
-        if (dataset_query.query &&
-                dataset_query.query.source_table &&
-                dataset_query.query.filter &&
-                Query.isBareRows(dataset_query.query) &&
-                data.rows &&
-                data.rows.length === 1) {
-
-            // we need to know the PK field of the table that was queried, so find that now
-            let pkField;
-            for (var i=0; i < data.cols.length; i++) {
-                let coldef = data.cols[i];
-                if (coldef.table_id === dataset_query.query.source_table && isPK(coldef.special_type)) {
-                    pkField = coldef.id;
-                }
-            }
-
-            // now check that we have a filter clause w/ '=' filter on PK column
-            if (pkField !== undefined) {
-                for (const filter of Query.getFilters(dataset_query.query)) {
-                    if (Array.isArray(filter) &&
-                            filter.length === 3 &&
-                            filter[0] === "=" &&
-                               parseFieldTargetId(filter[1]) === pkField &&
-                            filter[2] !== null) {
-                        // well, all of our conditions have passed so we have an object detail query here
-                        response = true;
-                    }
-                }
-            }
-        }
-
-        return response;
-    }
-);
-
 
 
 import { getMode as getMode_ } from "metabase/qb/lib/modes";
@@ -152,6 +98,11 @@ export const getMode = createSelector(
     (card, tableMetadata) => getMode_(card, tableMetadata)
 )
 
+export const getIsObjectDetail = createSelector(
+    [getMode],
+    (mode) => mode && mode.name === "object"
+);
+
 export const getParameters = createSelector(
     [getCard, getParameterValues],
     (card, parameterValues) => getParametersWithExtras(card, parameterValues)
diff --git a/frontend/src/metabase/redux/metadata.js b/frontend/src/metabase/redux/metadata.js
index ff2532bac0e9f81084e4a9068d575ddf2b4ed280..da2d38bbcfd1222d62ace660836be583517b0458 100644
--- a/frontend/src/metabase/redux/metadata.js
+++ b/frontend/src/metabase/redux/metadata.js
@@ -146,6 +146,27 @@ export const fetchDatabases = createThunkAction(FETCH_DATABASES, (reload = false
     };
 });
 
+export const FETCH_REAL_DATABASES = "metabase/metadata/FETCH_REAL_DATABASES";
+export const fetchRealDatabases = createThunkAction(FETCH_REAL_DATABASES, (reload = false) => {
+    return async (dispatch, getState) => {
+        const requestStatePath = ["metadata", "databases"];
+        const existingStatePath = requestStatePath;
+        const getData = async () => {
+            const databases = await MetabaseApi.db_real_list_with_tables();
+            return normalize(databases, [DatabaseSchema]);
+        };
+
+        return await fetchData({
+            dispatch,
+            getState,
+            requestStatePath,
+            existingStatePath,
+            getData,
+            reload
+        });
+    };
+});
+
 export const FETCH_DATABASE_METADATA = "metabase/metadata/FETCH_DATABASE_METADATA";
 export const fetchDatabaseMetadata = createThunkAction(FETCH_DATABASE_METADATA, function(dbId, reload = false) {
     return async function(dispatch, getState) {
@@ -275,10 +296,33 @@ export const fetchFieldValues = createThunkAction(FETCH_FIELD_VALUES, function(f
     };
 });
 
+// Docstring from m.api.field:
+// Update the human-readable values for a `Field` whose special type is
+// `category`/`city`/`state`/`country` or whose base type is `type/Boolean`.
+export const UPDATE_FIELD_VALUES = "metabase/metadata/UPDATE_FIELD_VALUES";
+export const updateFieldValues = createThunkAction(UPDATE_FIELD_VALUES, function(fieldId, fieldValuePairs) {
+    return async function(dispatch, getState) {
+        const requestStatePath = ["metadata", "fields", fieldId, "dimension"];
+        const existingStatePath = ["metadata", "fields", fieldId];
+
+        const putData = async () => {
+            return await MetabaseApi.field_values_update({ fieldId, values: fieldValuePairs })
+        };
+
+        return await updateData({
+            dispatch,
+            getState,
+            requestStatePath,
+            existingStatePath,
+            putData
+        });
+    };
+});
+
 export const ADD_PARAM_VALUES = "metabase/metadata/ADD_PARAM_VALUES";
 export const addParamValues = createAction(ADD_PARAM_VALUES);
 
-const UPDATE_FIELD = "metabase/metadata/UPDATE_FIELD";
+export const UPDATE_FIELD = "metabase/metadata/UPDATE_FIELD";
 export const updateField = createThunkAction(UPDATE_FIELD, function(field) {
     return async function(dispatch, getState) {
         const requestStatePath = ["metadata", "fields", field.id];
@@ -302,6 +346,46 @@ export const updateField = createThunkAction(UPDATE_FIELD, function(field) {
     };
 });
 
+export const DELETE_FIELD_DIMENSION = "metabase/metadata/DELETE_FIELD_DIMENSION";
+export const deleteFieldDimension = createThunkAction(DELETE_FIELD_DIMENSION, function(fieldId) {
+    return async function(dispatch, getState) {
+        const requestStatePath = ["metadata", "fields", fieldId, "dimension"];
+        const existingStatePath = ["metadata", "fields", fieldId];
+
+        const putData = async () => {
+            return await MetabaseApi.field_dimension_delete({ fieldId });
+        };
+
+        return await updateData({
+            dispatch,
+            getState,
+            requestStatePath,
+            existingStatePath,
+            putData
+        });
+    };
+});
+
+export const UPDATE_FIELD_DIMENSION = "metabase/metadata/UPDATE_FIELD_DIMENSION";
+export const updateFieldDimension = createThunkAction(UPDATE_FIELD_DIMENSION, function(fieldId, dimension) {
+    return async function(dispatch, getState) {
+        const requestStatePath = ["metadata", "fields", fieldId, "dimension"];
+        const existingStatePath = ["metadata", "fields", fieldId];
+
+        const putData = async () => {
+            return await MetabaseApi.field_dimension_update({ fieldId, ...dimension });
+        };
+
+        return await updateData({
+            dispatch,
+            getState,
+            requestStatePath,
+            existingStatePath,
+            putData
+        });
+    };
+});
+
 export const FETCH_REVISIONS = "metabase/metadata/FETCH_REVISIONS";
 export const fetchRevisions = createThunkAction(FETCH_REVISIONS, (type, id, reload = false) => {
     return async (dispatch, getState) => {
diff --git a/frontend/src/metabase/reference/components/Detail.jsx b/frontend/src/metabase/reference/components/Detail.jsx
index 210d4f11167d328deb124402eb486cc10a46dff5..b59fc544554bd23cbde2f37e5894f566c848cf25 100644
--- a/frontend/src/metabase/reference/components/Detail.jsx
+++ b/frontend/src/metabase/reference/components/Detail.jsx
@@ -23,7 +23,7 @@ const Detail = ({ name, description, placeholder, subtitleClass, url, icon, isEd
                         placeholder={placeholder}
                         {...field}
                         //FIXME: use initialValues from redux forms instead of default value
-                        // to allow for reinitializing on cancel (see ReferenceGettingStartedGuide.jsx)
+                        // to allow for reinitializing on cancel (see GettingStartedGuide.jsx)
                         defaultValue={description}
                     /> :
                     <span className={subtitleClass}>{description || placeholder || 'No description yet'}</span>
diff --git a/frontend/src/metabase/reference/databases/FieldDetail.jsx b/frontend/src/metabase/reference/databases/FieldDetail.jsx
index 940193202323b717a3b2d52f9fa541953eb92968..4935084040b338422efaae53f2a2416431bf9753 100644
--- a/frontend/src/metabase/reference/databases/FieldDetail.jsx
+++ b/frontend/src/metabase/reference/databases/FieldDetail.jsx
@@ -36,7 +36,7 @@ import * as metadataActions from 'metabase/redux/metadata';
 import * as actions from 'metabase/reference/reference';
 
 
-const interestingQuestions = (database, table, field) => {
+const interestingQuestions = (database, table, field, metadata) => {
     return [
         {
             text: `Number of ${table.display_name} grouped by ${field.display_name}`,
@@ -46,7 +46,8 @@ const interestingQuestions = (database, table, field) => {
                 tableId: table.id,
                 fieldId: field.id,
                 getCount: true,
-                visualization: 'bar'
+                visualization: 'bar',
+                metadata
             })
         },
         {
@@ -57,7 +58,8 @@ const interestingQuestions = (database, table, field) => {
                 tableId: table.id,
                 fieldId: field.id,
                 getCount: true,
-                visualization: 'pie'
+                visualization: 'pie',
+                metadata
             })
         },
         {
@@ -66,7 +68,8 @@ const interestingQuestions = (database, table, field) => {
             link: getQuestionUrl({
                 dbId: database.id,
                 tableId: table.id,
-                fieldId: field.id
+                fieldId: field.id,
+                metadata
             })
         }
     ]
@@ -128,6 +131,7 @@ export default class FieldDetail extends Component {
         loading: PropTypes.bool,
         loadingError: PropTypes.object,
         submitting: PropTypes.bool,
+        metadata: PropTypes.object
     };
 
     render() {
@@ -146,6 +150,7 @@ export default class FieldDetail extends Component {
             handleSubmit,
             resetForm,
             submitting,
+            metadata
         } = this.props;
 
         const onSubmit = handleSubmit(async (fields) =>
@@ -226,7 +231,7 @@ export default class FieldDetail extends Component {
                             </li>
 
 
-                            { !isEditing && 
+                            { !isEditing &&
                                 <li className="relative">
                                     <Detail
                                         id="base_type"
@@ -246,7 +251,16 @@ export default class FieldDetail extends Component {
                                 </li>
                             { !isEditing &&
                                 <li className="relative">
-                                    <UsefulQuestions questions={interestingQuestions(this.props.database, this.props.table, this.props.field)} />
+                                    <UsefulQuestions
+                                        questions={
+                                            interestingQuestions(
+                                                this.props.database,
+                                                this.props.table,
+                                                this.props.field,
+                                                metadata
+                                            )
+                                        }
+                                    />
                                 </li>
                             }
 
diff --git a/frontend/src/metabase/reference/databases/FieldDetailContainer.jsx b/frontend/src/metabase/reference/databases/FieldDetailContainer.jsx
index 37d5fabcb9802dbb305f04db060b2c0bbf3ab576..cba6872401902ebcdbcded640cfee49b4fbd2799 100644
--- a/frontend/src/metabase/reference/databases/FieldDetailContainer.jsx
+++ b/frontend/src/metabase/reference/databases/FieldDetailContainer.jsx
@@ -9,6 +9,7 @@ import FieldDetail from "metabase/reference/databases/FieldDetail.jsx"
 
 import * as metadataActions from 'metabase/redux/metadata';
 import * as actions from 'metabase/reference/reference';
+import { getMetadata } from "metabase/selectors/metadata";
 
 import {
     getDatabase,
@@ -23,7 +24,8 @@ const mapStateToProps = (state, props) => ({
     table: getTable(state, props),    
     field: getField(state, props),    
     databaseId: getDatabaseId(state, props),
-    isEditing: getIsEditing(state, props)
+    isEditing: getIsEditing(state, props),
+    metadata: getMetadata(state, props)
 });
 
 const mapDispatchToProps = {
@@ -40,7 +42,8 @@ export default class FieldDetailContainer extends Component {
         databaseId: PropTypes.number.isRequired,
         table: PropTypes.object.isRequired,
         field: PropTypes.object.isRequired,
-        isEditing: PropTypes.bool
+        isEditing: PropTypes.bool,
+        metadata: PropTypes.object
     };
 
     async fetchContainerData(){
diff --git a/frontend/src/metabase/reference/databases/FieldSidebar.jsx b/frontend/src/metabase/reference/databases/FieldSidebar.jsx
index 171ee257214ed2c526c9209ad9cf78fc9759ed7e..b0ca33652f32e583c233e2ba547d4c791099a011 100644
--- a/frontend/src/metabase/reference/databases/FieldSidebar.jsx
+++ b/frontend/src/metabase/reference/databases/FieldSidebar.jsx
@@ -32,6 +32,10 @@ const FieldSidebar =({
                              href={`/reference/databases/${database.id}/tables/${table.id}/fields/${field.id}`} 
                              icon="document" 
                              name="Details" />
+                <SidebarItem key={`/xray/field/${field.id}/approximate`}
+                             href={`/xray/field/${field.id}/approximate`}
+                             icon="document"
+                             name="X-Ray this Field" />
         </ul>
     </div>
 
diff --git a/frontend/src/metabase/reference/databases/TableSidebar.jsx b/frontend/src/metabase/reference/databases/TableSidebar.jsx
index c221a3d3ed06c4b1b282764e05ffbd60b09c9650..2b294109cbe5d1a0c028c80f696dcf0157ff5d9b 100644
--- a/frontend/src/metabase/reference/databases/TableSidebar.jsx
+++ b/frontend/src/metabase/reference/databases/TableSidebar.jsx
@@ -39,6 +39,10 @@ const TableSidebar = ({
                          href={`/reference/databases/${database.id}/tables/${table.id}/questions`} 
                          icon="all" 
                          name="Questions about this table" />
+            <SidebarItem key={`/xray/table/${table.id}/approximate`}
+                         href={`/xray/table/${table.id}/approximate`}
+                         icon="all"
+                         name="X-Ray this table" />
         </ol>
     </div>
 
diff --git a/frontend/src/metabase/reference/guide/GettingStartedGuide.jsx b/frontend/src/metabase/reference/guide/GettingStartedGuide.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..6b207af002cde2f0ccf0a70c3a942beea7aa985e
--- /dev/null
+++ b/frontend/src/metabase/reference/guide/GettingStartedGuide.jsx
@@ -0,0 +1,303 @@
+/* eslint "react/prop-types": "warn" */
+import React, { Component } from "react";
+import PropTypes from "prop-types";
+import { Link } from "react-router";
+import { connect } from 'react-redux';
+
+import cx from "classnames";
+
+import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper.jsx";
+
+import GuideHeader from "metabase/reference/components/GuideHeader.jsx";
+import GuideDetail from "metabase/reference/components/GuideDetail.jsx";
+
+import * as metadataActions from 'metabase/redux/metadata';
+import * as actions from 'metabase/reference/reference';
+import { clearRequestState } from "metabase/redux/requests";
+import { createDashboard, updateDashboard } from 'metabase/dashboards/dashboards';
+
+import { updateSetting } from 'metabase/admin/settings/settings';
+
+import {
+    getGuide,
+    getUser,
+    getDashboards,
+    getLoading,
+    getError,
+    getIsEditing,
+    getTables,
+    getFields,
+    getMetrics,
+    getSegments,
+} from '../selectors';
+
+import {
+    getQuestionUrl,
+    has
+} from '../utils';
+
+const isGuideEmpty = ({
+    things_to_know,
+    contact,
+    most_important_dashboard,
+    important_metrics,
+    important_segments,
+    important_tables
+} = {}) => things_to_know ? false :
+    contact && contact.name ? false :
+    contact && contact.email ? false :
+    most_important_dashboard ? false :
+    important_metrics && important_metrics.length !== 0 ? false :
+    important_segments && important_segments.length !== 0 ? false :
+    important_tables && important_tables.length !== 0 ? false :
+    true;
+
+// This function generates a link for each important field of a Metric.
+// The link goes to a question composed of this Metric broken out by
+// that important field.
+const exploreLinksForMetric = (metricId, guide, metadataFields, tables) => {
+    if (guide.metric_important_fields[metricId]) { 
+        return guide.metric_important_fields[metricId]
+                .map(fieldId => metadataFields[fieldId])
+                .map(field => ({
+                    name: field.display_name || field.name,
+                    url: getQuestionUrl({
+                        dbId: tables[field.table_id] && tables[field.table_id].db_id,
+                        tableId: field.table_id,
+                        fieldId: field.id,
+                        metricId
+                    })
+                }))
+    }
+}
+
+const mapStateToProps = (state, props) => ({
+    guide: getGuide(state, props),
+    user: getUser(state, props),
+    dashboards: getDashboards(state, props),
+    metrics: getMetrics(state, props),
+    segments: getSegments(state, props),
+    tables: getTables(state, props),
+    // FIXME: avoids a naming conflict; tried the propNamespace option
+    // but couldn't quite get it to work together with passing in
+    // dynamic initialValues
+    metadataFields: getFields(state, props),
+    loading: getLoading(state, props),
+    // naming this 'error' will conflict with redux form
+    loadingError: getError(state, props),
+    isEditing: getIsEditing(state, props),
+});
+
+const mapDispatchToProps = {
+    updateDashboard,
+    createDashboard,
+    updateSetting,
+    clearRequestState,
+    ...metadataActions,
+    ...actions
+};
+
+@connect(mapStateToProps, mapDispatchToProps)
+export default class GettingStartedGuide extends Component {
+    static propTypes = {
+        fields: PropTypes.object,
+        style: PropTypes.object,
+        guide: PropTypes.object,
+        user: PropTypes.object,
+        dashboards: PropTypes.object,
+        metrics: PropTypes.object,
+        segments: PropTypes.object,
+        tables: PropTypes.object,
+        metadataFields: PropTypes.object,
+        loadingError: PropTypes.any,
+        loading: PropTypes.bool,
+        startEditing: PropTypes.func,
+    };
+
+    render() {
+        const {
+            style,
+            guide,
+            user,
+            dashboards,
+            metrics,
+            segments,
+            tables,
+            metadataFields,
+            loadingError,
+            loading,
+            startEditing,
+        } = this.props;
+
+        return (
+            <div className="full relative py4" style={style}>
+                <LoadingAndErrorWrapper className="full" style={style} loading={!loadingError && loading} error={loadingError}>
+                { () => 
+                    <div>
+                        <GuideHeader
+                            startEditing={startEditing}
+                            isSuperuser={user && user.is_superuser}
+                        />
+
+                        <div className="wrapper wrapper--trim">
+                            { (!guide || isGuideEmpty(guide)) && user && user.is_superuser && (
+                                <AdminInstructions>
+                                    <h2 className="py2">Help your team get started with your data.</h2>
+                                    <GuideText>
+                                        Show your team what’s most important by choosing your top dashboard, metrics, and segments.
+                                    </GuideText>
+                                    <button
+                                        className="Button Button--primary"
+                                        onClick={startEditing}
+                                    >
+                                        Get started
+                                    </button>
+                                </AdminInstructions>
+                            )}
+
+                            { guide.most_important_dashboard !== null && [
+                                <div className="my2">
+                                    <SectionHeader key={'dashboardTitle'}>
+                                        Our most important dashboard
+                                    </SectionHeader>
+                                    <GuideDetail
+                                        key={'dashboardDetail'}
+                                        type="dashboard"
+                                        entity={dashboards[guide.most_important_dashboard]}
+                                        tables={tables}
+                                    />
+                                </div>
+                            ]}
+                            { Object.keys(metrics).length > 0  && (
+                                    <div className="my4 pt4">
+                                        <SectionHeader trim={guide.important_metrics.length === 0}>
+                                            { guide.important_metrics && guide.important_metrics.length > 0 ? 'Numbers that we pay attention to' : 'Metrics' }
+                                        </SectionHeader>
+                                        { (guide.important_metrics && guide.important_metrics.length > 0) ? [
+                                            <div className="my2">
+                                                { guide.important_metrics.map((metricId) =>
+                                                    <GuideDetail
+                                                        key={metricId}
+                                                        type="metric"
+                                                        entity={metrics[metricId]}
+                                                        tables={tables}
+                                                        exploreLinks={exploreLinksForMetric(metricId, guide, metadataFields, tables)}
+                                                    />
+                                                )}
+                                            </div>
+                                        ] :
+                                            <GuideText>
+                                                Metrics are important numbers your company cares about. They often represent a core indicator of how the business is performing.
+                                            </GuideText>
+                                        }
+                                        <div>
+                                            <Link className="Button Button--primary" to={'/reference/metrics'}>
+                                                See all metrics
+                                            </Link>
+                                        </div>
+                                    </div>
+                                )
+                            }
+
+                            <div className="mt4 pt4">
+                                <SectionHeader trim={(!has(guide.important_segments) && !has(guide.important_tables))}>
+                                    { Object.keys(segments).length > 0 ? 'Segments and tables' : 'Tables' }
+                                </SectionHeader>
+                                { has(guide.important_segments) || has(guide.important_tables) ?
+                                    <div className="my2">
+                                        { guide.important_segments.map((segmentId) =>
+                                            <GuideDetail
+                                                key={segmentId}
+                                                type="segment"
+                                                entity={segments[segmentId]}
+                                                tables={tables}
+                                            />
+                                        )}
+                                        { guide.important_tables.map((tableId) =>
+                                            <GuideDetail
+                                                key={tableId}
+                                                type="table"
+                                                entity={tables[tableId]}
+                                                tables={tables}
+                                            />
+                                        )}
+                                    </div>
+                                :
+                                    <GuideText>
+                                        { Object.keys(segments).length > 0 ? (
+                                            <span>
+                                                Segments and tables are the building blocks of your company's data. Tables are collections of the raw information while segments are specific slices with specific meanings, like <b>"Recent orders."</b>
+                                            </span>
+                                        ) : "Tables are the building blocks of your company's data."
+                                        }
+                                    </GuideText>
+                                }
+                                <div>
+                                    { Object.keys(segments).length > 0 && (
+                                        <Link className="Button Button--purple mr2" to={'/reference/segments'}>
+                                            See all segments
+                                        </Link>
+                                    )}
+                                    <Link
+                                        className={cx(
+                                            { 'text-purple text-bold no-decoration text-underline-hover' : Object.keys(segments).length > 0 },
+                                            { 'Button Button--purple' : Object.keys(segments).length === 0 }
+                                        )}
+                                        to={'/reference/databases'}
+                                    >
+                                        See all tables
+                                    </Link>
+                                </div>
+                            </div>
+
+                            <div className="mt4 pt4">
+                                <SectionHeader trim={!guide.things_to_know}>
+                                    { guide.things_to_know ? 'Other things to know about our data' : 'Find out more' }
+                                </SectionHeader>
+                                <GuideText>
+                                    { guide.things_to_know ? guide.things_to_know : "A good way to get to know your data is by spending a bit of time exploring the different tables and other info available to you. It may take a while, but you'll start to recognize names and meanings over time."
+                                    }
+                                </GuideText>
+                                <Link className="Button link text-bold" to={'/reference/databases'}>
+                                    Explore our data
+                                </Link>
+                            </div>
+
+                            <div className="mt4">
+                                { guide.contact && (guide.contact.name || guide.contact.email) && [
+                                    <SectionHeader key={'contactTitle'}>
+                                        Have questions?
+                                    </SectionHeader>,
+                                    <div className="mb4 pb4" key={'contactDetails'}>
+                                            { guide.contact.name &&
+                                                <span className="text-dark mr3">
+                                                    {`Contact ${guide.contact.name}`}
+                                                </span>
+                                            }
+                                            { guide.contact.email &&
+                                                <a className="text-brand text-bold no-decoration" href={`mailto:${guide.contact.email}`}>
+                                                    {guide.contact.email}
+                                                </a>
+                                            }
+                                    </div>
+                                ]}
+                            </div>
+                        </div>
+                    </div>
+                }
+                </LoadingAndErrorWrapper>
+            </div>
+        );
+    }
+}
+
+const GuideText = ({ children }) => // eslint-disable-line react/prop-types
+    <p className="text-paragraph text-measure">{children}</p>
+
+const AdminInstructions = ({ children }) => // eslint-disable-line react/prop-types
+    <div className="bordered border-brand rounded p3 text-brand text-measure text-centered bg-light-blue">
+        {children}
+    </div>
+
+const SectionHeader = ({ trim, children }) => // eslint-disable-line react/prop-types
+    <h2 className={cx('text-dark text-measure', {  "mb0" : trim }, { "mb4" : !trim })}>{children}</h2>
diff --git a/frontend/src/metabase/reference/guide/GettingStartedGuideContainer.jsx b/frontend/src/metabase/reference/guide/GettingStartedGuideContainer.jsx
index 54f40a982b0296c7426ad83f4cd3c0ce5cd31ca1..bfad648263e9b713a96c0c6a651e40cbff9f33c4 100644
--- a/frontend/src/metabase/reference/guide/GettingStartedGuideContainer.jsx
+++ b/frontend/src/metabase/reference/guide/GettingStartedGuideContainer.jsx
@@ -3,7 +3,8 @@ import React, { Component } from 'react';
 import PropTypes from "prop-types";
 import { connect } from 'react-redux';
 
-import ReferenceGettingStartedGuide from "metabase/reference/guide/ReferenceGettingStartedGuide.jsx"
+import GettingStartedGuide from "metabase/reference/guide/GettingStartedGuide.jsx"
+import GettingStartedGuideEditForm from "metabase/reference/guide/GettingStartedGuideEditForm.jsx"
 
 import * as metadataActions from 'metabase/redux/metadata';
 import * as actions from 'metabase/reference/reference';
@@ -55,9 +56,14 @@ export default class GettingStartedGuideContainer extends Component {
     }
 
     render() {
-
         return (
-                <ReferenceGettingStartedGuide {...this.props} />
+            <div>
+                
+            { this.props.isEditing ? 
+                <GettingStartedGuideEditForm {...this.props} /> :
+                <GettingStartedGuide {...this.props} />
+            }            
+            </div>
         );
     }
 }
diff --git a/frontend/src/metabase/reference/guide/ReferenceGettingStartedGuide.jsx b/frontend/src/metabase/reference/guide/GettingStartedGuideEditForm.jsx
similarity index 63%
rename from frontend/src/metabase/reference/guide/ReferenceGettingStartedGuide.jsx
rename to frontend/src/metabase/reference/guide/GettingStartedGuideEditForm.jsx
index e572ebb59b3935ee9ebec44a617e0c316a9b5c89..07a6570c0cb86738d07f889adccffa91c03d9347 100644
--- a/frontend/src/metabase/reference/guide/ReferenceGettingStartedGuide.jsx
+++ b/frontend/src/metabase/reference/guide/GettingStartedGuideEditForm.jsx
@@ -1,7 +1,6 @@
 /* eslint "react/prop-types": "warn" */
 import React, { Component } from "react";
 import PropTypes from "prop-types";
-import { Link } from "react-router";
 import { connect } from 'react-redux';
 import { reduxForm } from "redux-form";
 
@@ -12,9 +11,7 @@ import CreateDashboardModal from 'metabase/components/CreateDashboardModal.jsx';
 import Modal from 'metabase/components/Modal.jsx';
 
 import EditHeader from "metabase/reference/components/EditHeader.jsx";
-import GuideHeader from "metabase/reference/components/GuideHeader.jsx";
 import GuideEditSection from "metabase/reference/components/GuideEditSection.jsx";
-import GuideDetail from "metabase/reference/components/GuideDetail.jsx";
 import GuideDetailEditor from "metabase/reference/components/GuideDetailEditor.jsx";
 
 import * as metadataActions from 'metabase/redux/metadata';
@@ -30,7 +27,6 @@ import S from "../components/GuideDetailEditor.css";
 
 import {
     getGuide,
-    getUser,
     getDashboards,
     getLoading,
     getError,
@@ -43,26 +39,6 @@ import {
     getSegments,
 } from '../selectors';
 
-import {
-    getQuestionUrl,
-    has
-} from '../utils';
-
-const isGuideEmpty = ({
-    things_to_know,
-    contact,
-    most_important_dashboard,
-    important_metrics,
-    important_segments,
-    important_tables
-} = {}) => things_to_know ? false :
-    contact && contact.name ? false :
-    contact && contact.email ? false :
-    most_important_dashboard ? false :
-    important_metrics && important_metrics.length !== 0 ? false :
-    important_segments && important_segments.length !== 0 ? false :
-    important_tables && important_tables.length !== 0 ? false :
-    true;
 
 const mapStateToProps = (state, props) => {
     const guide = getGuide(state, props);
@@ -102,7 +78,6 @@ const mapStateToProps = (state, props) => {
 
     return {
         guide,
-        user: getUser(state, props),
         dashboards,
         metrics,
         segments,
@@ -153,12 +128,11 @@ const mapDispatchToProps = {
         'important_segments_and_tables[].points_of_interest'
     ]
 })
-export default class ReferenceGettingStartedGuide extends Component {
+export default class GettingStartedGuideEditForm extends Component {
     static propTypes = {
         fields: PropTypes.object,
         style: PropTypes.object,
         guide: PropTypes.object,
-        user: PropTypes.object,
         dashboards: PropTypes.object,
         metrics: PropTypes.object,
         segments: PropTypes.object,
@@ -168,7 +142,6 @@ export default class ReferenceGettingStartedGuide extends Component {
         loadingError: PropTypes.any,
         loading: PropTypes.bool,
         isEditing: PropTypes.bool,
-        startEditing: PropTypes.func,
         endEditing: PropTypes.func,
         handleSubmit: PropTypes.func,
         submitting: PropTypes.bool,
@@ -191,7 +164,6 @@ export default class ReferenceGettingStartedGuide extends Component {
             },
             style,
             guide,
-            user,
             dashboards,
             metrics,
             segments,
@@ -201,7 +173,6 @@ export default class ReferenceGettingStartedGuide extends Component {
             loadingError,
             loading,
             isEditing,
-            startEditing,
             endEditing,
             handleSubmit,
             submitting,
@@ -252,7 +223,7 @@ export default class ReferenceGettingStartedGuide extends Component {
                     />
                 }
                 <LoadingAndErrorWrapper className="full" style={style} loading={!loadingError && loading} error={loadingError}>
-                { () => isEditing ?
+                { () => 
                     <div className="wrapper wrapper--trim">
                         <div className="mt4 py2">
                             <h1 className="my3 text-dark">
@@ -455,170 +426,7 @@ export default class ReferenceGettingStartedGuide extends Component {
                                 </div>
                             </div>
                         </GuideEditSection>
-                    </div> :
-                    <div>
-                        <GuideHeader
-                            startEditing={startEditing}
-                            isSuperuser={user && user.is_superuser}
-                        />
-
-                        <div className="wrapper wrapper--trim">
-                            { (!guide || isGuideEmpty(guide)) && user && user.is_superuser && (
-                                <AdminInstructions>
-                                    <h2 className="py2">Help your team get started with your data.</h2>
-                                    <GuideText>
-                                        Show your team what’s most important by choosing your top dashboard, metrics, and segments.
-                                    </GuideText>
-                                    <button
-                                        className="Button Button--primary"
-                                        onClick={startEditing}
-                                    >
-                                        Get started
-                                    </button>
-                                </AdminInstructions>
-                            )}
-
-                            { guide.most_important_dashboard !== null && [
-                                <div className="my2">
-                                    <SectionHeader key={'dashboardTitle'}>
-                                        Our most important dashboard
-                                    </SectionHeader>
-                                    <GuideDetail
-                                        key={'dashboardDetail'}
-                                        type="dashboard"
-                                        entity={dashboards[guide.most_important_dashboard]}
-                                        tables={tables}
-                                    />
-                                </div>
-                            ]}
-                            { Object.keys(metrics).length > 0  && (
-                                    <div className="my4 pt4">
-                                        <SectionHeader trim={guide.important_metrics.length === 0}>
-                                            { guide.important_metrics && guide.important_metrics.length > 0 ? 'Numbers that we pay attention to' : 'Metrics' }
-                                        </SectionHeader>
-                                        { (guide.important_metrics && guide.important_metrics.length > 0) ? [
-                                            <div className="my2">
-                                                { guide.important_metrics.map((metricId) =>
-                                                    <GuideDetail
-                                                        key={metricId}
-                                                        type="metric"
-                                                        entity={metrics[metricId]}
-                                                        tables={tables}
-                                                        exploreLinks={guide.metric_important_fields[metricId] &&
-                                                            guide.metric_important_fields[metricId]
-                                                                .map(fieldId => metadataFields[fieldId])
-                                                                .map(field => ({
-                                                                    name: field.display_name || field.name,
-                                                                    url: getQuestionUrl({
-                                                                        dbId: tables[field.table_id] && tables[field.table_id].db_id,
-                                                                        tableId: field.table_id,
-                                                                        fieldId: field.id,
-                                                                        metricId
-                                                                    })
-                                                                }))
-                                                        }
-                                                    />
-                                                )}
-                                            </div>
-                                        ] :
-                                            <GuideText>
-                                                Metrics are important numbers your company cares about. They often represent a core indicator of how the business is performing.
-                                            </GuideText>
-                                        }
-                                        <div>
-                                            <Link className="Button Button--primary" to={'/reference/metrics'}>
-                                                See all metrics
-                                            </Link>
-                                        </div>
-                                    </div>
-                                )
-                            }
-
-                            <div className="mt4 pt4">
-                                <SectionHeader trim={(!has(guide.important_segments) && !has(guide.important_tables))}>
-                                    { Object.keys(segments).length > 0 ? 'Segments and tables' : 'Tables' }
-                                </SectionHeader>
-                                { has(guide.important_segments) || has(guide.important_tables) ?
-                                    <div className="my2">
-                                        { guide.important_segments.map((segmentId) =>
-                                            <GuideDetail
-                                                key={segmentId}
-                                                type="segment"
-                                                entity={segments[segmentId]}
-                                                tables={tables}
-                                            />
-                                        )}
-                                        { guide.important_tables.map((tableId) =>
-                                            <GuideDetail
-                                                key={tableId}
-                                                type="table"
-                                                entity={tables[tableId]}
-                                                tables={tables}
-                                            />
-                                        )}
-                                    </div>
-                                :
-                                    <GuideText>
-                                        { Object.keys(segments).length > 0 ? (
-                                            <span>
-                                                Segments and tables are the building blocks of your company's data. Tables are collections of the raw information while segments are specific slices with specific meanings, like <b>"Recent orders."</b>
-                                            </span>
-                                        ) : "Tables are the building blocks of your company's data."
-                                        }
-                                    </GuideText>
-                                }
-                                <div>
-                                    { Object.keys(segments).length > 0 && (
-                                        <Link className="Button Button--purple mr2" to={'/reference/segments'}>
-                                            See all segments
-                                        </Link>
-                                    )}
-                                    <Link
-                                        className={cx(
-                                            { 'text-purple text-bold no-decoration text-underline-hover' : Object.keys(segments).length > 0 },
-                                            { 'Button Button--purple' : Object.keys(segments).length === 0 }
-                                        )}
-                                        to={'/reference/databases'}
-                                    >
-                                        See all tables
-                                    </Link>
-                                </div>
-                            </div>
-
-                            <div className="mt4 pt4">
-                                <SectionHeader trim={!guide.things_to_know}>
-                                    { guide.things_to_know ? 'Other things to know about our data' : 'Find out more' }
-                                </SectionHeader>
-                                <GuideText>
-                                    { guide.things_to_know ? guide.things_to_know : "A good way to get to know your data is by spending a bit of time exploring the different tables and other info available to you. It may take a while, but you'll start to recognize names and meanings over time."
-                                    }
-                                </GuideText>
-                                <Link className="Button link text-bold" to={'/reference/databases'}>
-                                    Explore our data
-                                </Link>
-                            </div>
-
-                            <div className="mt4">
-                                { guide.contact && (guide.contact.name || guide.contact.email) && [
-                                    <SectionHeader key={'contactTitle'}>
-                                        Have questions?
-                                    </SectionHeader>,
-                                    <div className="mb4 pb4" key={'contactDetails'}>
-                                            { guide.contact.name &&
-                                                <span className="text-dark mr3">
-                                                    {`Contact ${guide.contact.name}`}
-                                                </span>
-                                            }
-                                            { guide.contact.email &&
-                                                <a className="text-brand text-bold no-decoration" href={`mailto:${guide.contact.email}`}>
-                                                    {guide.contact.email}
-                                                </a>
-                                            }
-                                    </div>
-                                ]}
-                            </div>
-                        </div>
-                    </div>
+                    </div> 
                 }
                 </LoadingAndErrorWrapper>
             </form>
@@ -626,13 +434,5 @@ export default class ReferenceGettingStartedGuide extends Component {
     }
 }
 
-const GuideText = ({ children }) => // eslint-disable-line react/prop-types
-    <p className="text-paragraph text-measure">{children}</p>
-
-const AdminInstructions = ({ children }) => // eslint-disable-line react/prop-types
-    <div className="bordered border-brand rounded p3 text-brand text-measure text-centered bg-light-blue">
-        {children}
-    </div>
-
 const SectionHeader = ({ trim, children }) => // eslint-disable-line react/prop-types
     <h2 className={cx('text-dark text-measure', {  "mb0" : trim }, { "mb4" : !trim })}>{children}</h2>
diff --git a/frontend/src/metabase/reference/metrics/MetricDetail.jsx b/frontend/src/metabase/reference/metrics/MetricDetail.jsx
index e72f971cf43086e2e72a5682a91dac6756b7c4fd..981fef4ed3348e3b40a04834d6b86ab402faba13 100644
--- a/frontend/src/metabase/reference/metrics/MetricDetail.jsx
+++ b/frontend/src/metabase/reference/metrics/MetricDetail.jsx
@@ -7,7 +7,6 @@ import { push } from "react-router-redux";
 
 import List from "metabase/components/List.jsx";
 import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper.jsx";
-
 import EditHeader from "metabase/reference/components/EditHeader.jsx";
 import EditableReferenceHeader from "metabase/reference/components/EditableReferenceHeader.jsx";
 import Detail from "metabase/reference/components/Detail.jsx";
diff --git a/frontend/src/metabase/reference/reference.js b/frontend/src/metabase/reference/reference.js
index 8c44378cb7ba8692a0b77ab746c3a9d451ed5cf3..ff94c989aa9227c8211c573a8420b3427d16c7a2 100644
--- a/frontend/src/metabase/reference/reference.js
+++ b/frontend/src/metabase/reference/reference.js
@@ -10,11 +10,11 @@ import {
 
 import MetabaseAnalytics from 'metabase/lib/analytics';
 
-import { GettingStartedApi } from "metabase/services";
+import { GettingStartedApi, XRayApi } from 'metabase/services';
 
-import { 
-    filterUntouchedFields, 
-    isEmptyObject 
+import {
+    filterUntouchedFields,
+    isEmptyObject
 } from "./utils.js"
 
 export const FETCH_GUIDE = "metabase/reference/FETCH_GUIDE";
@@ -74,6 +74,117 @@ export const showDashboardModal = createAction(SHOW_DASHBOARD_MODAL);
 
 export const hideDashboardModal = createAction(HIDE_DASHBOARD_MODAL);
 
+// Xray Fetch Actions
+// These actions are used to fetch Xray fingerprints and comparisons. Most take a cost which
+// is used by the backend to figure out how precise to be when generating the xray stats.
+
+const FETCH_FIELD_FINGERPRINT = 'metabase/reference/FETCH_FIELD_FINGERPRINT';
+export const fetchFieldFingerPrint = createThunkAction(FETCH_FIELD_FINGERPRINT, function(fieldId, cost) {
+    return async () => {
+        try {
+            let fingerprint = await XRayApi.field_fingerprint({ fieldId, ...cost.method });
+            return fingerprint;
+        } catch (error) {
+            console.error(error);
+        }
+    };
+});
+
+const FETCH_TABLE_FINGERPRINT = 'metabase/reference/FETCH_TABLE_FINGERPRINT';
+export const fetchTableFingerPrint = createThunkAction(FETCH_TABLE_FINGERPRINT, function(tableId, cost) {
+    return async () => {
+        try {
+            let fingerprint = await XRayApi.table_fingerprint({ tableId, ...cost.method });
+            return fingerprint;
+        } catch (error) {
+            console.error(error);
+        }
+    };
+});
+
+
+const FETCH_SEGMENT_FINGERPRINT = 'metabase/reference/FETCH_SEGMENT_FINGERPRINT';
+export const fetchSegmentFingerPrint = createThunkAction(FETCH_SEGMENT_FINGERPRINT, function(segmentId, cost) {
+    return async () => {
+        try {
+            let fingerprint = await XRayApi.segment_fingerprint({ segmentId, ...cost.method });
+            return fingerprint;
+        } catch (error) {
+            console.error(error);
+        }
+    };
+});
+
+const FETCH_CARD_FINGERPRINT = 'metabase/reference/FETCH_CARD_FINGERPRINT';
+export const fetchCardFingerPrint = createThunkAction(FETCH_CARD_FINGERPRINT, function(cardId) {
+    return async () => {
+        try {
+            let fingerprint = await XRayApi.card_fingerprint({ cardId });
+            return fingerprint;
+        } catch (error) {
+            console.error(error);
+        }
+    };
+});
+
+const FETCH_FIELD_COMPARISON = 'metabase/reference/FETCH_FIELD_COMPARISON';
+export const fetchFieldComparison = createThunkAction(FETCH_FIELD_COMPARISON, function(fieldId1, fieldId2) {
+    return async () => {
+        try {
+            let comparison = await XRayApi.field_compare({ fieldId1, fieldId2 })
+            return comparison
+        } catch (error) {
+            console.error(error)
+        }
+    }
+})
+const FETCH_TABLE_COMPARISON = 'metabase/reference/FETCH_TABLE_COMPARISON';
+export const fetchTableComparison = createThunkAction(FETCH_TABLE_COMPARISON, function(tableId1, tableId2) {
+    return async () => {
+        try {
+            let comparison = await XRayApi.table_compare({ tableId1, tableId2 })
+            return comparison
+        } catch (error) {
+            console.error(error)
+        }
+    }
+})
+
+const FETCH_SEGMENT_COMPARISON = 'metabase/reference/FETCH_SEGMENT_COMPARISON';
+export const fetchSegmentComparison = createThunkAction(FETCH_SEGMENT_COMPARISON, function(segmentId1, segmentId2) {
+    return async () => {
+        try {
+            let comparison = await XRayApi.segment_compare({ segmentId1, segmentId2 })
+            return comparison
+        } catch (error) {
+            console.error(error)
+        }
+    }
+})
+
+const FETCH_METRIC_COMPARISON = 'metabase/reference/FETCH_METRIC_COMPARISON';
+export const fetchMetricComparison = createThunkAction(FETCH_METRIC_COMPARISON, function(metricId1, metricId2) {
+    return async () => {
+        try {
+            let comparison = await XRayApi.metric_compare({ metricId1, metricId2 })
+            return comparison
+        } catch (error) {
+            console.error(error)
+        }
+    }
+})
+
+const FETCH_CARD_COMPARISON = 'metabase/reference/FETCH_CARD_COMPARISON';
+export const fetchCardComparison = createThunkAction(FETCH_CARD_COMPARISON, function(cardId1, cardId2) {
+    return async () => {
+        try {
+            let comparison = await XRayApi.card_compare({ cardId1, cardId2 })
+            return comparison
+        } catch (error) {
+            console.error(error)
+        }
+    }
+})
 
 // Helper functions. This is meant to be a transitional state to get things out of tryFetchData() and friends
 
@@ -96,8 +207,8 @@ const fetchDataWrapper = (props, fn) => {
 export const wrappedFetchGuide = async (props) => {
 
     fetchDataWrapper(
-        props, 
-        async () => { 
+        props,
+        async () => {
                 await Promise.all(
                     [props.fetchGuide(),
                      props.fetchDashboards(),
@@ -114,8 +225,8 @@ export const wrappedFetchDatabaseMetadata = (props, databaseID) => {
 export const wrappedFetchDatabaseMetadataAndQuestion = async (props, databaseID) => {
 
     fetchDataWrapper(
-        props, 
-        async (dbID) => { 
+        props,
+        async (dbID) => {
                 await Promise.all(
                     [props.fetchDatabaseMetadata(dbID),
                      props.fetchQuestions()]
@@ -125,11 +236,11 @@ export const wrappedFetchDatabaseMetadataAndQuestion = async (props, databaseID)
 export const wrappedFetchMetricDetail = async (props, metricID) => {
 
     fetchDataWrapper(
-        props, 
-        async (mID) => { 
+        props,
+        async (mID) => {
                 await Promise.all(
                     [props.fetchMetricTable(mID),
-                     props.fetchMetrics(), 
+                     props.fetchMetrics(),
                      props.fetchGuide()]
                 )}
         )(metricID)
@@ -137,11 +248,11 @@ export const wrappedFetchMetricDetail = async (props, metricID) => {
 export const wrappedFetchMetricQuestions = async (props, metricID) => {
 
     fetchDataWrapper(
-        props, 
-        async (mID) => { 
+        props,
+        async (mID) => {
                 await Promise.all(
                     [props.fetchMetricTable(mID),
-                     props.fetchMetrics(), 
+                     props.fetchMetrics(),
                      props.fetchQuestions()]
                 )}
         )(metricID)
@@ -149,8 +260,8 @@ export const wrappedFetchMetricQuestions = async (props, metricID) => {
 export const wrappedFetchMetricRevisions = async (props, metricID) => {
 
     fetchDataWrapper(
-        props, 
-        async (mID) => { 
+        props,
+        async (mID) => {
                 await Promise.all(
                     [props.fetchMetricRevisions(mID),
                      props.fetchMetrics()]
@@ -176,7 +287,7 @@ export const wrappedFetchMetricRevisions = async (props, metricID) => {
 // }
 
 export const wrappedFetchDatabases = (props) => {
-    fetchDataWrapper(props, props.fetchDatabases)({})
+    fetchDataWrapper(props, props.fetchRealDatabases)({})
 }
 export const wrappedFetchMetrics = (props) => {
     fetchDataWrapper(props, props.fetchMetrics)({})
@@ -194,8 +305,8 @@ export const wrappedFetchSegmentDetail = (props, segmentID) => {
 export const wrappedFetchSegmentQuestions = async (props, segmentID) => {
 
     fetchDataWrapper(
-        props, 
-        async (sID) => { 
+        props,
+        async (sID) => {
                 await props.fetchSegments(sID);
                 await Promise.all(
                     [props.fetchSegmentTable(sID),
@@ -206,8 +317,8 @@ export const wrappedFetchSegmentQuestions = async (props, segmentID) => {
 export const wrappedFetchSegmentRevisions = async (props, segmentID) => {
 
     fetchDataWrapper(
-        props, 
-        async (sID) => { 
+        props,
+        async (sID) => {
                 await props.fetchSegments(sID);
                 await Promise.all(
                     [props.fetchSegmentRevisions(sID),
@@ -218,8 +329,8 @@ export const wrappedFetchSegmentRevisions = async (props, segmentID) => {
 export const wrappedFetchSegmentFields = async (props, segmentID) => {
 
     fetchDataWrapper(
-        props, 
-        async (sID) => { 
+        props,
+        async (sID) => {
                 await props.fetchSegments(sID);
                 await Promise.all(
                     [props.fetchSegmentFields(sID),
@@ -229,7 +340,7 @@ export const wrappedFetchSegmentFields = async (props, segmentID) => {
 }
 
 // This is called when a component gets a new set of props.
-// I *think* this is un-necessary in all cases as we're using multiple 
+// I *think* this is unnecessary in all cases as we're using multiple
 // components where the old code re-used the same component
 export const clearState = props => {
     props.endEditing();
@@ -247,9 +358,9 @@ const resetForm = (props) => {
 }
 
 // Update actions
-// these use the "fetchDataWrapper" for now. It should probably be renamed. 
-// Using props to fire off actions, which imo should be refactored to 
-// dispatch directly, since there is no actual dependence with the props 
+// these use the "fetchDataWrapper" for now. It should probably be renamed.
+// Using props to fire off actions, which imo should be refactored to
+// dispatch directly, since there is no actual dependence with the props
 // of that component
 
 const updateDataWrapper = (props, fn) => {
@@ -542,6 +653,7 @@ export const tryUpdateGuide = async (formFields, props) => {
     endEditing();
 };
 
+
 const initialState = {
     error: null,
     isLoading: false,
@@ -553,6 +665,18 @@ export default handleActions({
     [FETCH_GUIDE]: {
         next: (state, { payload }) => assoc(state, 'guide', payload)
     },
+    [FETCH_FIELD_FINGERPRINT]: {
+        next: (state, { payload }) => assoc(state, 'fieldFingerprint', payload)
+    },
+    [FETCH_TABLE_FINGERPRINT]: {
+        next: (state, { payload }) => assoc(state, 'tableFingerprint', payload)
+    },
+    [FETCH_SEGMENT_FINGERPRINT]: {
+        next: (state, { payload }) => assoc(state, 'segmentFingerprint', payload)
+    },
+    [FETCH_FIELD_COMPARISON]: {
+        next: (state, { payload }) => assoc(state, 'fieldComparison', payload)
+    },
     [SET_ERROR]: {
         throw: (state, { payload }) => assoc(state, 'error', payload)
     },
diff --git a/frontend/src/metabase/reference/segments/SegmentList.jsx b/frontend/src/metabase/reference/segments/SegmentList.jsx
index 66ff03ffa4b3af1e8150fbce4bbaff9088716666..6ba991140df7fee579b662fc0440ab1858be8aa0 100644
--- a/frontend/src/metabase/reference/segments/SegmentList.jsx
+++ b/frontend/src/metabase/reference/segments/SegmentList.jsx
@@ -62,7 +62,7 @@ export default class SegmentList extends Component {
 
         return (
             <div style={style} className="full">
-                <ReferenceHeader 
+                <ReferenceHeader
                     name="Segments"
                 />
                 <LoadingAndErrorWrapper loading={!loadingError && loading} error={loadingError}>
@@ -88,8 +88,7 @@ export default class SegmentList extends Component {
                     </div>
                     :
                     <div className={S.empty}>
-                        <AdminAwareEmptyState {...emptyStateData}/>
-                        }
+                        <AdminAwareEmptyState {...emptyStateData} />
                     </div>
                 }
                 </LoadingAndErrorWrapper>
diff --git a/frontend/src/metabase/reference/segments/SegmentSidebar.jsx b/frontend/src/metabase/reference/segments/SegmentSidebar.jsx
index fc955e1a6a0045fd57ffcd8af3a97a5037556d9b..8e05aecebcd288e41bebbf267ee6ab964e2adac2 100644
--- a/frontend/src/metabase/reference/segments/SegmentSidebar.jsx
+++ b/frontend/src/metabase/reference/segments/SegmentSidebar.jsx
@@ -38,6 +38,10 @@ const SegmentSidebar = ({
                              href={`/reference/segments/${segment.id}/questions`} 
                              icon="all" 
                              name={`Questions about this segment`} />
+                <SidebarItem key={`/xray/segment/${segment.id}/approximate`}
+                             href={`/xray/segment/${segment.id}/approximate`}
+                             icon="all"
+                             name={`X-Ray this segment`} />
              { user && user.is_superuser &&
 
                 <SidebarItem key={`/reference/segments/${segment.id}/revisions`}
diff --git a/frontend/src/metabase/reference/selectors.js b/frontend/src/metabase/reference/selectors.js
index 7539d3c7ae23e80b5921010c47d26491e331b600..3a508879eae0b209fc865efcce130045de5f5ded 100644
--- a/frontend/src/metabase/reference/selectors.js
+++ b/frontend/src/metabase/reference/selectors.js
@@ -169,3 +169,28 @@ export const getGuide = (state, props) => state.reference.guide;
 export const getDashboards = (state, props) => getDashboardListing(state) && resourceListToMap(getDashboardListing(state));
 
 export const getIsDashboardModalOpen = (state, props) => state.reference.isDashboardModalOpen;
+
+
+export const getFieldFingerprint = (state) =>
+    state.reference.fieldFingerprint && state.reference.fieldFingerprint.fingerprint
+
+export const getTableFingerprint = (state) =>
+    state.reference.tableFingerprint && state.reference.tableFingerprint.fingerprint
+
+export const getSegmentFingerprint = (state) =>
+    state.reference.segmentFingerprint && state.reference.segmentFingerprint.fingerprint
+
+export const getTableConstituents = (state) =>
+    state.reference.tableFingerprint && (
+        Object.keys(state.reference.tableFingerprint.constituents).map(key =>
+            state.reference.tableFingerprint.constituents[key]
+        )
+    )
+
+export const getSegmentConstituents = (state) =>
+    state.reference.segmentFingerprint && (
+        Object.keys(state.reference.segmentFingerprint.constituents).map(key =>
+            state.reference.segmentFingerprint.constituents[key]
+        )
+    )
+
diff --git a/frontend/src/metabase/routes.jsx b/frontend/src/metabase/routes.jsx
index 6314943f49b64346b3a88222333d9e3aa468a40a..4e9807668f08a1a89348a0b4c18925de2d8f8d36 100644
--- a/frontend/src/metabase/routes.jsx
+++ b/frontend/src/metabase/routes.jsx
@@ -50,13 +50,14 @@ import SegmentApp from "metabase/admin/datamodel/containers/SegmentApp.jsx";
 import RevisionHistoryApp from "metabase/admin/datamodel/containers/RevisionHistoryApp.jsx";
 import AdminPeopleApp from "metabase/admin/people/containers/AdminPeopleApp.jsx";
 import SettingsEditorApp from "metabase/admin/settings/containers/SettingsEditorApp.jsx";
+import FieldApp from "metabase/admin/datamodel/containers/FieldApp.jsx"
 
 import NotFound from "metabase/components/NotFound.jsx";
 import Unauthorized from "metabase/components/Unauthorized.jsx";
 
 // Reference Guide
 import GettingStartedGuideContainer from "metabase/reference/guide/GettingStartedGuideContainer.jsx";
-// Reference Metrics 
+// Reference Metrics
 import MetricListContainer from "metabase/reference/metrics/MetricListContainer.jsx";
 import MetricDetailContainer from "metabase/reference/metrics/MetricDetailContainer.jsx";
 import MetricQuestionsContainer from "metabase/reference/metrics/MetricQuestionsContainer.jsx";
@@ -78,6 +79,18 @@ import FieldListContainer from "metabase/reference/databases/FieldListContainer.
 import FieldDetailContainer from "metabase/reference/databases/FieldDetailContainer.jsx";
 
 
+/* XRay */
+import FieldXRay from "metabase/xray/containers/FieldXray.jsx";
+import TableXRay from "metabase/xray/containers/TableXRay.jsx";
+import SegmentXRay from "metabase/xray/containers/SegmentXRay.jsx";
+import CardXRay from "metabase/xray/containers/CardXRay.jsx";
+
+/* Comparisons */
+import FieldComparison from "metabase/xray/containers/FieldComparison.jsx";
+import TableComparison from "metabase/xray/containers/TableComparison.jsx";
+import SegmentComparison from "metabase/xray/containers/SegmentComparison.jsx";
+import CardComparison from "metabase/xray/containers/CardComparison.jsx";
+
 import getAdminPermissionsRoutes from "metabase/admin/permissions/routes.jsx";
 
 import PeopleListingApp from "metabase/admin/people/containers/PeopleListingApp.jsx";
@@ -228,6 +241,17 @@ export const getRoutes = (store) =>
                     <Route path="databases/:databaseId/tables/:tableId/fields/:fieldId" component={FieldDetailContainer} />
                     <Route path="databases/:databaseId/tables/:tableId/questions" component={TableQuestionsContainer} />
                 </Route>
+                {/* XRAY */}
+                <Route path="/xray" title="XRay">
+                    <Route path="segment/:segmentId/:cost" component={SegmentXRay} />
+                    <Route path="table/:tableId/:cost" component={TableXRay} />
+                    <Route path="field/:fieldId/:cost" component={FieldXRay} />
+                    <Route path="card/:cardId" component={CardXRay} />
+                    <Route path="compare/fields/:fieldId1/:fieldId2" component={FieldComparison} />
+                    <Route path="compare/tables/:tableId1/:tableId2" component={TableComparison} />
+                    <Route path="compare/segments/:segmentId1/:segmentId2" component={SegmentComparison} />
+                    <Route path="compare/cards/:cardId1/:cardId2" component={CardComparison} />
+                </Route>
 
                 {/* PULSE */}
                 <Route path="/pulse" title="Pulses">
@@ -256,6 +280,7 @@ export const getRoutes = (store) =>
                     <Route path="database/:databaseId" component={MetadataEditorApp} />
                     <Route path="database/:databaseId/:mode" component={MetadataEditorApp} />
                     <Route path="database/:databaseId/:mode/:tableId" component={MetadataEditorApp} />
+                    <Route path="database/:databaseId/:mode/:tableId/:fieldId" component={FieldApp} />
                     <Route path="metric/create" component={MetricApp} />
                     <Route path="metric/:id" component={MetricApp} />
                     <Route path="segment/create" component={SegmentApp} />
diff --git a/frontend/src/metabase/selectors/metadata.js b/frontend/src/metabase/selectors/metadata.js
index aec80ef25203ac1f0595eb29bd995c6dce6bb369..6cb138df2fd51060ca77156a2e87904acc0395d9 100644
--- a/frontend/src/metabase/selectors/metadata.js
+++ b/frontend/src/metabase/selectors/metadata.js
@@ -88,6 +88,8 @@ export const getMetadata = createSelector(
             getAggregatorsWithFields(t));
         hydrate(meta.tables, "breakout_options", t => getBreakouts(t.fields));
 
+        hydrate(meta.fields, "remapping", f => new Map(getFieldValues(f)));
+
         hydrateLookup(meta.databases, "tables", "id");
         hydrateLookup(meta.tables, "fields", "id");
         hydrateLookup(meta.fields, "operators", "name");
@@ -122,7 +124,17 @@ export const getSegments = createSelector(
 // MISC
 
 export const getParameterFieldValues = (state, props) => {
-    return getFieldValues(getIn(state, ["metadata", "fields", props.parameter.field_id, "values"]));
+    const fieldValues = getFieldValues(getIn(state, ["metadata", "fields", props.parameter.field_id]));
+
+    // HACK Atte Keinänen 7/27/17: Currently the field value analysis code only returns a single value for booleans,
+    // this will be addressed in analysis sync refactor
+    const isBooleanFieldValues =
+        fieldValues && fieldValues.length === 1 && fieldValues[0] && typeof(fieldValues[0][0]) === "boolean"
+    if (isBooleanFieldValues) {
+        return [[true], [false]];
+    } else {
+        return fieldValues;
+    }
 }
 
 // UTILS:
diff --git a/frontend/src/metabase/services.js b/frontend/src/metabase/services.js
index 1c47d5c9c1325ca079a08e90c00f7d7090ae50b4..322d065f20d0d94689f946ae01a1323f59ad8fb0 100644
--- a/frontend/src/metabase/services.js
+++ b/frontend/src/metabase/services.js
@@ -95,6 +95,7 @@ export const LdapApi = {
 export const MetabaseApi = {
     db_list:                     GET("/api/database"),
     db_list_with_tables:         GET("/api/database?include_tables=true&include_cards=true"),
+    db_real_list_with_tables:    GET("/api/database?include_tables=true&include_cards=false"),
     db_create:                  POST("/api/database"),
     db_add_sample_dataset:      POST("/api/database/sample_dataset"),
     db_get:                      GET("/api/database/:dbId"),
@@ -120,16 +121,46 @@ export const MetabaseApi = {
                                         table.metrics.push(...GA.metrics);
                                         table.segments.push(...GA.segments);
                                     }
+
+                                    if (table && table.fields) {
+                                        // replace dimension_options IDs with objects
+                                        for (const field of table.fields) {
+                                            if (field.dimension_options) {
+                                                field.dimension_options = field.dimension_options.map(id => table.dimension_options[id])
+                                            }
+                                            if (field.default_dimension_option) {
+                                                field.default_dimension_option = table.dimension_options[field.default_dimension_option];
+                                            }
+                                        }
+                                    }
+
                                     return table;
                                  }),
     // table_sync_metadata:        POST("/api/table/:tableId/sync"),
     // field_get:                   GET("/api/field/:fieldId"),
     // field_summary:               GET("/api/field/:fieldId/summary"),
     field_values:                GET("/api/field/:fieldId/values"),
-    // field_value_map_update:     POST("/api/field/:fieldId/value_map_update"),
+    field_values_update:        POST("/api/field/:fieldId/values"),
     field_update:                PUT("/api/field/:id"),
+    field_dimension_update:     POST("/api/field/:fieldId/dimension"),
+    field_dimension_delete:   DELETE("/api/field/:fieldId/dimension"),
     dataset:                    POST("/api/dataset"),
-    dataset_duration:           POST("/api/dataset/duration"),
+    dataset_duration:           POST("/api/dataset/duration")
+};
+
+export const XRayApi = {
+    // X-Rays
+    field_fingerprint:           GET("/api/fingerprint/field/:fieldId"),
+    table_fingerprint:           GET("/api/fingerprint/table/:tableId"),
+    segment_fingerprint:         GET("/api/fingerprint/segment/:segmentId"),
+    card_fingerprint:            GET("/api/fingerprint/card/:cardId"),
+
+    // Comparisons
+    // TODO - the API currently nests the compare endpoints under fingerprint
+    field_compare:               GET("/api/fingerprint/compare/fields/:fieldId1/:fieldId2"),
+    table_compare:               GET("/api/fingerprint/compare/table/:tableId/:otherTableId"),
+    segment_compare:             GET("/api/fingerprint/compare/segment/:segmentId/:otherSegmentId"),
+    card_compare:                GET("/api/fingerprint/compare/card/:cardId/:otherCardId")
 };
 
 export const PulseApi = {
diff --git a/frontend/src/metabase/store.js b/frontend/src/metabase/store.js
index 5b683f4396d38ff9955dc31e04f3100b787f89f4..4b524a52cb747082f6b78a319f8c93f9bc1ef1d4 100644
--- a/frontend/src/metabase/store.js
+++ b/frontend/src/metabase/store.js
@@ -24,11 +24,6 @@ const thunkWithDispatchAction = ({ dispatch, getState }) => next => action => {
     return next(action);
 };
 
-let middleware = [thunkWithDispatchAction, promise];
-if (DEBUG) {
-    middleware.push(logger);
-}
-
 const devToolsExtension = window.devToolsExtension ? window.devToolsExtension() : (f => f);
 
 export function getStore(reducers, history, intialState, enhancer = (a) => a) {
@@ -39,7 +34,12 @@ export function getStore(reducers, history, intialState, enhancer = (a) => a) {
         routing,
     });
 
-    middleware.push(routerMiddleware(history));
+    const middleware = [
+        thunkWithDispatchAction,
+        promise,
+        ...(DEBUG ? [logger] : []),
+        routerMiddleware(history)
+    ];
 
     return createStore(reducer, intialState, compose(
         applyMiddleware(...middleware),
diff --git a/frontend/src/metabase/visualizations/components/ChartSettings.jsx b/frontend/src/metabase/visualizations/components/ChartSettings.jsx
index 15e4f01eb7756e53560dce726c4a7eccbee05e5d..eb681d7e5aa837f5661e5db145efa03b7a8b9540 100644
--- a/frontend/src/metabase/visualizations/components/ChartSettings.jsx
+++ b/frontend/src/metabase/visualizations/components/ChartSettings.jsx
@@ -8,7 +8,7 @@ import Warnings from "metabase/query_builder/components/Warnings.jsx";
 import Visualization from "metabase/visualizations/components/Visualization.jsx"
 import { getSettingsWidgets } from "metabase/visualizations/lib/settings";
 import MetabaseAnalytics from "metabase/lib/analytics";
-import { getVisualizationTransformed } from "metabase/visualizations";
+import { getVisualizationTransformed, extractRemappings } from "metabase/visualizations";
 
 const ChartSettingsTab = ({name, active, onClick}) =>
   <a
@@ -57,7 +57,7 @@ class ChartSettings extends Component {
         if (settings) {
             series = assocIn(series, [0, "card", "visualization_settings"], settings);
         }
-        const transformed = getVisualizationTransformed(series);
+        const transformed = getVisualizationTransformed(extractRemappings(series));
         return transformed.series;
     }
 
@@ -118,7 +118,6 @@ class ChartSettings extends Component {
         const tabNames = Object.keys(tabs);
         const currentTab = this.state.currentTab || tabNames[0];
         const widgets = tabs[currentTab];
-        const isDirty = !_.isEqual(this.props.series[0].card.visualization_settings, this.state.settings);
 
         return (
             <div className="flex flex-column spread p4">
@@ -152,7 +151,7 @@ class ChartSettings extends Component {
                     </div>
                 </div>
                 <div className="pt1">
-                  <a className={cx("Button Button--primary", { disabled: !isDirty })} onClick={() => this.onDone()} data-metabase-event="Chart Settings;Done">Done</a>
+                  <a className="Button Button--primary" onClick={() => this.onDone()} data-metabase-event="Chart Settings;Done">Done</a>
                   <a className="text-grey-2 ml2" onClick={onClose} data-metabase-event="Chart Settings;Cancel">Cancel</a>
                   { !_.isEqual(this.state.settings, {}) &&
                       <a className="Button Button--warning float-right" onClick={this.onResetSettings} data-metabase-event="Chart Settings;Reset">Reset to defaults</a>
diff --git a/frontend/src/metabase/visualizations/components/ChartTooltip.jsx b/frontend/src/metabase/visualizations/components/ChartTooltip.jsx
index b4c6bd72d1bbe8da0d839975ed1619d241a78e58..266271be3e806080acc2877b81285ba1ba39565e 100644
--- a/frontend/src/metabase/visualizations/components/ChartTooltip.jsx
+++ b/frontend/src/metabase/visualizations/components/ChartTooltip.jsx
@@ -7,57 +7,77 @@ import Value from "metabase/components/Value.jsx";
 import { getFriendlyName } from "metabase/visualizations/lib/utils";
 
 export default class ChartTooltip extends Component {
-    constructor(props, context) {
-        super(props, context);
-        this.state = {};
-    }
-
     static propTypes = {
         series: PropTypes.array.isRequired,
         hovered: PropTypes.object
     };
-    static defaultProps = {
-    };
 
-    componentWillReceiveProps({ hovered }) {
-        if (hovered && hovered.data && !Array.isArray(hovered.data)) {
-            console.warn("hovered.data should be an array of { key, value, col }", hovered.data);
+    _getRows() {
+        const { series, hovered } = this.props;
+        if (!hovered) {
+            return [];
+        }
+        // Array of key, value, col: { data: [{ key, value, col }], element, event }
+        if (Array.isArray(hovered.data)) {
+            return hovered.data;
         }
+        // ClickObject: { value, column, dimensions: [{ value, column }], element, event }
+        else if (hovered.value !== undefined || hovered.dimensions) {
+            const dimensions = [];
+            if (hovered.value !== undefined) {
+                dimensions.push({ value: hovered.value, column: hovered.column });
+            }
+            if (hovered.dimensions) {
+                dimensions.push(...hovered.dimensions);
+            }
+            return dimensions.map(({ value, column }) => ({
+                key: getFriendlyName(column),
+                value: value,
+                col: column
+            }))
+        }
+        // DEPRECATED: { key, value }
+        else if (hovered.data) {
+            console.warn("hovered should be a ClickObject or hovered.data should be an array of { key, value, col }", hovered.data);
+            let s = series[hovered.index] || series[0];
+            return [
+                {
+                    key: getFriendlyName(s.data.cols[0]),
+                    value: hovered.data.key,
+                    col: s.data.cols[0]
+                },
+                {
+                    key: getFriendlyName(s.data.cols[1]),
+                    value: hovered.data.value,
+                    col: s.data.cols[1]
+                },
+            ]
+        }
+        return [];
     }
 
     render() {
-        const { series, hovered } = this.props;
-        if (!(hovered && hovered.data && ((hovered.element && document.contains(hovered.element)) || hovered.event))) {
-            return <span className="hidden" />;
-        }
-        let s = series[hovered.index] || series[0];
+        const { hovered } = this.props;
+        const rows = this._getRows();
+        const hasEventOrElement = hovered && ((hovered.element && document.contains(hovered.element)) || hovered.event);
+        const isOpen = rows.length > 0 && !!hasEventOrElement;
         return (
             <TooltipPopover
-                target={hovered.element}
-                targetEvent={hovered.event}
+                target={hovered && hovered.element}
+                targetEvent={hovered && hovered.event}
                 verticalAttachments={["bottom", "top"]}
+                isOpen={isOpen}
             >
                 <table className="py1 px2">
                     <tbody>
-                        { Array.isArray(hovered.data)  ?
-                            hovered.data.map(({ key, value, col }, index) =>
-                                <TooltipRow
-                                    key={index}
-                                    name={key}
-                                    value={value}
-                                    column={col}
-                                />
-                            )
-                        :
-                            [["key", 0], ["value", 1]].map(([propName, colIndex]) =>
-                                <TooltipRow
-                                    key={propName}
-                                    name={getFriendlyName(s.data.cols[colIndex])}
-                                    value={hovered.data[propName]}
-                                    column={s.data.cols[colIndex]}
-                                />
-                            )
-                        }
+                        { rows.map(({ key, value, col }, index) =>
+                            <TooltipRow
+                                key={index}
+                                name={key}
+                                value={value}
+                                column={col}
+                            />
+                        ) }
                     </tbody>
                 </table>
             </TooltipPopover>
diff --git a/frontend/src/metabase/visualizations/components/ChoroplethMap.jsx b/frontend/src/metabase/visualizations/components/ChoroplethMap.jsx
index a415d447fca4cefd734e833f8860dea050075168..49c96054693a1888f2ce8a11b9f111f899ab9e81 100644
--- a/frontend/src/metabase/visualizations/components/ChoroplethMap.jsx
+++ b/frontend/src/metabase/visualizations/components/ChoroplethMap.jsx
@@ -12,7 +12,7 @@ import ChartWithLegend from "./ChartWithLegend.jsx";
 import LegacyChoropleth from "./LegacyChoropleth.jsx";
 import LeafletChoropleth from "./LeafletChoropleth.jsx";
 
-import { computeMinimalBounds } from "metabase/visualizations/lib/mapping";
+import { computeMinimalBounds, getCanonicalRowKey } from "metabase/visualizations/lib/mapping";
 
 import d3 from "d3";
 import ss from "simple-statistics";
@@ -146,7 +146,7 @@ export default class ChoroplethMap extends Component {
         const dimensionIndex = _.findIndex(cols, (col) => col.name === settings["map.dimension"]);
         const metricIndex = _.findIndex(cols, (col) => col.name === settings["map.metric"]);
 
-        const getRowKey       = (row) => String(row[dimensionIndex]).toLowerCase();
+        const getRowKey       = (row) => getCanonicalRowKey(row[dimensionIndex], settings["map.region"]);
         const getRowValue     = (row) => row[metricIndex] || 0;
         const getFeatureName  = (feature) => String(feature.properties[nameProperty]);
         const getFeatureKey   = (feature) => String(feature.properties[keyProperty]).toLowerCase();
diff --git a/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx b/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..cfd816ebd7419284af9fec4c359482c4a6c9cbe1
--- /dev/null
+++ b/frontend/src/metabase/visualizations/components/LeafletGridHeatMap.jsx
@@ -0,0 +1,114 @@
+import LeafletMap from "./LeafletMap.jsx";
+import L from "leaflet";
+
+import d3 from "d3";
+
+import { rangeForValue } from "metabase/lib/dataset";
+
+export default class LeafletGridHeatMap extends LeafletMap {
+    componentDidMount() {
+        super.componentDidMount();
+
+        this.gridLayer = L.layerGroup([]).addTo(this.map);
+        this.componentDidUpdate({}, {});
+    }
+
+    componentDidUpdate(prevProps, prevState) {
+        super.componentDidUpdate(prevProps, prevState);
+
+        try {
+            const { gridLayer } = this;
+            const { points, min, max } = this.props;
+
+            const { latitudeColumn, longitudeColumn } = this._getLatLonColumns();
+            if (!latitudeColumn.binning_info || !longitudeColumn.binning_info) {
+                throw new Error("Grid map requires binned longitude/latitude.");
+            }
+
+            const color = d3.scale.linear().domain([min,max])
+                .interpolate(d3.interpolateHcl)
+                .range([d3.rgb("#00FF00"), d3.rgb('#FF0000')]);
+
+            let gridSquares = gridLayer.getLayers();
+            let totalSquares = Math.max(points.length, gridSquares.length);
+            for (let i = 0; i < totalSquares; i++) {
+                if (i >= points.length) {
+                    gridLayer.removeLayer(gridSquares[i]);
+                }
+                if (i >= gridSquares.length) {
+                    const gridSquare = this._createGridSquare(i);
+                    gridLayer.addLayer(gridSquare);
+                    gridSquares.push(gridSquare);
+                }
+
+                if (i < points.length) {
+                    gridSquares[i].setStyle({ color: color(points[i][2]) });
+                    const [latMin, latMax] = rangeForValue(points[i][0], latitudeColumn);
+                    const [lonMin, lonMax] = rangeForValue(points[i][1], longitudeColumn);
+                    gridSquares[i].setBounds([
+                        [latMin, lonMin],
+                        [latMax, lonMax]
+                    ]);
+                }
+            }
+        } catch (err) {
+            console.error(err);
+            this.props.onRenderError(err.message || err);
+        }
+    }
+
+    _createGridSquare = (index) => {
+        const bounds = [[54.559322, -5.767822], [56.1210604, -3.021240]];
+        const gridSquare = L.rectangle(bounds, {
+            color: "red",
+            weight: 1,
+            stroke: true,
+            fillOpacity: 0.5,
+            strokeOpacity: 1.0
+        });
+        gridSquare.on("click", this._onVisualizationClick.bind(this, index));
+        gridSquare.on("mousemove", this._onHoverChange.bind(this, index));
+        gridSquare.on("mouseout", this._onHoverChange.bind(this, null));
+        return gridSquare;
+    }
+
+    _clickForPoint(index, e) {
+        const { points } = this.props;
+        const point = points[index];
+        const metricColumn = this._getMetricColumn();
+        const { latitudeColumn, longitudeColumn } = this._getLatLonColumns();
+        return {
+            value: point[2],
+            column: metricColumn,
+            dimensions: [
+                {
+                    value: point[0],
+                    column: latitudeColumn,
+                },
+                {
+                    value: point[1],
+                    column: longitudeColumn,
+                }
+            ],
+            event: e.originalEvent
+        }
+    }
+
+    _onVisualizationClick(index, e) {
+        const { onVisualizationClick } = this.props;
+        if (onVisualizationClick) {
+            onVisualizationClick(this._clickForPoint(index, e));
+        }
+    }
+
+    _onHoverChange(index, e) {
+        const { onHoverChange } = this.props;
+        if (onHoverChange) {
+            if (index == null) {
+                onHoverChange(null);
+            } else {
+                onHoverChange(this._clickForPoint(index, e));
+            }
+        }
+    }
+}
diff --git a/frontend/src/metabase/visualizations/components/LeafletHeatMap.jsx b/frontend/src/metabase/visualizations/components/LeafletHeatMap.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..68c2dfc41b91325788243b539f93bdbf4b9160dc
--- /dev/null
+++ b/frontend/src/metabase/visualizations/components/LeafletHeatMap.jsx
@@ -0,0 +1,39 @@
+import LeafletMap from "./LeafletMap.jsx";
+
+import L from "leaflet";
+import "leaflet.heat";
+
+export default class LeafletHeatMap extends LeafletMap {
+    componentDidMount() {
+        super.componentDidMount();
+
+        // Leaflet map may not be fully initialized
+        // https://stackoverflow.com/a/28903337/113
+        setTimeout(() => {
+            this.pinMarkerLayer = L.layerGroup([]).addTo(this.map);
+            this.heatLayer = L.heatLayer([], { radius: 25 }).addTo(this.map);
+            this.componentDidUpdate({}, {});
+        });
+    }
+
+    componentDidUpdate(prevProps, prevState) {
+        super.componentDidUpdate(prevProps, prevState);
+
+        try {
+            const { heatLayer } = this;
+            const { points, max, settings } = this.props;
+
+            heatLayer.setOptions({
+                max: max,
+                maxZoom: settings["map.heat.max-zoom"],
+                minOpacity: settings["map.heat.min-opacity"],
+                radius:  settings["map.heat.radius"],
+                blur: settings["map.heat.blur"],
+            });
+            heatLayer.setLatLngs(points);
+        } catch (err) {
+            console.error(err);
+            this.props.onRenderError(err.message || err);
+        }
+    }
+}
diff --git a/frontend/src/metabase/visualizations/components/LeafletMap.jsx b/frontend/src/metabase/visualizations/components/LeafletMap.jsx
index 065cc15d6c96eb8c9c895cb8f5880ba725f2ff0f..aea44ae2ed6fa49bffa4d3c850d69582c1ae7a6d 100644
--- a/frontend/src/metabase/visualizations/components/LeafletMap.jsx
+++ b/frontend/src/metabase/visualizations/components/LeafletMap.jsx
@@ -9,9 +9,7 @@ import "leaflet-draw";
 
 import _ from "underscore";
 
-import { updateIn } from "icepick";
-import * as Query from "metabase/lib/query/query";
-import { mbqlEq } from "metabase/lib/query/util";
+import { updateLatLonFilter } from "metabase/qb/lib/actions";
 
 export default class LeafletMap extends Component {
     componentDidMount() {
@@ -21,7 +19,8 @@ export default class LeafletMap extends Component {
             const map = this.map = L.map(element, {
                 scrollWheelZoom: false,
                 minZoom: 2,
-                drawControlTooltips: false
+                drawControlTooltips: false,
+                zoomSnap: false
             });
 
             const drawnItems = new L.FeatureGroup();
@@ -66,15 +65,23 @@ export default class LeafletMap extends Component {
 
     componentDidUpdate(prevProps) {
         const { bounds, settings } = this.props;
-        if (!prevProps || prevProps.points !== this.props.points) {
+        if (!prevProps || prevProps.points !== this.props.points || prevProps.width !== this.props.width || prevProps.height !== this.props.height) {
+            this.map.invalidateSize();
+
             if (settings["map.center_latitude"] != null || settings["map.center_longitude"] != null || settings["map.zoom"] != null) {
                 this.map.setView([
                     settings["map.center_latitude"],
                     settings["map.center_longitude"]
                 ], settings["map.zoom"]);
             } else {
+                // compute ideal lat and lon zoom separately and use the lesser zoom to ensure the bounds are visible
+                const latZoom = this.map.getBoundsZoom(L.latLngBounds([[bounds.getSouth(), 0], [bounds.getNorth(), 0]]))
+                const lonZoom = this.map.getBoundsZoom(L.latLngBounds([[0, bounds.getWest()], [0, bounds.getEast()]]))
+                const zoom = Math.min(latZoom, lonZoom);
+                // NOTE: unclear why calling `fitBounds` twice is sometimes required to get it to work
+                this.map.fitBounds(bounds);
+                this.map.setZoom(zoom);
                 this.map.fitBounds(bounds);
-                this.map.setZoom(this.map.getBoundsZoom(bounds, true));
             }
         }
     }
@@ -96,22 +103,7 @@ export default class LeafletMap extends Component {
         const latitudeColumn = _.findWhere(cols, { name: settings["map.latitude_column"] });
         const longitudeColumn = _.findWhere(cols, { name: settings["map.longitude_column"] });
 
-        const filter = [
-            "inside",
-            latitudeColumn.id, longitudeColumn.id,
-            bounds.getNorth(), bounds.getWest(), bounds.getSouth(), bounds.getEast()
-        ]
-
-        setCardAndRun(updateIn(card, ["dataset_query", "query"], (query) => {
-            const index = _.findIndex(Query.getFilters(query), (filter) =>
-                mbqlEq(filter[0], "inside") && filter[1] === latitudeColumn.id && filter[2] === longitudeColumn.id
-            );
-            if (index >= 0) {
-                return Query.updateFilter(query, index, filter);
-            } else {
-                return Query.addFilter(query, filter);
-            }
-        }));
+        setCardAndRun(updateLatLonFilter(card, latitudeColumn, longitudeColumn, bounds));
 
         this.props.onFiltering(false);
     }
@@ -123,11 +115,25 @@ export default class LeafletMap extends Component {
         );
     }
 
-    _getLatLongIndexes() {
+    _getLatLonIndexes() {
         const { settings, series: [{ data: { cols }}] } = this.props;
         return {
             latitudeIndex: _.findIndex(cols, (col) => col.name === settings["map.latitude_column"]),
             longitudeIndex: _.findIndex(cols, (col) => col.name === settings["map.longitude_column"])
         };
     }
+
+    _getLatLonColumns() {
+        const { series: [{ data: { cols }}] } = this.props;
+        const { latitudeIndex, longitudeIndex } = this._getLatLonIndexes();
+        return {
+            latitudeColumn: cols[latitudeIndex],
+            longitudeColumn: cols[longitudeIndex]
+        };
+    }
+
+    _getMetricColumn() {
+        const { settings, series: [{ data: { cols }}] } = this.props;
+        return _.findWhere(cols, { name: settings["map.metric_column"] });
+    }
 }
diff --git a/frontend/src/metabase/visualizations/components/LeafletTilePinMap.jsx b/frontend/src/metabase/visualizations/components/LeafletTilePinMap.jsx
index d1862f14adf5bcf57b939465bc3dd5eed42df7d4..caf810183203d163048e2dcf5324cda42ce09876 100644
--- a/frontend/src/metabase/visualizations/components/LeafletTilePinMap.jsx
+++ b/frontend/src/metabase/visualizations/components/LeafletTilePinMap.jsx
@@ -28,7 +28,7 @@ export default class LeafletTilePinMap extends LeafletMap {
     _getTileUrl = (coord, zoom) => {
         const [{ card: { dataset_query }, data: { cols }}] = this.props.series;
 
-        const { latitudeIndex, longitudeIndex } = this._getLatLongIndexes();
+        const { latitudeIndex, longitudeIndex } = this._getLatLonIndexes();
         const latitudeField = cols[latitudeIndex];
         const longitudeField = cols[longitudeIndex];
 
diff --git a/frontend/src/metabase/visualizations/components/LegacyChoropleth.jsx b/frontend/src/metabase/visualizations/components/LegacyChoropleth.jsx
index 21d67e218c818785768789a9b509d54ddb527e0f..2c296d640f67c174bde72cca4c2452fae1dc8479 100644
--- a/frontend/src/metabase/visualizations/components/LegacyChoropleth.jsx
+++ b/frontend/src/metabase/visualizations/components/LegacyChoropleth.jsx
@@ -29,10 +29,10 @@ const LegacyChoropleth = ({ series, geoJson, projection, getColor, onHoverFeatur
                             })}
                             onMouseLeave={() => onHoverFeature(null)}
                             className={cx({ "cursor-pointer": !!onClickFeature })}
-                            onClick={(e) => onClickFeature({
+                            onClick={onClickFeature && ((e) => onClickFeature({
                                 feature: feature,
                                 event: e.nativeEvent
-                            })}
+                            }))}
                         />
                     )}
                     </svg>
diff --git a/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx b/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
index a669b82448b11c1890a1961734a54d2f52dedf8d..bf8b9ae6555cd5b793c3eaed62272b27c42251bf 100644
--- a/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
+++ b/frontend/src/metabase/visualizations/components/LineAreaBarChart.jsx
@@ -5,6 +5,7 @@ import PropTypes from "prop-types";
 
 import CardRenderer from "./CardRenderer.jsx";
 import LegendHeader from "./LegendHeader.jsx";
+import { TitleLegendHeader } from "./TitleLegendHeader.jsx";
 
 import "./LineAreaBarChart.css";
 
@@ -95,7 +96,9 @@ export default class LineAreaBarChart extends Component {
         }
 
         // both or neither primary dimension must be numeric
-        if (isNumeric(initialDimensions[0]) !== isNumeric(newDimensions[0])) {
+        // a timestamp field is both date and number so don't enforce the condition if both fields are dates; see #2811
+        if ((isNumeric(initialDimensions[0]) !== isNumeric(newDimensions[0])) &&
+            !(isDate(initialDimensions[0]) && isDate(newDimensions[0]))) {
             return false;
         }
 
@@ -186,48 +189,31 @@ export default class LineAreaBarChart extends Component {
 
         const settings = this.getSettings();
 
-        let titleHeaderSeries, multiseriesHeaderSeries;
-
-        // $FlowFixMe
-        let originalSeries = series._raw || series;
-        let cardIds = _.uniq(originalSeries.map(s => s.card.id))
-        const isComposedOfMultipleQuestions = cardIds.length > 1;
-
-        if (showTitle && settings["card.title"]) {
-            titleHeaderSeries = [{ card: {
-                name: settings["card.title"],
-                ...(isComposedOfMultipleQuestions ? {} : {
-                    id: cardIds[0],
-                    dataset_query: originalSeries[0].card.dataset_query
-                }),
-            }}];
-        }
-
+        let multiseriesHeaderSeries;
         if (series.length > 1) {
             multiseriesHeaderSeries = series;
         }
 
+        const hasTitle = showTitle && settings["card.title"];
+
         return (
             <div className={cx("LineAreaBarChart flex flex-column p1", this.getHoverClasses(), this.props.className)}>
-                { titleHeaderSeries ?
-                    <LegendHeader
-                        className="flex-no-shrink"
-                        series={titleHeaderSeries}
-                        description={settings["card.description"]}
+                { hasTitle &&
+                    <TitleLegendHeader
+                        series={series}
+                        settings={settings}
+                        onChangeCardAndRun={onChangeCardAndRun}
                         actionButtons={actionButtons}
-                        // If a dashboard card is composed of multiple questions, its custom card title
-                        // shouldn't act as a link as it's ambiguous that which question it should open
-                        onChangeCardAndRun={ isComposedOfMultipleQuestions ? null : onChangeCardAndRun }
                     />
-                : null }
-                { multiseriesHeaderSeries || (!titleHeaderSeries && actionButtons) ? // always show action buttons if we have them
+                }
+                { multiseriesHeaderSeries || (!hasTitle && actionButtons) ? // always show action buttons if we have them
                     <LegendHeader
                         className="flex-no-shrink"
                         series={multiseriesHeaderSeries}
                         settings={settings}
                         hovered={hovered}
                         onHoverChange={this.props.onHoverChange}
-                        actionButtons={!titleHeaderSeries ? actionButtons : null}
+                        actionButtons={!hasTitle ? actionButtons : null}
                         onChangeCardAndRun={onChangeCardAndRun}
                         onVisualizationClick={onVisualizationClick}
                         visualizationIsClickable={visualizationIsClickable}
@@ -306,7 +292,7 @@ function transformSingleSeries(s, series, seriesIndex) {
                     // show series title if it's multiseries
                     series.length > 1 && card.name,
                     // always show grouping value
-                    formatValue(breakoutValue, cols[seriesColumnIndex])
+                    formatValue(breakoutValue, { column: cols[seriesColumnIndex] })
                 ].filter(n => n).join(": "),
                 _transformed: true,
                 _breakoutValue: breakoutValue,
diff --git a/frontend/src/metabase/visualizations/components/PinMap.jsx b/frontend/src/metabase/visualizations/components/PinMap.jsx
index a5f1371878b28fef983ad7c735b0cb68d19ff559..be8035193338c16e89204336e64dcc6e1949f707 100644
--- a/frontend/src/metabase/visualizations/components/PinMap.jsx
+++ b/frontend/src/metabase/visualizations/components/PinMap.jsx
@@ -5,11 +5,14 @@ import React, { Component } from "react";
 import { hasLatitudeAndLongitudeColumns } from "metabase/lib/schema_metadata";
 import { LatitudeLongitudeError } from "metabase/visualizations/lib/errors";
 
-import LeafletMarkerPinMap from "./LeafletMarkerPinMap.jsx";
-import LeafletTilePinMap from "./LeafletTilePinMap.jsx";
+import LeafletMarkerPinMap from "./LeafletMarkerPinMap";
+import LeafletTilePinMap from "./LeafletTilePinMap";
+import LeafletHeatMap from "./LeafletHeatMap";
+import LeafletGridHeatMap from "./LeafletGridHeatMap";
 
 import _ from "underscore";
 import cx from "classnames";
+import d3 from "d3";
 
 import L from "leaflet";
 
@@ -20,6 +23,10 @@ type Props = VisualizationProps;
 type State = {
     lat: ?number,
     lng: ?number,
+    min: ?number,
+    max: ?number,
+    binHeight: ?number,
+    binWidth: ?number,
     zoom: ?number,
     points: L.Point[],
     bounds: L.Bounds,
@@ -29,6 +36,8 @@ type State = {
 const MAP_COMPONENTS_BY_TYPE = {
     "markers": LeafletMarkerPinMap,
     "tiles": LeafletTilePinMap,
+    "heat": LeafletHeatMap,
+    "grid": LeafletGridHeatMap,
 }
 
 export default class PinMap extends Component {
@@ -62,7 +71,14 @@ export default class PinMap extends Component {
     }
 
     componentWillReceiveProps(newProps: Props) {
-        if (newProps.series[0].data !== this.props.series[0].data) {
+        const SETTINGS_KEYS = ["map.latitude_column", "map.longitude_column", "map.metric_column"];
+        if (newProps.series[0].data !== this.props.series[0].data ||
+            !_.isEqual(
+                // $FlowFixMe
+                _.pick(newProps.settings, ...SETTINGS_KEYS),
+                // $FlowFixMe
+                _.pick(this.props.settings, ...SETTINGS_KEYS))
+        ) {
             this.setState(this._getPoints(newProps))
         }
     }
@@ -94,12 +110,30 @@ export default class PinMap extends Component {
         const { settings, series: [{ data: { cols, rows }}] } = props;
         const latitudeIndex = _.findIndex(cols, (col) => col.name === settings["map.latitude_column"]);
         const longitudeIndex = _.findIndex(cols, (col) => col.name === settings["map.longitude_column"]);
+        const metricIndex = _.findIndex(cols, (col) => col.name === settings["map.metric_column"]);
+
         const points = rows.map(row => [
             row[latitudeIndex],
-            row[longitudeIndex]
+            row[longitudeIndex],
+            metricIndex >= 0 ? row[metricIndex] : 1
         ]);
+
         const bounds = L.latLngBounds(points);
-        return { points, bounds };
+
+        const min = d3.min(points, point => point[2]);
+        const max = d3.max(points, point => point[2]);
+
+        const binWidth = cols[longitudeIndex] && cols[longitudeIndex].binning_info && cols[longitudeIndex].binning_info.bin_width;
+        const binHeight = cols[latitudeIndex] && cols[latitudeIndex].binning_info && cols[latitudeIndex].binning_info.bin_width;
+
+        if (binWidth != null) {
+            bounds._northEast.lng += binWidth;
+        }
+        if (binHeight != null) {
+            bounds._northEast.lat += binHeight;
+        }
+
+        return { points, bounds, min, max, binWidth, binHeight };
     }
 
     render() {
@@ -109,7 +143,7 @@ export default class PinMap extends Component {
 
         const Map = MAP_COMPONENTS_BY_TYPE[settings["map.pin_type"]];
 
-        const { points, bounds } = this.state;//this._getPoints(this.props);
+        const { points, bounds, min, max, binHeight, binWidth } = this.state;
 
         return (
             <div className={cx(className, "PinMap relative hover-parent hover--visibility")} onMouseDownCapture={(e) =>e.stopPropagation() /* prevent dragging */}>
@@ -125,6 +159,10 @@ export default class PinMap extends Component {
                         zoom={zoom}
                         points={points}
                         bounds={bounds}
+                        min={min}
+                        max={max}
+                        binWidth={binWidth}
+                        binHeight={binHeight}
                         onFiltering={(filtering) => this.setState({ filtering })}
                     />
                 : null }
diff --git a/frontend/src/metabase/visualizations/components/TableInteractive.jsx b/frontend/src/metabase/visualizations/components/TableInteractive.jsx
index 6db0134cef0b2341f46624c5a679cd4029c1c1c4..6271fde23fb58701c7819cbd4a77e14cb27d95a1 100644
--- a/frontend/src/metabase/visualizations/components/TableInteractive.jsx
+++ b/frontend/src/metabase/visualizations/components/TableInteractive.jsx
@@ -8,8 +8,8 @@ import "./TableInteractive.css";
 
 import Icon from "metabase/components/Icon.jsx";
 
-import { formatValue, capitalize } from "metabase/lib/formatting";
-import { getFriendlyName } from "metabase/visualizations/lib/utils";
+import { formatValue, formatColumn } from "metabase/lib/formatting";
+import { isID } from "metabase/lib/schema_metadata";
 import { getTableCellClickedObject, isColumnRightAligned } from "metabase/visualizations/lib/table";
 
 import _ from "underscore";
@@ -228,7 +228,8 @@ export default class TableInteractive extends Component {
                 className={cx("TableInteractive-cellWrapper", {
                     "TableInteractive-cellWrapper--firstColumn": columnIndex === 0,
                     "cursor-pointer": isClickable,
-                    "justify-end": isColumnRightAligned(column)
+                    "justify-end": isColumnRightAligned(column),
+                    "link": isClickable && isID(column)
                 })}
                 onClick={isClickable && ((e) => {
                     onVisualizationClick({ ...clicked, element: e.currentTarget });
@@ -252,10 +253,7 @@ export default class TableInteractive extends Component {
         const { cols } = this.props.data;
         const column = cols[columnIndex];
 
-        let columnTitle = getFriendlyName(column);
-        if (column.unit && column.unit !== "default") {
-            columnTitle += ": " + capitalize(column.unit.replace(/-/g, " "))
-        }
+        let columnTitle = formatColumn(column);
         if (!columnTitle && this.props.isPivoted && columnIndex !== 0) {
             columnTitle = "Unset";
         }
diff --git a/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx b/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..b6d4fe7f03e0ac6057ce81bb556529286975912a
--- /dev/null
+++ b/frontend/src/metabase/visualizations/components/TitleLegendHeader.jsx
@@ -0,0 +1,36 @@
+import React from "react";
+import LegendHeader from "./LegendHeader.jsx";
+import _ from "underscore";
+
+export const TitleLegendHeader = ({ series, settings, onChangeCardAndRun, actionButtons }) => {
+    // $FlowFixMe
+    let originalSeries = series._raw || series;
+    let cardIds = _.uniq(originalSeries.map(s => s.card.id))
+    const isComposedOfMultipleQuestions = cardIds.length > 1;
+
+    if (settings["card.title"]) {
+        const titleHeaderSeries = [{ card: {
+            name: settings["card.title"],
+            ...(isComposedOfMultipleQuestions ? {} : {
+                id: cardIds[0],
+                dataset_query: originalSeries[0].card.dataset_query
+            }),
+        }}];
+
+        return (
+            <LegendHeader
+            className="flex-no-shrink"
+            series={titleHeaderSeries}
+            description={settings["card.description"]}
+            actionButtons={actionButtons}
+            // If a dashboard card is composed of multiple questions, its custom card title
+            // shouldn't act as a link as it's ambiguous which question it should open
+            onChangeCardAndRun={ isComposedOfMultipleQuestions ? null : onChangeCardAndRun }
+        />
+        )
+    } else {
+        // If the title isn't provided in settings, render nothing
+        return null
+    }
+}
+
diff --git a/frontend/src/metabase/visualizations/components/Visualization.jsx b/frontend/src/metabase/visualizations/components/Visualization.jsx
index de0a58579b320f456cf250d16a6c457b187a9d8b..6d39a5c9d50535757f81e06dc1d743b40a9a139a 100644
--- a/frontend/src/metabase/visualizations/components/Visualization.jsx
+++ b/frontend/src/metabase/visualizations/components/Visualization.jsx
@@ -13,7 +13,7 @@ import Tooltip from "metabase/components/Tooltip.jsx";
 import { duration, formatNumber } from "metabase/lib/formatting";
 import MetabaseAnalytics from "metabase/lib/analytics";
 
-import { getVisualizationTransformed } from "metabase/visualizations";
+import { getVisualizationTransformed, extractRemappings } from "metabase/visualizations";
 import { getSettings } from "metabase/visualizations/lib/settings";
 import { isSameSeries } from "metabase/visualizations/lib/utils";
 
@@ -67,10 +67,6 @@ type Props = {
     // used for showing content in place of visualization, e.x. dashcard filter mapping
     replacementContent: Element<any>,
 
-    // used by TableInteractive
-    cellIsClickableFn: (number, number) => boolean,
-    cellClickedFn: (number, number) => void,
-
     // misc
     onUpdateWarnings: (string[]) => void,
     onOpenChartSettings: () => void,
@@ -101,6 +97,8 @@ export default class Visualization extends Component {
     state: State;
     props: Props;
 
+    _resetHoverTimer: ?number;
+
     constructor(props: Props) {
         super(props);
 
@@ -167,20 +165,33 @@ export default class Visualization extends Component {
             error: null,
             warnings: [],
             yAxisSplit: null,
-            ...getVisualizationTransformed(newProps.series)
+            ...getVisualizationTransformed(extractRemappings(newProps.series))
         });
     }
 
     handleHoverChange = (hovered) => {
-        const { yAxisSplit } = this.state;
         if (hovered) {
+            const { yAxisSplit } = this.state;
             // if we have Y axis split info then find the Y axis index (0 = left, 1 = right)
             if (yAxisSplit) {
                 const axisIndex = _.findIndex(yAxisSplit, (indexes) => _.contains(indexes, hovered.index));
                 hovered = assoc(hovered, "axisIndex", axisIndex);
             }
+            this.setState({ hovered });
+            // If we previously set a timeout for clearing the hover, clear it now since we received
+            // a new hover.
+            if (this._resetHoverTimer !== null) {
+                clearTimeout(this._resetHoverTimer);
+                this._resetHoverTimer = null;
+            }
+        } else {
+            // When resetting the hover, wait in case we're simply transitioning from one
+            // element to another. This allows visualizations to use mouseleave events etc.
+            this._resetHoverTimer = setTimeout(() => {
+                this.setState({ hovered: null });
+                this._resetHoverTimer = null;
+            }, 0);
         }
-        this.setState({ hovered });
     }
 
     getClickActions(clicked: ?ClickObject) {
diff --git a/frontend/src/metabase/visualizations/index.js b/frontend/src/metabase/visualizations/index.js
index f9d9595e62af91aaee477d559617309d1e8abcfa..3cc598688c6bcbd1ba34ca24e0488c49c7af69eb 100644
--- a/frontend/src/metabase/visualizations/index.js
+++ b/frontend/src/metabase/visualizations/index.js
@@ -11,6 +11,7 @@ import AreaChart   from "./visualizations/AreaChart.jsx";
 import MapViz      from "./visualizations/Map.jsx";
 import ScatterPlot from "./visualizations/ScatterPlot.jsx";
 import Funnel      from "./visualizations/Funnel.jsx";
+import ObjectDetail from "./visualizations/ObjectDetail.jsx";
 
 import _ from "underscore";
 
@@ -72,6 +73,44 @@ export function getVisualizationTransformed(series: Series) {
     return { series, CardVisualization };
 }
 
+export const extractRemappings = (series) => {
+    const se =  series.map(s => ({
+        ...s,
+        data: s.data && extractRemappedColumns(s.data)
+    }));
+    return se;
+}
+
+// removes columns with `remapped_from` property and adds a `remapping` to the appropriate column
+const extractRemappedColumns = (data) => {
+    const cols = data.cols.map(col => ({
+        ...col,
+        remapped_from_index: col.remapped_from && _.findIndex(data.cols, c => c.name === col.remapped_from),
+        remapping: col.remapped_to && new Map()
+    }));
+
+    const rows = data.rows.map((row, rowIndex) =>
+        row.filter((value, colIndex) => {
+            const col = cols[colIndex];
+            if (col.remapped_from != null) {
+                cols[col.remapped_from_index].remapped_to_column = col;
+                cols[col.remapped_from_index].remapping.set(
+                    row[col.remapped_from_index],
+                    row[colIndex]
+                );
+                return false;
+            } else {
+                return true;
+            }
+        })
+    )
+    return {
+        ...data,
+        rows,
+        cols: cols.filter(col => col.remapped_from == null)
+    }
+}
+
 registerVisualization(Scalar);
 registerVisualization(Progress);
 registerVisualization(Table);
@@ -83,5 +122,6 @@ registerVisualization(ScatterPlot);
 registerVisualization(PieChart);
 registerVisualization(MapViz);
 registerVisualization(Funnel);
+registerVisualization(ObjectDetail);
 
 export default visualizations;
diff --git a/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js b/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js
index 1d212bdd67d9c5a8bfada61406ddd47687215fb5..0fcde21bfcbfe861fa092dfd3f9bc8ab3b9542d2 100644
--- a/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js
+++ b/frontend/src/metabase/visualizations/lib/LineAreaBarRenderer.js
@@ -31,7 +31,7 @@ import {
 import { determineSeriesIndexFromElement } from "./tooltip";
 
 import { clipPathReference } from "metabase/lib/dom";
-import { formatValue } from "metabase/lib/formatting";
+import { formatValue, formatNumber } from "metabase/lib/formatting";
 import { parseTimestamp } from "metabase/lib/time";
 import { isStructured } from "metabase/meta/Card";
 
@@ -861,6 +861,18 @@ function forceSortedGroupsOfGroups(groupsOfGroups: CrossfilterGroup[][], indexMa
     }
 }
 
+export function hasRemappingAndValuesAreStrings({ cols }, i = 0) {
+    const column = cols[i];
+
+    if (column.remapping && column.remapping.size > 0) {
+        // We have remapped values, so check their type for determining whether the dimension is numeric
+        // ES6 Map makes the lookup of first value a little verbose
+        return typeof column.remapping.values().next().value === "string";
+    } else {
+        return false
+    }
+}
+
 type LineAreaBarProps = VisualizationProps & {
     chartType: "line" | "area" | "bar" | "scatter",
     isScalarSeries: boolean,
@@ -880,6 +892,19 @@ export default function lineAreaBar(element: Element, {
 }: LineAreaBarProps) {
     const colors = settings["graph.colors"];
 
+    // force histogram to be ordinal axis with zero-filled missing points
+    const isHistogram = settings["graph.x_axis.scale"] === "histogram";
+    if (isHistogram) {
+        settings["line.missing"] = "zero";
+        settings["graph.x_axis.scale"] = "ordinal"
+    }
+
+    // bar histograms have special tick formatting:
+    // * aligned with beginning of bar to show bin boundaries
+    // * label only shows beginning value of bin
+    // * includes an extra tick at the end for the end of the last bin
+    const isHistogramBar = isHistogram && chartType === "bar";
+
     const isTimeseries = settings["graph.x_axis.scale"] === "timeseries";
     const isQuantitative = ["linear", "log", "pow"].indexOf(settings["graph.x_axis.scale"]) >= 0;
     const isOrdinal = !isTimeseries && !isQuantitative;
@@ -889,7 +914,6 @@ export default function lineAreaBar(element: Element, {
     const isMultiCardSeries = series.length > 1 &&
         getIn(series, [0, "card", "id"]) !== getIn(series, [1, "card", "id"]);
 
-    const enableBrush = !!(onChangeCardAndRun && !isMultiCardSeries && isStructured(series[0].card));
 
     // find the first nonempty single series
     // $FlowFixMe
@@ -897,6 +921,9 @@ export default function lineAreaBar(element: Element, {
 
     const isDimensionTimeseries = dimensionIsTimeseries(firstSeries.data);
     const isDimensionNumeric = dimensionIsNumeric(firstSeries.data);
+    const isRemappedToString = hasRemappingAndValuesAreStrings(firstSeries.data);
+
+    const enableBrush = !!(onChangeCardAndRun && !isMultiCardSeries && isStructured(series[0].card) && !isRemappedToString);
 
     if (firstSeries.data.cols.length < 2) {
         throw new Error("This chart type requires at least 2 columns.");
@@ -940,11 +967,19 @@ export default function lineAreaBar(element: Element, {
         // compute the interval
         let unit = minTimeseriesUnit(series.map(s => s.data.cols[0].unit));
         xInterval = computeTimeseriesDataInverval(xValues, unit);
-    } else if (isQuantitative) {
-        xInterval = computeNumericDataInverval(xValues);
+    } else if (isQuantitative || isHistogram) {
+        if (firstSeries.data.cols[0].binning_info) {
+            // Get the bin width from binning_info, if available
+            // TODO: multiseries?
+            xInterval = firstSeries.data.cols[0].binning_info.bin_width;
+        } else {
+            // Otherwise try to infer from the X values
+            xInterval = computeNumericDataInverval(xValues);
+        }
     }
 
     if (settings["line.missing"] === "zero" || settings["line.missing"] === "none") {
+        const fillValue = settings["line.missing"] === "zero" ? 0 : null;
         if (isTimeseries) {
             // $FlowFixMe
             const { interval, count } = xInterval;
@@ -956,26 +991,38 @@ export default function lineAreaBar(element: Element, {
                 datas = fillMissingValues(
                     datas,
                     xValues,
-                    settings["line.missing"] === "zero" ? 0 : null,
+                    fillValue,
                     (m) => d3.round(m.toDate().getTime(), -1) // sometimes rounds up 1ms?
                 );
             }
-        } if (isQuantitative) {
+        } if (isQuantitative || isHistogram) {
             // $FlowFixMe
             const count = Math.abs((xDomain[1] - xDomain[0]) / xInterval);
             if (count <= MAX_FILL_COUNT) {
-                xValues = d3.range(xDomain[0], xDomain[1] + xInterval, xInterval);
+                let [start, end] = xDomain;
+                if (isHistogramBar) {
+                    // NOTE: intentionally add an end point for bar histograms
+                    // $FlowFixMe
+                    end += xInterval * 1.5
+                } else {
+                    // NOTE: avoid including endpoint due to floating point error
+                    // $FlowFixMe
+                    end += xInterval * 0.5
+                }
+                xValues = d3.range(start, end, xInterval);
                 datas = fillMissingValues(
                     datas,
                     xValues,
-                    settings["line.missing"] === "zero" ? 0 : null,
+                    fillValue,
+                    // NOTE: normalize to xInterval to avoid floating point issues
+                    (v) => Math.round(v / xInterval)
                 );
             }
         } else {
             datas = fillMissingValues(
                 datas,
                 xValues,
-                settings["line.missing"] === "zero" ? 0 : null
+                fillValue
             );
         }
     }
@@ -1161,7 +1208,9 @@ export default function lineAreaBar(element: Element, {
         const goalValue = settings["graph.goal_value"];
         const goalData = [[xDomain[0], goalValue], [xDomain[1], goalValue]];
         const goalDimension = crossfilter(goalData).dimension(d => d[0]);
-        const goalGroup = goalDimension.group().reduceSum(d => d[1]);
+        // Take the last point rather than summing in case xDomain[0] === xDomain[1], e.g. when the chart
+        // has just a single row / datapoint
+        const goalGroup = goalDimension.group().reduce((p,d) => d[1], (p,d) => p, () => 0);
         const goalIndex = charts.length;
         let goalChart = dc.lineChart(parent)
             .dimension(goalDimension)
@@ -1204,6 +1253,24 @@ export default function lineAreaBar(element: Element, {
                 });
             }
         })
+    } else if (isHistogramBar) {
+        parent.on("renderlet.histogram-bar", function (chart) {
+            let barCharts = chart.selectAll(".sub rect:first-child")[0].map(node => node.parentNode.parentNode.parentNode);
+            if (barCharts.length > 0) {
+                // manually size bars to fill space, minus 1 pixel padding
+                const bars = barCharts[0].querySelectorAll("rect");
+                let barWidth = parseFloat(bars[0].getAttribute("width"));
+                let newBarWidth = parseFloat(bars[1].getAttribute("x")) - parseFloat(bars[0].getAttribute("x")) - 1;
+                if (newBarWidth > barWidth) {
+                    chart.selectAll("g.sub .bar").attr("width", newBarWidth);
+                }
+
+                // shift half of bar width so ticks line up with start of each bar
+                for (const barChart of barCharts) {
+                    barChart.setAttribute("transform", `translate(${barWidth / 2}, 0)`);
+                }
+            }
+        })
     }
 
     // HACK: compositeChart + ordinal X axis shenanigans
@@ -1222,6 +1289,11 @@ export default function lineAreaBar(element: Element, {
         applyChartOrdinalXAxis(parent, settings, series, xValues);
     }
 
+    // override tick format for bars. ticks are aligned with beginning of bar, so just show the start value
+    if (isHistogramBar) {
+        parent.xAxis().tickFormat(d => formatNumber(d));
+    }
+
     // y-axis settings
     let [left, right] = yAxisSplit.map(indexes => ({
         series: indexes.map(index => series[index]),
@@ -1287,11 +1359,20 @@ export function rowRenderer(
 
   const colors = settings["graph.colors"];
 
-  // format the dimension axis
+  const formatDimension = (row) =>
+      formatValue(row[0], { column: cols[0], type: "axis" })
+
+  // dc.js doesn't give us a way to format the row labels from unformatted data, so we have to
+  // do it here, then construct a mapping to get the original dimension for tooltips/clicks
   const rows = series[0].data.rows.map(row => [
-      formatValue(row[0], { column: cols[0], type: "axis" }),
+      formatDimension(row),
       row[1]
   ]);
+  const formattedDimensionMap = new Map(rows.map(([formattedDimension], index) => [
+      formattedDimension,
+      series[0].data.rows[index][0]
+  ]))
+
   const dataset = crossfilter(rows);
   const dimension = dataset.dimension(d => d[0]);
   const group = dimension.group().reduceSum(d => d[1]);
@@ -1310,7 +1391,7 @@ export function rowRenderer(
                 index: -1,
                 event: d3.event,
                 data: [
-                  { key: getFriendlyName(cols[0]), value: d.key, col: cols[0] },
+                  { key: getFriendlyName(cols[0]), value: formattedDimensionMap.get(d.key), col: cols[0] },
                   { key: getFriendlyName(cols[1]), value: d.value, col: cols[1] }
                 ]
               });
@@ -1325,7 +1406,7 @@ export function rowRenderer(
                   value: d.value,
                   column: cols[1],
                   dimensions: [{
-                      value: d.key,
+                      value: formattedDimensionMap.get(d.key),
                       column: cols[0]
                   }],
                   element: this
diff --git a/frontend/src/metabase/visualizations/lib/mapping.js b/frontend/src/metabase/visualizations/lib/mapping.js
index d2f23e2f8a0e1cff80aa75b3c9669e5c70b84b9b..ed2b7ae07666c7e4009ca11a9eb6ae2fce5e321b 100644
--- a/frontend/src/metabase/visualizations/lib/mapping.js
+++ b/frontend/src/metabase/visualizations/lib/mapping.js
@@ -72,3 +72,83 @@ export function getAllFeaturesPoints(features) {
     }
     return points;
 }
+
+const STATE_CODES = [
+    ["AL", "Alabama"],
+    ["AK", "Alaska"],
+    ["AS", "American Samoa"],
+    ["AZ", "Arizona"],
+    ["AR", "Arkansas"],
+    ["CA", "California"],
+    ["CO", "Colorado"],
+    ["CT", "Connecticut"],
+    ["DE", "Delaware"],
+    ["DC", "District Of Columbia"],
+    ["FM", "Federated States Of Micronesia"],
+    ["FL", "Florida"],
+    ["GA", "Georgia"],
+    ["GU", "Guam"],
+    ["HI", "Hawaii"],
+    ["ID", "Idaho"],
+    ["IL", "Illinois"],
+    ["IN", "Indiana"],
+    ["IA", "Iowa"],
+    ["KS", "Kansas"],
+    ["KY", "Kentucky"],
+    ["LA", "Louisiana"],
+    ["ME", "Maine"],
+    ["MH", "Marshall Islands"],
+    ["MD", "Maryland"],
+    ["MA", "Massachusetts"],
+    ["MI", "Michigan"],
+    ["MN", "Minnesota"],
+    ["MS", "Mississippi"],
+    ["MO", "Missouri"],
+    ["MT", "Montana"],
+    ["NE", "Nebraska"],
+    ["NV", "Nevada"],
+    ["NH", "New Hampshire"],
+    ["NJ", "New Jersey"],
+    ["NM", "New Mexico"],
+    ["NY", "New York"],
+    ["NC", "North Carolina"],
+    ["ND", "North Dakota"],
+    ["MP", "Northern Mariana Islands"],
+    ["OH", "Ohio"],
+    ["OK", "Oklahoma"],
+    ["OR", "Oregon"],
+    ["PW", "Palau"],
+    ["PA", "Pennsylvania"],
+    ["PR", "Puerto Rico"],
+    ["RI", "Rhode Island"],
+    ["SC", "South Carolina"],
+    ["SD", "South Dakota"],
+    ["TN", "Tennessee"],
+    ["TX", "Texas"],
+    ["UT", "Utah"],
+    ["VT", "Vermont"],
+    ["VI", "Virgin Islands"],
+    ["VA", "Virginia"],
+    ["WA", "Washington"],
+    ["WV", "West Virginia"],
+    ["WI", "Wisconsin"],
+    ["WY", "Wyoming"],
+];
+
+const stateNamesMap = new Map(STATE_CODES.map(([key, name]) => [name.toLowerCase(), key.toLowerCase()]))
+
+/**
+ * Canonicalizes row values to match those in the GeoJSONs.
+ *
+ * Currently transforms US state names to state codes for the "us_states" region map, and just lowercases all others.
+ */
+export function getCanonicalRowKey(key, region) {
+    key = String(key).toLowerCase();
+    // Special case for supporting both US state names and state codes
+    // This should be ok because we know there's no overlap between state names and codes, and we know the "us_states" region map expects codes
+    if (region === "us_states" && stateNamesMap.has(key)) {
+        return stateNamesMap.get(key);
+    } else {
+        return key;
+    }
+}
diff --git a/frontend/src/metabase/visualizations/lib/numeric.js b/frontend/src/metabase/visualizations/lib/numeric.js
index aa6de4a28a742bbce34d6fa5e39af37ff9e21136..8d0e296af07a27a9d1cd26d0382c6bd9224ab6ff 100644
--- a/frontend/src/metabase/visualizations/lib/numeric.js
+++ b/frontend/src/metabase/visualizations/lib/numeric.js
@@ -23,6 +23,13 @@ export function precision(a) {
     return e;
 }
 
+export function decimalCount(a) {
+    if (!isFinite(a)) return 0;
+    var e = 1, p = 0;
+    while (Math.round(a * e) / e !== a) { e *= 10; p++; }
+    return p;
+}
+
 export function computeNumericDataInverval(xValues) {
     let bestPrecision = Infinity;
     for (const value of xValues) {
diff --git a/frontend/src/metabase/visualizations/lib/settings.js b/frontend/src/metabase/visualizations/lib/settings.js
index 8f6b54e33f0d4f484d375f255b05977acddcb63f..aa813cd3af294f42c5d3846c46dd0de4e594a103 100644
--- a/frontend/src/metabase/visualizations/lib/settings.js
+++ b/frontend/src/metabase/visualizations/lib/settings.js
@@ -1,6 +1,7 @@
 import { getVisualizationRaw } from "metabase/visualizations";
 
 import {
+    columnsAreValid,
     getChartTypeFromData,
     DIMENSION_DIMENSION_METRIC,
     DIMENSION_METRIC,
@@ -33,22 +34,6 @@ const WIDGETS = {
     colors: ChartSettingColorsPicker,
 }
 
-export function columnsAreValid(colNames, data, filter = () => true) {
-    if (typeof colNames === "string") {
-        colNames = [colNames]
-    }
-    if (!data || !Array.isArray(colNames)) {
-        return false;
-    }
-    const colsByName = {};
-    for (const col of data.cols) {
-        colsByName[col.name] = col;
-    }
-    return colNames.reduce((acc, name) =>
-        acc && (name == undefined || (colsByName[name] && filter(colsByName[name])))
-    , true);
-}
-
 export function getDefaultColumns(series) {
     if (series[0].card.display === "scatter") {
         return getDefaultScatterColumns(series);
@@ -116,6 +101,11 @@ export function getDefaultDimensionAndMetric([{ data: { cols, rows } }]) {
             dimension: cols[0].name,
             metric: cols[1].name
         };
+    } else if (type === DIMENSION_DIMENSION_METRIC) {
+        return {
+            dimension: null,
+            metric: cols[2].name
+        };
     } else {
         return {
             dimension: null,
diff --git a/frontend/src/metabase/visualizations/lib/settings/graph.js b/frontend/src/metabase/visualizations/lib/settings/graph.js
index 46dc61c60f59666a63d4bfea914afc8b7d395f74..200d6296675c6cc74a3e61f738547a007aa0126c 100644
--- a/frontend/src/metabase/visualizations/lib/settings/graph.js
+++ b/frontend/src/metabase/visualizations/lib/settings/graph.js
@@ -1,8 +1,8 @@
 import { capitalize } from "metabase/lib/formatting";
 import { isDimension, isMetric, isNumeric, isAny } from "metabase/lib/schema_metadata";
 
-import { columnsAreValid, getDefaultColumns, getOptionFromColumn } from "metabase/visualizations/lib/settings";
-import { getCardColors, getFriendlyName } from "metabase/visualizations/lib/utils";
+import { getDefaultColumns, getOptionFromColumn } from "metabase/visualizations/lib/settings";
+import { columnsAreValid, getCardColors, getFriendlyName } from "metabase/visualizations/lib/utils";
 import { dimensionIsNumeric } from "metabase/visualizations/lib/numeric";
 import { dimensionIsTimeseries } from "metabase/visualizations/lib/timeseries";
 
@@ -194,13 +194,22 @@ export const GRAPH_AXIS_SETTINGS = {
       getDefault: ([{ data }], vizSettings) =>
           dimensionIsNumeric(data, _.findIndex(data.cols, (c) => c.name === vizSettings["graph.dimensions"].filter(d => d)[0]))
   },
+  "graph.x_axis._is_histogram": {
+      getDefault: ([{ data: { cols } }], vizSettings) =>
+        cols[0].binning_info != null
+  },
   "graph.x_axis.scale": {
       section: "Axes",
       title: "X-axis scale",
       widget: "select",
       default: "ordinal",
-      readDependencies: ["graph.x_axis._is_timeseries", "graph.x_axis._is_numeric"],
+      readDependencies: [
+          "graph.x_axis._is_timeseries",
+          "graph.x_axis._is_numeric",
+          "graph.x_axis._is_histogram"
+      ],
       getDefault: (series, vizSettings) =>
+          vizSettings["graph.x_axis._is_histogram"] ? "histogram" :
           vizSettings["graph.x_axis._is_timeseries"] ? "timeseries" :
           vizSettings["graph.x_axis._is_numeric"] ? "linear" :
           "ordinal",
@@ -211,8 +220,11 @@ export const GRAPH_AXIS_SETTINGS = {
           }
           if (vizSettings["graph.x_axis._is_numeric"]) {
               options.push({ name: "Linear", value: "linear" });
-              options.push({ name: "Power", value: "pow" });
-              options.push({ name: "Log", value: "log" });
+              if (!vizSettings["graph.x_axis._is_histogram"]) {
+                  options.push({ name: "Power", value: "pow" });
+                  options.push({ name: "Log", value: "log" });
+              }
+              options.push({ name: "Histogram", value: "histogram" });
           }
           options.push({ name: "Ordinal", value: "ordinal" });
           return { options };
diff --git a/frontend/src/metabase/visualizations/lib/utils.js b/frontend/src/metabase/visualizations/lib/utils.js
index 5aa2aa826e3e930abb3b51c26fa759567b2889f3..66316013ff08710efa172628184a631149b26079 100644
--- a/frontend/src/metabase/visualizations/lib/utils.js
+++ b/frontend/src/metabase/visualizations/lib/utils.js
@@ -11,6 +11,24 @@ import * as colors from "metabase/lib/colors";
 const SPLIT_AXIS_UNSPLIT_COST = -100;
 const SPLIT_AXIS_COST_FACTOR = 2;
 
+// NOTE Atte Keinänen 8/3/17: Moved from settings.js because this way we
+// are able to avoid circular dependency errors in integrated tests
+export function columnsAreValid(colNames, data, filter = () => true) {
+    if (typeof colNames === "string") {
+        colNames = [colNames]
+    }
+    if (!data || !Array.isArray(colNames)) {
+        return false;
+    }
+    const colsByName = {};
+    for (const col of data.cols) {
+        colsByName[col.name] = col;
+    }
+    return colNames.reduce((acc, name) =>
+        acc && (name == undefined || (colsByName[name] && filter(colsByName[name])))
+        , true);
+}
+
 // computed size properties (drop 'px' and convert string -> Number)
 function getComputedSizeProperty(prop, element) {
     var val = document.defaultView.getComputedStyle(element, null).getPropertyValue(prop);
@@ -116,10 +134,8 @@ export function getXValues(datas, chartType) {
     return xValues;
 }
 
-export function getFriendlyName(col) {
-    let name = col.display_name || col.name;
-    let friendlyName = FRIENDLY_NAME_MAP[name.toLowerCase().trim()];
-    return friendlyName || name;
+export function getFriendlyName(column) {
+    return column.display_name || FRIENDLY_NAME_MAP[column.name.toLowerCase().trim()] || column.name;
 }
 
 export function getCardColors(card) {
@@ -168,7 +184,8 @@ export const DIMENSION_METRIC = "DIMENSION_METRIC";
 export const DIMENSION_METRIC_METRIC = "DIMENSION_METRIC_METRIC";
 export const DIMENSION_DIMENSION_METRIC = "DIMENSION_DIMENSION_METRIC";
 
-const MAX_SERIES = 10;
+// NOTE Atte Keinänen 7/31/17 Commented MAX_SERIES out as it wasn't being used
+// const MAX_SERIES = 10;
 
 export const isDimensionMetric = (cols, strict = true) =>
     (!strict || cols.length === 2) &&
@@ -203,9 +220,9 @@ export function getChartTypeFromData(cols, rows, strict = true) {
     if (isDimensionMetricMetric(cols, strict)) {
         return DIMENSION_METRIC_METRIC;
     } else if (isDimensionDimensionMetric(cols, strict)) {
-        if (getColumnCardinality(cols, rows, 0) < MAX_SERIES || getColumnCardinality(cols, rows, 1) < MAX_SERIES) {
+        // if (getColumnCardinality(cols, rows, 0) < MAX_SERIES || getColumnCardinality(cols, rows, 1) < MAX_SERIES) {
             return DIMENSION_DIMENSION_METRIC;
-        }
+        // }
     } else if (isDimensionMetric(cols, strict)) {
         return DIMENSION_METRIC;
     }
@@ -277,4 +294,3 @@ export function getCardAfterVisualizationClick(nextCard, previousCard) {
         };
     }
 }
-
diff --git a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
index dd73f1a37a58e7afa166d00a76e36a222450d140..113958feebc824305e5342676c9e26b06f2f7666 100644
--- a/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/Funnel.jsx
@@ -16,6 +16,7 @@ import _ from "underscore";
 import cx from "classnames";
 
 import type { VisualizationProps } from "metabase/meta/types/Visualization";
+import { TitleLegendHeader } from "metabase/visualizations/components/TitleLegendHeader";
 
 export default class Funnel extends Component {
     props: VisualizationProps;
@@ -108,17 +109,27 @@ export default class Funnel extends Component {
     render() {
         const { settings } = this.props;
 
+        const hasTitle = settings["card.title"];
+
         if (settings["funnel.type"] === "bar") {
             return <FunnelBar {...this.props} />
         } else {
             const { actionButtons, className, onChangeCardAndRun, series } = this.props;
             return (
                 <div className={cx(className, "flex flex-column p1")}>
+                    { hasTitle &&
+                        <TitleLegendHeader
+                            series={series}
+                            settings={settings}
+                            onChangeCardAndRun={onChangeCardAndRun}
+                            actionButtons={actionButtons}
+                        />
+                    }
                     <LegendHeader
                         className="flex-no-shrink"
                         // $FlowFixMe
                         series={series._raw || series}
-                        actionButtons={actionButtons}
+                        actionButtons={!hasTitle && actionButtons}
                         onChangeCardAndRun={onChangeCardAndRun}
                     />
                     <FunnelNormal {...this.props} className="flex-full" />
diff --git a/frontend/src/metabase/visualizations/visualizations/Map.jsx b/frontend/src/metabase/visualizations/visualizations/Map.jsx
index 1be87044162d96b9792e73bdf28632e2455ad72b..a48894ac2e9adbd8532fe079dcd2eea992376abb 100644
--- a/frontend/src/metabase/visualizations/visualizations/Map.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/Map.jsx
@@ -6,17 +6,19 @@ import ChoroplethMap from "../components/ChoroplethMap.jsx";
 import PinMap from "../components/PinMap.jsx";
 
 import { ChartSettingsError } from "metabase/visualizations/lib/errors";
-import { isNumeric, isLatitude, isLongitude, hasLatitudeAndLongitudeColumns } from "metabase/lib/schema_metadata";
+import { isNumeric, isLatitude, isLongitude, hasLatitudeAndLongitudeColumns, isState, isCountry } from "metabase/lib/schema_metadata";
 import { metricSetting, dimensionSetting, fieldSetting } from "metabase/visualizations/lib/settings";
 import MetabaseSettings from "metabase/lib/settings";
 
-import type { VisualizationProps } from "metabase/meta/types/Visualization";
+import { isSameSeries } from "metabase/visualizations/lib/utils";
 
 import _ from "underscore";
 
-export default class Map extends Component {
-    props: VisualizationProps;
+// NOTE Atte Keinänen 8/2/17: Heat/grid maps disabled in the first merged version of binning
+// const PIN_MAP_TYPES = new Set(["pin", "heat", "grid"]);
+const PIN_MAP_TYPES = new Set(["pin"]);
 
+export default class Map extends Component {
     static uiName = "Map";
     static identifier = "map";
     static iconName = "pinmap";
@@ -35,11 +37,14 @@ export default class Map extends Component {
             widget: "select",
             props: {
                 options: [
-                    { name: "Pin map", value: "pin" },
-                    { name: "Region map", value: "region" }
+                    { name: "Region map", value: "region" },
+                    { name: "Pin map", value: "pin" }
+                    // NOTE Atte Keinänen 8/2/17: Heat/grid maps disabled in the first merged version of binning
+                    // { name: "Heat map", value: "heat" },
+                    // { name: "Grid map", value: "grid" }
                 ]
             },
-            getDefault: ([{ card, data: { cols } }]) => {
+            getDefault: ([{ card, data: { cols } }], settings) => {
                 switch (card.display) {
                     case "state":
                     case "country":
@@ -47,37 +52,80 @@ export default class Map extends Component {
                     case "pin_map":
                         return "pin";
                     default:
+                        // NOTE Atte Keinänen 8/2/17: Heat/grid maps disabled in the first merged version of binning
                         if (hasLatitudeAndLongitudeColumns(cols)) {
+                        //     const latitudeColumn = _.findWhere(cols, { name: settings["map.latitude_column"] });
+                        //     const longitudeColumn = _.findWhere(cols, { name: settings["map.longitude_column"] });
+                        //     if (latitudeColumn && longitudeColumn && latitudeColumn.binning_info && longitudeColumn.binning_info) {
+                        //         // lat/lon columns are binned, use grid by default
+                        //         return "grid";
+                        //     } else if (settings["map.metric_column"]) {
+                        //         //
+                        //         return "heat";
+                        //     } else {
                             return "pin";
+                        //     }
                         } else {
                             return "region";
                         }
                 }
-            }
+            },
+            readDependencies: ["map.latitude_column", "map.longitude_column", "map.metric_column"]
+        },
+        "map.pin_type": {
+            title: "Pin type",
+            // Don't expose this in the UI for now
+            // widget: "select",
+            props: {
+                options: [
+                    { name: "Tiles", value: "tiles" },
+                    { name: "Markers", value: "markers" },
+                    // NOTE Atte Keinänen 8/2/17: Heat/grid maps disabled in the first merged version of binning
+                    // { name: "Heat", value: "heat" },
+                    // { name: "Grid", value: "grid" }
+                ]
+            },
+            getDefault: (series, vizSettings) =>
+                vizSettings["map.type"] === "heat" ?
+                    "heat"
+                : vizSettings["map.type"] === "grid" ?
+                    "grid"
+                : series[0].data.rows.length >= 1000 ?
+                    "tiles"
+                :
+                    "markers",
+            getHidden: (series, vizSettings) => !PIN_MAP_TYPES.has(vizSettings["map.type"])
         },
         "map.latitude_column": {
             title: "Latitude field",
             ...fieldSetting("map.latitude_column", isNumeric,
                 ([{ data: { cols }}]) => (_.find(cols, isLatitude) || {}).name),
-            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "pin"
+            getHidden: (series, vizSettings) => !PIN_MAP_TYPES.has(vizSettings["map.type"])
         },
         "map.longitude_column": {
             title: "Longitude field",
             ...fieldSetting("map.longitude_column", isNumeric,
                 ([{ data: { cols }}]) => (_.find(cols, isLongitude) || {}).name),
-            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "pin"
+            getHidden: (series, vizSettings) => !PIN_MAP_TYPES.has(vizSettings["map.type"])
+        },
+        "map.metric_column": {
+            title: "Metric field",
+            ...metricSetting("map.metric_column"),
+            getHidden: (series, vizSettings) =>
+                !PIN_MAP_TYPES.has(vizSettings["map.type"]) || (
+                    (vizSettings["map.pin_type"] !== "heat" && vizSettings["map.pin_type"] !== "grid")
+                ),
         },
         "map.region": {
             title: "Region map",
             widget: "select",
             getDefault: ([{ card, data: { cols }}]) => {
-                switch (card.display) {
-                    case "country":
-                        return "world_countries";
-                    case "state":
-                    default:
-                        return "us_states";
+                if (card.display === "state" || _.any(cols, isState)) {
+                    return "us_states";
+                } else if (card.display === "country" || _.any(cols, isCountry)) {
+                    return "world_countries";
                 }
+                return null;
             },
             getProps: () => ({
                 // $FlowFixMe:
@@ -102,34 +150,57 @@ export default class Map extends Component {
         },
         "map.center_longitude": {
         },
-        "map.pin_type": {
-            title: "Pin type",
-            // Don't expose this in the UI for now
-            // widget: ChartSettingSelect,
-            props: {
-                options: [{ name: "Tiles", value: "tiles" }, { name: "Markers", value: "markers" }]
-            },
-            getDefault: (series) => series[0].data.rows.length >= 1000 ? "tiles" : "markers",
-            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "pin"
-        }
+        "map.heat.radius": {
+            title: "Radius",
+            widget: "number",
+            default: 30,
+            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "heat"
+        },
+        "map.heat.blur": {
+            title: "Blur",
+            widget: "number",
+            default: 60,
+            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "heat"
+        },
+        "map.heat.min-opacity": {
+            title: "Min Opacity",
+            widget: "number",
+            default: 0,
+            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "heat"
+        },
+        "map.heat.max-zoom": {
+            title: "Max Zoom",
+            widget: "number",
+            default: 1,
+            getHidden: (series, vizSettings) => vizSettings["map.type"] !== "heat"
+        },
     }
 
     static checkRenderable([{ data: { cols, rows} }], settings) {
-        if (settings["map.type"] === "pin") {
+        if (PIN_MAP_TYPES.has(settings["map.type"])) {
             if (!settings["map.longitude_column"] || !settings["map.latitude_column"]) {
                 throw new ChartSettingsError("Please select longitude and latitude columns in the chart settings.", "Data");
             }
         } else if (settings["map.type"] === "region"){
+            if (!settings["map.region"]) {
+                throw new ChartSettingsError("Please select a region map.", "Data");
+            }
             if (!settings["map.dimension"] || !settings["map.metric"]) {
                 throw new ChartSettingsError("Please select region and metric columns in the chart settings.", "Data");
             }
         }
     }
 
+    shouldComponentUpdate(nextProps: any, nextState: any) {
+        let sameSize = (this.props.width === nextProps.width && this.props.height === nextProps.height);
+        let sameSeries = isSameSeries(this.props.series, nextProps.series);
+        return !(sameSize && sameSeries);
+    }
+
     render() {
         const { settings } = this.props;
         const type = settings["map.type"];
-        if (type === "pin") {
+        if (PIN_MAP_TYPES.has(type)) {
             return <PinMap {...this.props} />
         } else if (type === "region") {
             return <ChoroplethMap {...this.props} />
diff --git a/frontend/src/metabase/visualizations/visualizations/ObjectDetail.jsx b/frontend/src/metabase/visualizations/visualizations/ObjectDetail.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..91860b819164629464a245049253148264eae328
--- /dev/null
+++ b/frontend/src/metabase/visualizations/visualizations/ObjectDetail.jsx
@@ -0,0 +1,237 @@
+/* @flow weak */
+
+import React, { Component } from "react";
+
+import ExpandableString from 'metabase/query_builder/components/ExpandableString.jsx';
+import Icon from 'metabase/components/Icon.jsx';
+import IconBorder from 'metabase/components/IconBorder.jsx';
+import LoadingSpinner from 'metabase/components/LoadingSpinner.jsx';
+
+import { isID, isPK, foreignKeyCountsByOriginTable } from 'metabase/lib/schema_metadata';
+import { TYPE, isa } from "metabase/lib/types";
+import { singularize, inflect } from 'inflection';
+import { formatValue, formatColumn } from "metabase/lib/formatting";
+import { isQueryable } from "metabase/lib/table";
+
+import cx from "classnames";
+import _ from "underscore";
+
+import type { VisualizationProps } from "metabase/meta/types/Visualization";
+
+type Props = VisualizationProps;
+
+export default class ObjectDetail extends Component {
+    props: Props;
+
+    static uiName = "Object Detail";
+    static identifier = "object";
+    static iconName = "document";
+    static noun = "object";
+
+    static hidden = true;
+
+    componentDidMount() {
+        // load up FK references
+        this.props.loadObjectDetailFKReferences();
+    }
+
+    componentWillReceiveProps(nextProps) {
+        // if the card has changed then reload fk references
+        if (this.props.data != nextProps.data) {
+            this.props.loadObjectDetailFKReferences();
+        }
+    }
+
+    getIdValue() {
+        if (!this.props.data) return null;
+
+        const { data: { cols, rows }} = this.props;
+        const columnIndex = _.findIndex(cols, col => isPK(col));
+        return rows[0][columnIndex];
+    }
+
+    foreignKeyClicked = (fk) => {
+        this.props.followForeignKey(fk);
+    }
+
+    cellRenderer(column, value, isColumn) {
+        const { onVisualizationClick, visualizationIsClickable } = this.props;
+
+        let cellValue;
+        let clicked;
+        let isLink;
+
+        if (isColumn) {
+            cellValue = (column !== null) ? formatColumn(column) : null;
+            clicked = {
+                column
+            };
+            isLink = false;
+        } else {
+            if (value === null || value === undefined || value === "") {
+                cellValue = (<span className="text-grey-2">Empty</span>);
+            } else if (isa(value.special_type, TYPE.SerializedJSON)) {
+                let formattedJson = JSON.stringify(JSON.parse(value), null, 2);
+                cellValue = (<pre className="ObjectJSON">{formattedJson}</pre>);
+            } else if (typeof value === "object") {
+                let formattedJson = JSON.stringify(value, null, 2);
+                cellValue = (<pre className="ObjectJSON">{formattedJson}</pre>);
+            } else {
+                cellValue = formatValue(value, { column: column, jsx: true });
+                if (typeof cellValue === "string") {
+                    cellValue = (<ExpandableString str={cellValue} length={140}></ExpandableString>);
+                }
+            }
+            clicked = {
+                column,
+                value
+            };
+            isLink = isID(column);
+        }
+
+        const isClickable = onVisualizationClick && visualizationIsClickable(clicked);
+
+        return (
+            <div>
+                <span
+                    className={cx({ "cursor-pointer": isClickable, "link": isClickable && isLink })}
+                    onClick={isClickable && ((e) => {
+                        onVisualizationClick({ ...clicked, element: e.currentTarget });
+                    })}
+                >
+                    {cellValue}
+                </span>
+            </div>
+        );
+    }
+
+    renderDetailsTable() {
+        const { data: { cols, rows }} = this.props;
+        return cols.map((column, columnIndex) =>
+            <div className="Grid mb2" key={columnIndex}>
+                <div className="Grid-cell">
+                    {this.cellRenderer(column, rows[0][columnIndex], true)}
+                </div>
+                <div style={{wordWrap: 'break-word'}} className="Grid-cell text-bold text-dark">
+                    {this.cellRenderer(column, rows[0][columnIndex], false)}
+                </div>
+            </div>
+        )
+    }
+
+    renderRelationships() {
+        let { tableForeignKeys, tableForeignKeyReferences } = this.props;
+        if (!tableForeignKeys) {
+            return null;
+        }
+
+        tableForeignKeys = tableForeignKeys.filter(fk => isQueryable(fk.origin.table));
+
+        if (tableForeignKeys.length < 1) {
+            return (<p className="my4 text-centered">No relationships found.</p>);
+        }
+
+        const fkCountsByTable = foreignKeyCountsByOriginTable(tableForeignKeys);
+
+        const relationships = tableForeignKeys.sort((a, b) =>
+            a.origin.table.display_name.localeCompare(b.origin.table.display_name)
+        ).map((fk) => {
+            let fkCount = (<LoadingSpinner size={25} />)
+            let fkCountValue = 0;
+            let fkClickable = false;
+            if (tableForeignKeyReferences) {
+                const fkCountInfo = tableForeignKeyReferences[fk.origin.id];
+                if (fkCountInfo && fkCountInfo.status === 1) {
+                    fkCount = (<span>{fkCountInfo.value}</span>);
+
+                    if (fkCountInfo.value) {
+                        fkCountValue = fkCountInfo.value;
+                        fkClickable = true;
+                    }
+                }
+            }
+            const chevron = (
+                <IconBorder className="flex-align-right">
+                    <Icon name='chevronright' size={10} />
+                </IconBorder>
+            );
+
+            const relationName = inflect(fk.origin.table.display_name, fkCountValue);
+            const via = (fkCountsByTable[fk.origin.table.id] > 1) ? (<span className="text-grey-3 text-normal"> via {fk.origin.display_name}</span>) : null;
+
+            const info = (
+                <div>
+                    <h2>{fkCount}</h2>
+                    <h5 className="block">{relationName}{via}</h5>
+                 </div>
+            );
+            let fkReference;
+            const referenceClasses = cx('flex align-center my2 pb2 border-bottom', {
+                'text-brand-hover cursor-pointer text-dark': fkClickable,
+                'text-grey-3': !fkClickable
+            });
+
+            if (fkClickable) {
+                fkReference = (
+                    <div className={referenceClasses} key={fk.id} onClick={this.foreignKeyClicked.bind(null, fk)}>
+                        {info}
+                        {chevron}
+                    </div>
+                );
+            } else {
+                fkReference = (
+                    <div className={referenceClasses} key={fk.id}>
+                        {info}
+                    </div>
+                );
+            }
+
+            return (
+                <li>
+                    {fkReference}
+                </li>
+            );
+        });
+
+        return (
+            <ul className="px4">
+                {relationships}
+            </ul>
+        );
+    }
+
+    render() {
+        if(!this.props.data) {
+            return false;
+        }
+
+        const tableName = (this.props.tableMetadata) ? singularize(this.props.tableMetadata.display_name) : "Unknown";
+        // TODO: once we nail down the "title" column of each table this should be something other than the id
+        const idValue = this.getIdValue();
+
+        return (
+            <div className="ObjectDetail rounded mt2">
+                <div className="Grid ObjectDetail-headingGroup">
+                    <div className="Grid-cell ObjectDetail-infoMain px4 py3 ml2 arrow-right">
+                        <div className="text-brand text-bold">
+                            <span>{tableName}</span>
+                            <h1>{idValue}</h1>
+                        </div>
+                    </div>
+                    <div className="Grid-cell flex align-center Cell--1of3 bg-alt">
+                        <div className="p4 flex align-center text-bold text-grey-3">
+                            <Icon name="connections" size={17} />
+                            <div className="ml2">
+                                This <span className="text-dark">{tableName}</span> is connected to:
+                            </div>
+                        </div>
+                    </div>
+                </div>
+                <div className="Grid">
+                    <div className="Grid-cell ObjectDetail-infoMain p4">{this.renderDetailsTable()}</div>
+                    <div className="Grid-cell Cell--1of3 bg-alt">{this.renderRelationships()}</div>
+                </div>
+            </div>
+        );
+    }
+}
diff --git a/frontend/src/metabase/visualizations/visualizations/PieChart.jsx b/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
index aa494237f81358b5025dd55ee22eafa99bc0bdba..b78c0cfcd8615cd2edf450236fe9f733447e5b2e 100644
--- a/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/PieChart.jsx
@@ -46,7 +46,7 @@ export default class PieChart extends Component {
 
     static checkRenderable([{ data: { cols, rows} }], settings) {
         if (!settings["pie.dimension"] || !settings["pie.metric"]) {
-            throw new ChartSettingsError("Which columns do want to use?", "Data");
+            throw new ChartSettingsError("Which columns do you want to use?", "Data");
         }
     }
 
diff --git a/frontend/src/metabase/visualizations/visualizations/Table.jsx b/frontend/src/metabase/visualizations/visualizations/Table.jsx
index 07840ddd15ddf4d350a9fc6ecb1c6567262b22b9..a1fd30b8be8f1d912d91c020c92d85e337a5fa80 100644
--- a/frontend/src/metabase/visualizations/visualizations/Table.jsx
+++ b/frontend/src/metabase/visualizations/visualizations/Table.jsx
@@ -9,8 +9,7 @@ import * as DataGrid from "metabase/lib/data_grid";
 
 import Query from "metabase/lib/query";
 import { isMetric, isDimension } from "metabase/lib/schema_metadata";
-import { columnsAreValid } from "metabase/visualizations/lib/settings";
-import { getFriendlyName } from "metabase/visualizations/lib/utils";
+import { columnsAreValid, getFriendlyName } from "metabase/visualizations/lib/utils";
 import ChartSettingOrderedFields from "metabase/visualizations/components/settings/ChartSettingOrderedFields.jsx";
 
 import _ from "underscore";
@@ -143,3 +142,15 @@ export default class Table extends Component {
         );
     }
 }
+
+/**
+ * A modified version of Table for Jest/Enzyme tests.
+ * It always uses TableSimple which Enzyme is able to render correctly.
+ * TableInteractive uses react-virtualized library which requires a real browser viewport.
+ */
+export const TestTable = (props: Props) => <Table {...props} isDashboard={true} />
+TestTable.uiName = Table.uiName;
+TestTable.identifier = Table.identifier;
+TestTable.iconName = Table.iconName;
+TestTable.minSize = Table.minSize;
+TestTable.settings = Table.settings;
\ No newline at end of file
diff --git a/frontend/src/metabase/xray/Histogram.jsx b/frontend/src/metabase/xray/Histogram.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..ad57ecc5ffee465442a43722428b03137459f5ae
--- /dev/null
+++ b/frontend/src/metabase/xray/Histogram.jsx
@@ -0,0 +1,20 @@
+import React from 'react'
+import Visualization from 'metabase/visualizations/components/Visualization'
+
+const Histogram = ({ histogram }) =>
+    <Visualization
+        className="full-height"
+        series={[
+            {
+                card: {
+                    display: "bar",
+                    visualization_settings: {}
+                },
+                data: histogram
+            }
+        ]}
+        showTitle={false}
+    />
+
+export default Histogram
+
diff --git a/frontend/src/metabase/xray/SimpleStat.jsx b/frontend/src/metabase/xray/SimpleStat.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..46c9b81ebbc2da7e6bcacece8cfbe2ede8033450
--- /dev/null
+++ b/frontend/src/metabase/xray/SimpleStat.jsx
@@ -0,0 +1,22 @@
+import React from 'react'
+import Tooltip from 'metabase/components/Tooltip'
+import Icon from 'metabase/components/Icon'
+
+const SimpleStat = ({ stat, showDescription }) =>
+    <div>
+        <div className="flex align-center">
+            <h3 className="mr1 text-grey-4">{stat.label}</h3>
+            { showDescription && (
+                <Tooltip tooltip={stat.description}>
+                    <Icon name='infooutlined' />
+                </Tooltip>
+            )}
+        </div>
+        { /* String() makes values like true / false show up, and doesn't throw on null */ }
+        <h1 className="my1">
+            {String(stat.value)}
+        </h1>
+    </div>
+
+export default SimpleStat
+
diff --git a/frontend/src/metabase/xray/components/Constituent.jsx b/frontend/src/metabase/xray/components/Constituent.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..9204f85788e2c453ccb26a15ac6db0379b4107ae
--- /dev/null
+++ b/frontend/src/metabase/xray/components/Constituent.jsx
@@ -0,0 +1,40 @@
+import React from 'react'
+import { Link } from 'react-router'
+
+import Histogram from 'metabase/xray/Histogram'
+import SimpleStat from 'metabase/xray/SimpleStat'
+
+const Constituent = ({constituent}) =>
+    <Link
+        to={`/xray/field/${constituent.field.id}/approximate`}
+        className="no-decoration"
+    >
+        <div className="Grid my3 bg-white bordered rounded shadowed shadow-hover no-decoration">
+            <div className="Grid-cell Cell--1of3 border-right">
+                <div className="p4">
+                    <h2 className="text-bold text-brand">{constituent.field.display_name}</h2>
+                    <p className="text-measure text-paragraph">{constituent.field.description}</p>
+
+                    <div className="flex align-center">
+                        { constituent.min && (
+                            <SimpleStat
+                                stat={constituent.min}
+                            />
+                        )}
+                        { constituent.max && (
+                            <SimpleStat
+                                stat={constituent.max}
+                            />
+                        )}
+                    </div>
+                </div>
+            </div>
+            <div className="Grid-cell p3">
+                <div style={{ height: 220 }}>
+                    { constituent.histogram && (<Histogram histogram={constituent.histogram.value} />) }
+                </div>
+            </div>
+        </div>
+    </Link>
+
+export default Constituent
diff --git a/frontend/src/metabase/xray/components/CostSelect.jsx b/frontend/src/metabase/xray/components/CostSelect.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..675c84e11f603931bd1f47e5b551398ccf049537
--- /dev/null
+++ b/frontend/src/metabase/xray/components/CostSelect.jsx
@@ -0,0 +1,41 @@
+import React from 'react'
+import cx from 'classnames'
+import { Link } from 'react-router'
+
+import Icon from 'metabase/components/Icon'
+import Tooltip from 'metabase/components/Tooltip'
+
+import COSTS from 'metabase/xray/costs'
+
+const CostSelect = ({ currentCost, xrayType, id }) =>
+    <ol className="bordered rounded shadowed bg-white flex align-center overflow-hidden">
+        { Object.keys(COSTS).map(cost => {
+            const c = COSTS[cost]
+            return (
+                <Link
+                    key={cost}
+                    to={`/xray/${xrayType}/${id}/${cost}`}
+                    className="no-decoration"
+                >
+                    <li
+                        className={cx(
+                            "flex align-center justify-center cursor-pointer bg-brand-hover text-white-hover transition-background transition-text text-grey-2",
+                            { 'bg-brand text-white': currentCost === cost }
+                        )}
+                    >
+                        <Tooltip
+                            tooltip={c.description}
+                        >
+                            <Icon
+                                size={32}
+                                name={c.icon}
+                                className="p1 border-right"
+                            />
+                        </Tooltip>
+                    </li>
+                </Link>
+            )
+        })}
+    </ol>
+
+export default CostSelect
diff --git a/frontend/src/metabase/xray/components/StatGroup.jsx b/frontend/src/metabase/xray/components/StatGroup.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..8531df06b304b077ca2ccfe9b147e3b4c40f9f07
--- /dev/null
+++ b/frontend/src/metabase/xray/components/StatGroup.jsx
@@ -0,0 +1,29 @@
+import React from 'react'
+import { Heading } from 'metabase/xray/components/XRayLayout'
+import SimpleStat from 'metabase/xray/SimpleStat'
+
+const atLeastOneStat = (fingerprint, stats) =>
+    stats.filter(s => fingerprint[s]).length > 0
+
+const StatGroup = ({ heading, fingerprint, stats, showDescriptions }) =>
+    atLeastOneStat(fingerprint, stats) ? (
+        <div className="my4">
+            <Heading heading={heading} />
+            <div className="bordered rounded shadowed bg-white">
+                <ol className="Grid Grid--1of4">
+                    { stats.map(stat =>
+                        !!fingerprint[stat] && (
+                            <li className="Grid-cell lg-p3 lg-px4 border-right border-bottom" key={stat}>
+                                <SimpleStat
+                                    stat={fingerprint[stat]}
+                                    showDescription={showDescriptions}
+                                />
+                            </li>
+                        )
+                    )}
+                </ol>
+            </div>
+        </div>
+    ) : null
+
+export default StatGroup
diff --git a/frontend/src/metabase/xray/components/XRayLayout.jsx b/frontend/src/metabase/xray/components/XRayLayout.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..6c25f9718156cd848d48ef13c19e56b325846a7d
--- /dev/null
+++ b/frontend/src/metabase/xray/components/XRayLayout.jsx
@@ -0,0 +1,13 @@
+import React from 'react'
+
+
+// Shared page chrome for x-ray pages: slate background, wide side padding, full height.
+export const XRayPageWrapper = ({ children }) =>
+    <div className="wrapper bg-slate-extra-light pb4 full-height" style={{ paddingLeft: '6em', paddingRight: '6em' }}>
+        { children }
+    </div>
+
+
+// Uniform section heading used across x-ray pages.
+export const Heading = ({ heading }) =>
+    <h2 className="py3">{heading}</h2>
diff --git a/frontend/src/metabase/xray/containers/CardComparison.jsx b/frontend/src/metabase/xray/containers/CardComparison.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..9437fd16cac71e70638d2ae612fbffb287f1565c
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/CardComparison.jsx
@@ -0,0 +1,31 @@
+import React, { Component } from 'react'
+import { connect } from 'react-redux'
+
+import { fetchCardComparison } from 'metabase/reference/reference'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+
+const mapStateToProps = state => ({
+    cardComparison: state.reference.cardComparison
+})
+
+const mapDispatchToProps = {
+    fetchCardComparison
+}
+
+class CardComparison extends Component {
+    componentDidMount () {
+        const { cardId1, cardId2 } = this.props.params
+        // Kick off the comparison fetch as soon as the route params are available
+        this.props.fetchCardComparison(cardId1, cardId2)
+    }
+    render () {
+        return (
+            <LoadingAndErrorWrapper loading={!this.props.cardComparison}>
+                { JSON.stringify(this.props.cardComparison, null, 2) }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(CardComparison)
diff --git a/frontend/src/metabase/xray/containers/CardXRay.jsx b/frontend/src/metabase/xray/containers/CardXRay.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..10d28fdfe49a519cd1058640878844b415c69f1a
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/CardXRay.jsx
@@ -0,0 +1,190 @@
+import React, { Component } from 'react'
+
+import { connect } from 'react-redux'
+
+import { fetchCardFingerPrint } from 'metabase/reference/reference'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+import SimpleStat from 'metabase/xray/SimpleStat'
+
+type Props = {
+    fetchCardFingerPrint: () => void,
+    fingerprint: {}
+}
+
+const FingerPrintList = ({ fingerprint }) =>
+    <div>
+        <ol className="full">
+            { Object.keys(fingerprint).map(fieldName => {
+                const f = fingerprint[fieldName]
+                return (
+                    <li key={fieldName}>
+                        <h4>{fieldName}</h4>
+                        <div>
+                            <ol className="Grid">
+                                <li className="Grid-cell">
+                                    <SimpleStat stat={f.min} />
+                                </li>
+                                <li className="Grid-cell">
+                                    <SimpleStat stat={f.skewness} />
+                                </li>
+                                <li className="Grid-cell">
+                                    <SimpleStat stat={f.mean} />
+                                </li>
+                            </ol>
+                        </div>
+                    </li>
+                )
+            })}
+        </ol>
+    </div>
+
+const FingerprintGrid = ({ fingerprint, fields, distribution }) =>
+    <div className="full">
+        <ol>
+            <li className="border-bottom border-dark">
+                <ol className="Grid Grid--gutters">
+                    <li className="Grid-cell">
+                        <h3>Field</h3>
+                    </li>
+                    { fields.map(field =>
+                        <li className="Grid-cell" key={field}>
+                            <h3>{field}</h3>
+                        </li>
+                    )}
+                    { distribution && (
+                        <li className="Grid-cell">
+                            <h3>Distribution</h3>
+                        </li>
+                    )}
+                </ol>
+            </li>
+            { Object.keys(fingerprint).map(key => {
+                const field = fingerprint[key]
+                return (
+                    <li className="border-bottom" key={key}>
+                        <ol className="Grid Grid--gutters">
+                            <li className="Grid-cell">
+                                <a className="link text-bold">{key}</a>
+                            </li>
+                            { fields.map(f =>
+                                <li className="Grid-cell" key={f}>
+                                    { field[f] }
+                                </li>
+                            )}
+                            { /*
+                            <li className="Grid-cell">
+                                { field['has-nils?'] }
+                            </li>
+                            <li className="Grid-cell">
+                                { field['all-distinct?'] }
+                            </li>
+                            <li className="Grid-cell">
+                                { field.mean }
+                            </li>
+                            <li className="Grid-cell">
+                                { field.min }
+                            </li>
+                            <li className="Grid-cell">
+                                { field.max }
+                            </li>
+                            <li className="Grid-cell">
+                                { field.median }
+                            </li>
+                            */}
+                            { distribution && (
+                                <li className="Grid-cell">
+                                </li>
+                            )}
+                        </ol>
+                    </li>
+                )
+            })}
+        </ol>
+    </div>
+
+class CardXRay extends Component {
+    props: Props
+
+    state = {
+        grid: true // never toggled in this component; the FingerPrintList view is kept as a fallback
+    }
+
+    componentDidMount () { // fetch the fingerprint for the card named in the route
+        this.props.fetchCardFingerPrint(this.props.params.cardId)
+    }
+
+
+    render () {
+        const { fingerprint } = this.props
+        return (
+            <div className="wrapper" style={{ marginLeft: '6em', marginRight: '6em'}}>
+                <div className="my4 py4">
+                    <h1>Xray</h1>
+                </div>
+                <LoadingAndErrorWrapper loading={!fingerprint}>
+                    { () =>
+                        <div className="full">
+                            { this.state.grid ? (
+                                <div className="mt3">
+                                    <div className="my4">
+                                        <h2 className="py3 my3">Overview</h2>
+                                        <FingerprintGrid
+                                            fingerprint={fingerprint}
+                                            fields={['count', 'min', 'max', 'mean', 'median']}
+                                            distribution={false}
+                                        />
+                                    </div>
+                                    <div className="my4">
+                                        <h2 className="py3 my3">I am a cool math wizard</h2>
+                                        <FingerprintGrid
+                                            fingerprint={fingerprint}
+                                            fields={['skewness', 'has-nils?', 'all-distinct?', 'range-vs-spread', 'sum-of-squares', 'range-vs-sd']}
+                                            distribution={true}
+                                        />
+                                    </div>
+                                    { fingerprint['CREATED_AT'] && (
+                                        <div className="my4">
+                                            <h2 className="py3 my3">Time breakdown</h2>
+                                            <div className="my3">
+                                                <h4>Hour</h4>
+                                            </div>
+                                            <div className="my3">
+                                                <h4>Day</h4>
+                                            </div>
+                                            <div className="my3">
+                                                <h4>Month</h4>
+                                            </div>
+                                            <div className="my3">
+                                                <h4>Quarter</h4>
+                                            </div>
+                                        </div>
+                                    )}
+                                </div>
+                            )
+                            : (
+                                <FingerPrintList fingerprint={fingerprint} />
+                            )}
+                            <pre>
+
+                                <code>
+                                    { JSON.stringify(fingerprint, null, 2) }
+                                </code>
+                            </pre>
+                        </div>
+                    }
+                </LoadingAndErrorWrapper>
+            </div>
+        )
+    }
+}
+
+const mapStateToProps = state => ({
+    fingerprint: state.reference.tableFingerprint,
+})
+
+const mapDispatchToProps = {
+    fetchCardFingerPrint: fetchCardFingerPrint
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(CardXRay)
diff --git a/frontend/src/metabase/xray/containers/FieldComparison.jsx b/frontend/src/metabase/xray/containers/FieldComparison.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..fb1204c8125eb1ec859bcedab9bfae66d62f3c99
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/FieldComparison.jsx
@@ -0,0 +1,31 @@
+import React, { Component } from 'react'
+import { connect } from 'react-redux'
+
+import { fetchFieldComparison } from 'metabase/reference/reference'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+
+const mapStateToProps = state => ({
+    fieldComparison: state.reference.fieldComparison
+})
+
+const mapDispatchToProps = {
+    fetchFieldComparison
+}
+
+class FieldComparison extends Component {
+    componentDidMount () {
+        const { fieldId1, fieldId2 } = this.props.params
+        // Kick off the comparison fetch as soon as the route params are available
+        this.props.fetchFieldComparison(fieldId1, fieldId2)
+    }
+    render () {
+        return (
+            <LoadingAndErrorWrapper loading={!this.props.fieldComparison}>
+                { JSON.stringify(this.props.fieldComparison, null, 2) }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(FieldComparison)
diff --git a/frontend/src/metabase/xray/containers/FieldXray.jsx b/frontend/src/metabase/xray/containers/FieldXray.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..1e050a6b8a77a38af89b0ad997c6b8362a3bb1a2
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/FieldXray.jsx
@@ -0,0 +1,189 @@
+/* @flow */
+import React, { Component } from 'react'
+
+import { connect } from 'react-redux'
+import title from 'metabase/hoc/Title'
+import { Link } from 'react-router'
+
+import { isDate } from 'metabase/lib/schema_metadata'
+import { fetchFieldFingerPrint } from 'metabase/reference/reference'
+import { getFieldFingerprint } from 'metabase/reference/selectors'
+
+import COSTS from 'metabase/xray/costs'
+
+import {
+    PERIODICITY,
+    ROBOTS,
+    STATS_OVERVIEW,
+    VALUES_OVERVIEW
+} from 'metabase/xray/stats'
+
+import Icon from 'metabase/components/Icon'
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+import CostSelect from 'metabase/xray/components/CostSelect'
+import StatGroup from 'metabase/xray/components/StatGroup'
+import Histogram from 'metabase/xray/Histogram'
+import { Heading, XRayPageWrapper } from 'metabase/xray/components/XRayLayout'
+
+import type { Field } from 'metabase/meta/types/Field'
+import type { Table } from 'metabase/meta/types/Table'
+
+type Props = {
+    fetchFieldFingerPrint: () => void,
+    fingerprint: {
+        table: Table,
+        field: Field,
+        histogram: {
+            value: {}
+        }
+    },
+    params: {
+        cost: string,
+        fieldId: number
+    },
+}
+
+const Periodicity = ({fingerprint}) =>
+    <div>
+        <Heading heading="Time breakdown" />
+        <div className="bg-white bordered rounded shadowed">
+            <div className="Grid Grid--gutters Grid--1of4">
+                { PERIODICITY.map(period =>
+                    fingerprint[`histogram-${period}`] && (
+                        <div className="Grid-cell" key={period}>
+                            <div className="p4 border-right border-bottom">
+                                <div style={{ height: 120}}>
+                                    <h4>
+                                        {fingerprint[`histogram-${period}`].label}
+                                    </h4>
+                                    <Histogram
+                                        histogram={fingerprint[`histogram-${period}`].value}
+                                    />
+                                </div>
+                            </div>
+                        </div>
+                    )
+                )}
+            </div>
+        </div>
+    </div>
+
+const mapStateToProps = state => ({
+    fingerprint: getFieldFingerprint(state)
+})
+
+const mapDispatchToProps = {
+    fetchFieldFingerPrint
+}
+
+@connect(mapStateToProps, mapDispatchToProps)
+@title(({ fingerprint }) => fingerprint && fingerprint.field.display_name || "Field")
+class FieldXRay extends Component {
+    props: Props
+
+    state = {
+        error: null
+    }
+
+    componentDidMount () { // initial fetch when the page mounts
+        this.fetchFieldFingerprint()
+    }
+
+    async fetchFieldFingerprint() {
+        const { params } = this.props
+        const cost = COSTS[params.cost] // the :cost route param selects the fidelity preset
+        try {
+            await this.props.fetchFieldFingerPrint(params.fieldId, cost)
+        } catch (error) {
+            this.setState({ error })
+        }
+
+    }
+
+    componentDidUpdate (prevProps: Props) {
+        if(prevProps.params.cost !== this.props.params.cost) { // refetch at the newly selected fidelity
+            this.fetchFieldFingerprint()
+        }
+    }
+
+    render () {
+        const { fingerprint, params } = this.props
+        const { error } = this.state
+        return (
+                <LoadingAndErrorWrapper
+                    loading={!fingerprint}
+                    error={error}
+                    noBackground
+                >
+                    { () =>
+                        <XRayPageWrapper>
+                        <div className="full">
+                            <div className="my3 flex align-center">
+                                <div>
+                                    <Link
+                                        className="my2 px2 text-bold text-brand-hover inline-block bordered bg-white p1 h4 no-decoration rounded shadowed"
+                                        to={`/xray/table/${fingerprint.table.id}/approximate`}
+                                    >
+                                        {fingerprint.table.display_name}
+                                    </Link>
+                                    <h1 className="mt2 flex align-center">
+                                        {fingerprint.field.display_name}
+                                        <Icon name="chevronright" className="mx1 text-grey-3" size={16} />
+                                        <span className="text-grey-3">XRay</span>
+                                    </h1>
+                                    <p className="mt1 text-paragraph text-measure">
+                                        {fingerprint.field.description}
+                                    </p>
+                                </div>
+                                <div className="ml-auto flex align-center">
+                                    <h3 className="mr2 text-grey-3">Fidelity</h3>
+                                    <CostSelect
+                                        xrayType='field'
+                                        id={fingerprint.field.id}
+                                        currentCost={params.cost}
+                                    />
+                                </div>
+                            </div>
+                            <div className="mt4">
+                                <Heading heading="Distribution" />
+                                <div className="bg-white bordered shadowed">
+                                    <div className="lg-p4">
+                                        <div style={{ height: 300 }}>
+                                            <Histogram histogram={fingerprint.histogram.value} />
+                                        </div>
+                                    </div>
+                                </div>
+                            </div>
+
+                            { isDate(fingerprint.field) && <Periodicity fingerprint={fingerprint} /> }
+
+                            <StatGroup
+                                heading="Values overview"
+                                fingerprint={fingerprint}
+                                stats={VALUES_OVERVIEW}
+                            />
+
+                            <StatGroup
+                                heading="Statistical overview"
+                                fingerprint={fingerprint}
+                                showDescriptions
+                                stats={STATS_OVERVIEW}
+                            />
+
+                            <StatGroup
+                                heading="Robots"
+                                fingerprint={fingerprint}
+                                showDescriptions
+                                stats={ROBOTS}
+                            />
+                        </div>
+                    </XRayPageWrapper>
+                }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+export default FieldXRay
+
+
diff --git a/frontend/src/metabase/xray/containers/SegmentComparison.jsx b/frontend/src/metabase/xray/containers/SegmentComparison.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..53c08771f3d055e566eecd9956d677f7deb2b633
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/SegmentComparison.jsx
@@ -0,0 +1,31 @@
+import React, { Component } from 'react'
+import { connect } from 'react-redux'
+
+import { fetchSegmentComparison } from 'metabase/reference/reference'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+
+const mapStateToProps = state => ({
+    segmentComparison: state.reference.segmentComparison
+})
+
+const mapDispatchToProps = {
+    fetchSegmentComparison
+}
+
+class SegmentComparison extends Component {
+    componentDidMount () {
+        const { segmentId1, segmentId2 } = this.props.params
+        // Kick off the comparison fetch as soon as the route params are available
+        this.props.fetchSegmentComparison(segmentId1, segmentId2)
+    }
+    render () {
+        return (
+            <LoadingAndErrorWrapper loading={!this.props.segmentComparison}>
+                { JSON.stringify(this.props.segmentComparison, null, 2) }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(SegmentComparison)
diff --git a/frontend/src/metabase/xray/containers/SegmentXRay.jsx b/frontend/src/metabase/xray/containers/SegmentXRay.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..3994eec540fe727854a99b4f6bc37794d572ac55
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/SegmentXRay.jsx
@@ -0,0 +1,134 @@
+/* @flow */
+import React, { Component } from 'react'
+import { connect } from 'react-redux'
+import title from 'metabase/hoc/Title'
+
+import { Link } from 'react-router'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+import { XRayPageWrapper } from 'metabase/xray/components/XRayLayout'
+import { fetchSegmentFingerPrint } from 'metabase/reference/reference'
+
+import Icon from 'metabase/components/Icon'
+import COSTS from 'metabase/xray/costs'
+import CostSelect from 'metabase/xray/components/CostSelect'
+
+import {
+    getSegmentConstituents,
+    getSegmentFingerprint
+} from 'metabase/reference/selectors'
+
+import Constituent from 'metabase/xray/components/Constituent'
+
+import type { Table } from 'metabase/meta/types/Table'
+import type { Segment } from 'metabase/meta/types/Segment'
+
+type Props = {
+    fetchSegmentFingerPrint: () => void,
+    constituents: [],
+    fingerprint: {
+        table: Table,
+        segment: Segment,
+    },
+    params: {
+        segmentId: number,
+        cost: string,
+    }
+}
+
+const mapStateToProps = state => ({
+    fingerprint: getSegmentFingerprint(state),
+    constituents: getSegmentConstituents(state)
+})
+
+const mapDispatchToProps = {
+    fetchSegmentFingerPrint
+}
+
+@connect(mapStateToProps, mapDispatchToProps)
+@title(({ fingerprint }) => fingerprint && fingerprint.segment.name || "Segment" )
+class SegmentXRay extends Component {
+    props: Props
+
+    state = {
+        error: null
+    }
+
+    componentDidMount () {
+        this.fetchSegmentFingerPrint()
+    }
+
+    async fetchSegmentFingerPrint () {
+        const { params } = this.props
+        const cost = COSTS[params.cost]
+        try {
+            await this.props.fetchSegmentFingerPrint(params.segmentId, cost)
+        } catch (error) {
+            this.setState({ error })
+        }
+    }
+
+    componentDidUpdate (prevProps: Props) {
+        if(prevProps.params.cost !== this.props.params.cost) {
+            this.fetchSegmentFingerPrint()
+        }
+    }
+
+    render () {
+        const { constituents, fingerprint, params } = this.props // NOTE(review): loading is gated on constituents while the header reads fingerprint — confirm both are set by the same fetch
+        const { error } = this.state
+        return (
+            <XRayPageWrapper>
+                <LoadingAndErrorWrapper
+                    loading={!constituents}
+                    error={error}
+                    noBackground
+                >
+                    { () =>
+                        <div className="full">
+                            <div className="my4 flex align-center py2">
+                                <div>
+                                    <Link
+                                        className="my2 px2 text-bold text-brand-hover inline-block bordered bg-white p1 h4 no-decoration shadowed rounded"
+                                        to={`/xray/table/${fingerprint.table.id}/approximate`}
+                                    >
+                                        {fingerprint.table.display_name}
+                                    </Link>
+                                    <h1 className="mt2 flex align-center">
+                                        {fingerprint.segment.name}
+                                        <Icon name="chevronright" className="mx1 text-grey-3" size={16} />
+                                        <span className="text-grey-3">XRay</span>
+                                    </h1>
+                                    <p className="mt1 text-paragraph text-measure">
+                                        {fingerprint.segment.description}
+                                    </p>
+                                </div>
+                                <div className="ml-auto flex align-center">
+                                    <h3 className="mr2 text-grey-3">Fidelity</h3>
+                                    <CostSelect
+                                        currentCost={params.cost}
+                                        xrayType='segment'
+                                        id={fingerprint.segment.id}
+                                    />
+                                </div>
+                            </div>
+                            <ol>
+                                { constituents.map((c, index) => {
+                                    return (
+                                        <li key={index}>
+                                            <Constituent
+                                                constituent={c}
+                                            />
+                                        </li>
+                                    )
+                                })}
+                            </ol>
+                        </div>
+                    }
+                </LoadingAndErrorWrapper>
+            </XRayPageWrapper>
+        )
+    }
+}
+
+export default SegmentXRay
diff --git a/frontend/src/metabase/xray/containers/TableComparison.jsx b/frontend/src/metabase/xray/containers/TableComparison.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..0bfc85f737665e419310bfc253b58f63eb8913af
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/TableComparison.jsx
@@ -0,0 +1,31 @@
+import React, { Component } from 'react'
+import { connect } from 'react-redux'
+
+import { fetchTableComparison } from 'metabase/reference/reference'
+
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+
+const mapStateToProps = state => ({
+    tableComparison: state.reference.tableComparison
+})
+
+const mapDispatchToProps = {
+    fetchTableComparison
+}
+
+class TableComparison extends Component {
+    componentDidMount () {
+        const { tableId1, tableId2 } = this.props.params
+        // Kick off the comparison fetch as soon as the route params are available
+        this.props.fetchTableComparison(tableId1, tableId2)
+    }
+    render () {
+        return (
+            <LoadingAndErrorWrapper loading={!this.props.tableComparison}>
+                { JSON.stringify(this.props.tableComparison, null, 2) }
+            </LoadingAndErrorWrapper>
+        )
+    }
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(TableComparison)
diff --git a/frontend/src/metabase/xray/containers/TableXRay.jsx b/frontend/src/metabase/xray/containers/TableXRay.jsx
new file mode 100644
index 0000000000000000000000000000000000000000..41e93a58e38f2bea2cf55e037c6349b574f7f076
--- /dev/null
+++ b/frontend/src/metabase/xray/containers/TableXRay.jsx
@@ -0,0 +1,123 @@
+/* @flow */
+import React, { Component } from 'react'
+
+import { connect } from 'react-redux'
+import title from 'metabase/hoc/Title'
+
+import { fetchTableFingerPrint } from 'metabase/reference/reference'
+import { XRayPageWrapper } from 'metabase/xray/components/XRayLayout'
+
+import COSTS from 'metabase/xray/costs'
+
+import CostSelect from 'metabase/xray/components/CostSelect'
+import Constituent from 'metabase/xray/components/Constituent'
+
+import {
+    getTableConstituents,
+    getTableFingerprint
+} from 'metabase/reference/selectors'
+
+import Icon from 'metabase/components/Icon'
+import LoadingAndErrorWrapper from 'metabase/components/LoadingAndErrorWrapper'
+
+import type { Table } from 'metabase/meta/types/Table'
+
+type Props = {
+    constituents: [],
+    fetchTableFingerPrint: () => void,
+    fingerprint: {
+        table: Table
+    },
+    params: {
+        tableId: number,
+        cost: string
+    }
+}
+
+const mapStateToProps = state => ({
+    fingerprint: getTableFingerprint(state),
+    constituents: getTableConstituents(state)
+})
+
+const mapDispatchToProps = {
+    fetchTableFingerPrint
+}
+
+@connect(mapStateToProps, mapDispatchToProps)
+@title(({ fingerprint }) => fingerprint && fingerprint.table.display_name || "Table")
+class TableXRay extends Component {
+    props: Props
+
+    state = {
+        error: null
+    }
+
+    componentDidMount () {
+        this.fetchTableFingerPrint()
+    }
+
+    async fetchTableFingerPrint () {
+        const { params } = this.props
+        const cost = COSTS[params.cost]
+        try {
+            await this.props.fetchTableFingerPrint(params.tableId, cost)
+        } catch (error) {
+            this.setState({ error })
+        }
+    }
+
+    componentDidUpdate (prevProps: Props) {
+        if(prevProps.params.cost !== this.props.params.cost) {
+            this.fetchTableFingerPrint()
+        }
+    }
+
+    render () {
+        const { constituents, fingerprint, params } = this.props
+        const { error } = this.state
+
+        return (
+            <XRayPageWrapper>
+                <LoadingAndErrorWrapper
+                    loading={!constituents}
+                    error={error}
+                    noBackground
+                >
+                    { () =>
+                        <div className="full">
+                            <div className="my4 flex align-center py2">
+                                <div>
+                                    <h1 className="mt2 flex align-center">
+                                        {fingerprint.table.display_name}
+                                        <Icon name="chevronright" className="mx1 text-grey-3" size={16} />
+                                        <span className="text-grey-3">XRay</span>
+                                    </h1>
+                                    <p className="m0 text-paragraph text-measure">{fingerprint.table.description}</p>
+                                </div>
+                                <div className="ml-auto flex align-center">
+                                   <h3 className="mr2">Fidelity:</h3>
+                                    <CostSelect
+                                        xrayType='table'
+                                        currentCost={params.cost}
+                                        id={fingerprint.table.id}
+                                    />
+                                </div>
+                            </div>
+                            <ol>
+                                { constituents.map((constituent, index) =>
+                                    <li key={index}>
+                                        <Constituent
+                                            constituent={constituent}
+                                        />
+                                    </li>
+                                )}
+                            </ol>
+                        </div>
+                    }
+                </LoadingAndErrorWrapper>
+            </XRayPageWrapper>
+        )
+    }
+}
+
+export default TableXRay
diff --git a/frontend/src/metabase/xray/costs.js b/frontend/src/metabase/xray/costs.js
new file mode 100644
index 0000000000000000000000000000000000000000..1bd710214e2d057a2fa6bc44634da3d626009042
--- /dev/null
+++ b/frontend/src/metabase/xray/costs.js
@@ -0,0 +1,49 @@
+/* Combinations of MaxQueryCost and MaxComputationCost values grouped into
+ * human-understandable fidelity levels.
+ * For more info on the actual values, see src/metabase/fingerprints/costs.clj
+ */
+
+const approximate = {
+    display_name: "Approximate",
+    description: `
+        Get a sense for this data by looking at a sample.
+        This is faster but less precise.
+    `,
+    method: {
+        max_query_cost: 'sample',
+        max_computation_cost: 'linear'
+    },
+    icon: 'costapproximate'
+}
+
+const exact = {
+    display_name: "Exact",
+    description: `
+        Go deeper into this data by performing a full scan.
+        This is more precise but slower.
+    `,
+    method: {
+        max_query_cost: 'full-scan',
+        max_computation_cost: 'unbounded'
+    },
+    icon: 'costexact'
+}
+
+const extended = {
+    display_name: "Extended",
+    description: `
+        Adds additional info about this entity by including related objects.
+        This is the slowest but highest fidelity method.
+    `,
+    method: {
+        max_query_cost: 'full-scan',
+        max_computation_cost: 'unbounded'
+    },
+    icon: 'costextended'
+}
+
+export default {
+    approximate,
+    exact,
+    extended
+}
diff --git a/frontend/src/metabase/xray/stats.js b/frontend/src/metabase/xray/stats.js
new file mode 100644
index 0000000000000000000000000000000000000000..898eb67d5b38b29045936489ec7087736f430b3c
--- /dev/null
+++ b/frontend/src/metabase/xray/stats.js
@@ -0,0 +1,34 @@
+// Keys for common summary values of interest to most users
+export const VALUES_OVERVIEW = [
+    'min',
+    'earliest', // date field min is expressed as earliest
+    'max',
+    'latest', // date field max is expressed as latest
+    'count',
+    'sum',
+    'cardinality',
+    'sd',
+    'nil%',
+    'mean',
+    'median',
+    'mean-median-spread'
+]
+
+// Keys for values of interest to statistically minded users
+export const STATS_OVERVIEW = [
+    'kurtosis',
+    'skewness',
+    'entropy',
+    'var',
+    'sum-of-square',
+]
+
+export const ROBOTS = [
+    'cardinality-vs-count',
+    'positive-definite?',
+    'has-nils?',
+    'all-distinct?',
+]
+
+// periods we care about for showing periodicity
+export const PERIODICITY = ['day', 'week', 'month', 'hour', 'quarter']
diff --git a/frontend/test/__support__/integrated_tests.js b/frontend/test/__support__/integrated_tests.js
index 9a812221c2b1edfb48a3a97672dc9e31643a2c5b..da83ba18e6c3876ba61169fad6b9f74b53891f41 100644
--- a/frontend/test/__support__/integrated_tests.js
+++ b/frontend/test/__support__/integrated_tests.js
@@ -28,7 +28,8 @@ import fetch from 'isomorphic-fetch';
 import { refreshSiteSettings } from "metabase/redux/settings";
 import { getRoutes } from "metabase/routes";
 
-let hasCreatedStore = false;
+let hasStartedCreatingStore = false;
+let hasFinishedCreatingStore = false
 let loginSession = null; // Stores the current login session
 let simulateOfflineMode = false;
 
@@ -36,7 +37,7 @@ let simulateOfflineMode = false;
  * Login to the Metabase test instance with default credentials
  */
 export async function login() {
-    if (hasCreatedStore) {
+    if (hasStartedCreatingStore) {
         console.warn(
             "Warning: You have created a test store before calling login() which means that up-to-date site settings " +
             "won't be in the store unless you call `refreshSiteSettings` action manually. Please prefer " +
@@ -107,7 +108,11 @@ api._makeRequest = async (method, url, headers, requestBody, data, options) => {
 
 
     if (result.status >= 200 && result.status <= 299) {
-        return resultBody
+        if (options.transformResponse) {
+           return options.transformResponse(resultBody, { data });
+        } else {
+           return resultBody
+        }
     } else {
         const error = { status: result.status, data: resultBody, isCancelled: false }
         if (!simulateOfflineMode) {
@@ -140,13 +145,17 @@ if (process.env.E2E_HOST) {
  */
 
 export const createTestStore = async () => {
-    hasCreatedStore = true;
+    hasFinishedCreatingStore = false;
+    hasStartedCreatingStore = true;
 
     const history = useRouterHistory(createMemoryHistory)();
     const store = getStore(reducers, history, undefined, (createStore) => testStoreEnhancer(createStore, history));
     store.setFinalStoreInstance(store);
 
     await store.dispatch(refreshSiteSettings());
+
+    hasFinishedCreatingStore = true;
+
     return store;
 }
 
@@ -216,15 +225,26 @@ const testStoreEnhancer = (createStore, history) => {
                 }
             },
 
-            getDispatchedActions: () => {
-                return store._dispatchedActions;
+            logDispatchedActions: () => {
+                console.log(`Dispatched actions so far: ${store._dispatchedActions.map((a) => a.type).join(", ")}`);
             },
 
             pushPath: (path) => history.push(path),
             goBack: () => history.goBack(),
             getPath: () => urlFormat(history.getCurrentLocation()),
 
+            warnIfStoreCreationNotComplete: () => {
+                if (!hasFinishedCreatingStore) {
+                    console.warn(
+                        "Seems that you don't wait until the store creation has completely finished. " +
+                        "This means that site settings might not have been completely loaded. " +
+                        "Please add `await` in front of createTestStore call.")
+                }
+            },
+
             connectContainer: (reactContainer) => {
+                store.warnIfStoreCreationNotComplete();
+
                 const routes = createRoutes(getRoutes(store._finalStoreInstance))
                 return store._connectWithStore(
                     <Router
@@ -236,6 +256,8 @@ const testStoreEnhancer = (createStore, history) => {
             },
 
             getAppContainer: () => {
+                store.warnIfStoreCreationNotComplete();
+
                 return store._connectWithStore(
                     <Router history={history}>
                         {getRoutes(store._finalStoreInstance)}
@@ -255,9 +277,19 @@ const testStoreEnhancer = (createStore, history) => {
     }
 }
 
-export const clickRouterLink = (linkEnzymeWrapper) =>
-    linkEnzymeWrapper.simulate('click', { button: 0 });
+export const clickRouterLink = (linkEnzymeWrapper) => {
+    // This hits an Enzyme bug so we should find some other way to warn the user :/
+    // https://github.com/airbnb/enzyme/pull/769
 
+    // if (linkEnzymeWrapper.closest(Router).length === 0) {
+    //     console.warn(
+    //         "Trying to click a link with a component mounted with `store.connectContainer(container)`. Usually " +
+    //         "you want to use `store.getAppContainer()` instead because it has a complete support for react-router."
+    //     )
+    // }
+
+    linkEnzymeWrapper.simulate('click', {button: 0});
+}
 // Commonly used question helpers that are temporarily here
 // TODO Atte Keinänen 6/27/17: Put all metabase-lib -related test helpers to one file
 export const createSavedQuestion = async (unsavedQuestion) => {
diff --git a/frontend/test/__support__/integrated_tests_mocks.js b/frontend/test/__support__/integrated_tests_mocks.js
index 9cb0446392261cfdd8580dc90d8430deb72bda97..5ac00404633797745387178fc3762b6089969cea 100644
--- a/frontend/test/__support__/integrated_tests_mocks.js
+++ b/frontend/test/__support__/integrated_tests_mocks.js
@@ -34,3 +34,6 @@ tooltip.default = tooltip.TestTooltip
 
 import * as popover from "metabase/components/Popover";
 popover.default = popover.TestPopover
+
+import * as table from "metabase/visualizations/visualizations/Table";
+table.default = table.TestTable
diff --git a/frontend/test/__support__/sample_dataset_fixture.js b/frontend/test/__support__/sample_dataset_fixture.js
index 7e67ebd7519da37e7c8e7fbf00df30efb50533f7..1f2b221837ab72a836d5be7ac7a5e89b48a5e222 100644
--- a/frontend/test/__support__/sample_dataset_fixture.js
+++ b/frontend/test/__support__/sample_dataset_fixture.js
@@ -11,17 +11,20 @@ export const ORDERS_TABLE_ID = 1;
 export const PEOPLE_TABLE_ID = 2;
 export const PRODUCT_TABLE_ID = 3;
 
-export const ORDERS_TOTAL_FIELD_ID = 6;
-export const PRODUCT_CATEGORY_FIELD_ID = 21;
 export const ORDERS_CREATED_DATE_FIELD_ID = 1;
 export const ORDERS_PK_FIELD_ID = 2;
 export const ORDERS_PRODUCT_FK_FIELD_ID = 3;
+export const ORDERS_TOTAL_FIELD_ID = 6;
 
 export const MAIN_METRIC_ID = 1;
 
+export const PRODUCT_CATEGORY_FIELD_ID = 21;
 export const PRODUCT_PK_FIELD_ID = 24;
 export const PRODUCT_TILE_FIELD_ID = 27;
 
+export const PEOPLE_LATITUDE_FIELD_ID = 14;
+export const PEOPLE_LONGITUDE_FIELD_ID = 15;
+export const PEOPLE_STATE_FIELD_ID = 19;
 
 export const state = {
   metadata: {
@@ -132,7 +135,7 @@ export const state = {
         engine: 'h2',
         created_at: '2017-06-14T23:22:55.349Z',
         points_of_interest: null
-      }, 
+      },
        '2': {
         description: null,
         features: [
@@ -1450,7 +1453,20 @@ export const orders_count_card = {
 };
 
 export const native_orders_count_card = {
-    id: 2,
+    id: 3,
+    name: "# orders data",
+    display: 'table',
+    visualization_settings: {},
+    dataset_query: {
+        type: "native",
+        database: DATABASE_ID,
+        native: {
+            query: "SELECT count(*) FROM orders"
+        }
+    }
+};
+
+export const unsaved_native_orders_count_card = {
     name: "# orders data",
     display: 'table',
     visualization_settings: {},
diff --git a/frontend/test/admin/databases/DatabaseListApp.integ.spec.js b/frontend/test/admin/databases/DatabaseListApp.integ.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..e2f8305db15781b6d00a986ae30e461ddb4c41b1
--- /dev/null
+++ b/frontend/test/admin/databases/DatabaseListApp.integ.spec.js
@@ -0,0 +1,315 @@
+import {
+    login,
+    createTestStore,
+    clickRouterLink
+} from "__support__/integrated_tests";
+
+import { mount } from "enzyme";
+import {
+    FETCH_DATABASES,
+    initializeDatabase,
+    INITIALIZE_DATABASE,
+    DELETE_DATABASE_FAILED,
+    DELETE_DATABASE,
+    CREATE_DATABASE_STARTED,
+    CREATE_DATABASE_FAILED,
+    CREATE_DATABASE,
+    UPDATE_DATABASE_STARTED,
+    UPDATE_DATABASE_FAILED,
+    UPDATE_DATABASE,
+} from "metabase/admin/databases/database"
+
+import DatabaseListApp from "metabase/admin/databases/containers/DatabaseListApp";
+
+import { MetabaseApi } from 'metabase/services'
+import DatabaseEditApp from "metabase/admin/databases/containers/DatabaseEditApp";
+import { delay } from "metabase/lib/promise"
+import { getEditingDatabase } from "metabase/admin/databases/selectors";
+import FormMessage, { SERVER_ERROR_MESSAGE } from "metabase/components/form/FormMessage";
+import CreatedDatabaseModal from "metabase/admin/databases/components/CreatedDatabaseModal";
+
+describe('database list', () => {
+
+    beforeAll(async () => {
+        await login()
+    })
+
+    it('should render', async () => {
+        const store = await createTestStore()
+        store.pushPath("/admin/databases");
+
+        const app = mount(store.getAppContainer())
+
+        await store.waitForActions([FETCH_DATABASES])
+
+        const wrapper = app.find(DatabaseListApp)
+        expect(wrapper.length).toEqual(1)
+
+    })
+
+    describe('adds', () => {
+        it("should work and shouldn't let you accidentally add db twice", async () => {
+            MetabaseApi.db_create = async (db) => { await delay(10); return {...db, id: 10}; };
+
+            const store = await createTestStore()
+            store.pushPath("/admin/databases");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([FETCH_DATABASES])
+
+            const listAppBeforeAdd = app.find(DatabaseListApp)
+
+            const addDbButton = listAppBeforeAdd.find('.Button.Button--primary').first()
+            clickRouterLink(addDbButton)
+
+            const dbDetailsForm = app.find(DatabaseEditApp);
+            expect(dbDetailsForm.length).toBe(1);
+
+            await store.waitForActions([INITIALIZE_DATABASE]);
+
+            expect(dbDetailsForm.find('button[children="Save"]').props().disabled).toBe(true)
+
+            const updateInputValue = (name, value) =>
+                dbDetailsForm.find(`input[name="${name}"]`).simulate('change', { target: { value } });
+
+            updateInputValue("name", "Test db name");
+            updateInputValue("dbname", "test_postgres_db");
+            updateInputValue("user", "uberadmin");
+
+            const saveButton = dbDetailsForm.find('button[children="Save"]')
+
+            expect(saveButton.props().disabled).toBe(false)
+            saveButton.simulate("submit");
+
+            // Now the submit button should be disabled so that you aren't able to trigger the db creation action twice
+            await store.waitForActions([CREATE_DATABASE_STARTED])
+            expect(saveButton.text()).toBe("Saving...");
+            expect(saveButton.props().disabled).toBe(true);
+
+            await store.waitForActions([CREATE_DATABASE]);
+
+            expect(store.getPath()).toEqual("/admin/databases?created=10")
+            expect(app.find(CreatedDatabaseModal).length).toBe(1);
+        })
+
+        it('should show error correctly on failure', async () => {
+            MetabaseApi.db_create = async () => {
+                await delay(10);
+                return Promise.reject({
+                    status: 400,
+                    data: {},
+                    isCancelled: false
+                })
+            }
+
+            const store = await createTestStore()
+            store.pushPath("/admin/databases");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([FETCH_DATABASES])
+
+            const listAppBeforeAdd = app.find(DatabaseListApp)
+
+            const addDbButton = listAppBeforeAdd.find('.Button.Button--primary').first()
+            clickRouterLink(addDbButton)
+
+            const dbDetailsForm = app.find(DatabaseEditApp);
+            expect(dbDetailsForm.length).toBe(1);
+
+            await store.waitForActions([INITIALIZE_DATABASE]);
+
+            const saveButton = dbDetailsForm.find('button[children="Save"]')
+            expect(saveButton.props().disabled).toBe(true)
+
+            const updateInputValue = (name, value) =>
+                dbDetailsForm.find(`input[name="${name}"]`).simulate('change', { target: { value } });
+
+            updateInputValue("name", "Test db name");
+            updateInputValue("dbname", "test_postgres_db");
+            updateInputValue("user", "uberadmin");
+
+            expect(saveButton.props().disabled).toBe(false)
+            saveButton.simulate("submit");
+
+            await store.waitForActions([CREATE_DATABASE_STARTED])
+            expect(saveButton.text()).toBe("Saving...");
+
+            await store.waitForActions([CREATE_DATABASE_FAILED]);
+            expect(dbDetailsForm.find(FormMessage).text()).toEqual(SERVER_ERROR_MESSAGE);
+            expect(saveButton.text()).toBe("Save");
+        });
+    })
+
+    describe('deletes', () => {
+        it('should not block deletes', async () => {
+            MetabaseApi.db_delete = async () => await delay(10)
+
+            const store = await createTestStore()
+            store.pushPath("/admin/databases");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([FETCH_DATABASES])
+
+            const wrapper = app.find(DatabaseListApp)
+            const dbCount = wrapper.find('tr').length
+
+            const deleteButton = wrapper.find('.Button.Button--danger').first()
+
+            deleteButton.simulate('click')
+
+            const deleteModal = wrapper.find('.test-modal')
+            deleteModal.find('.Form-input').simulate('change', { target: { value: "DELETE" }})
+            deleteModal.find('.Button.Button--danger').simulate('click')
+
+            // test that the modal is gone
+            expect(wrapper.find('.test-modal').length).toEqual(0)
+
+            // we should now have a disabled db row during delete
+            expect(wrapper.find('tr.disabled').length).toEqual(1)
+
+            // db delete finishes
+            await store.waitForActions([DELETE_DATABASE])
+
+            // there should be no disabled db rows now
+            expect(wrapper.find('tr.disabled').length).toEqual(0)
+
+            // we should now have one database less in the list
+            expect(wrapper.find('tr').length).toEqual(dbCount - 1)
+        })
+
+        it('should show error correctly on failure', async () => {
+            MetabaseApi.db_delete = async () => {
+                await delay(10);
+                return Promise.reject({
+                    status: 400,
+                    data: {},
+                    isCancelled: false
+                })
+            }
+
+            const store = await createTestStore()
+            store.pushPath("/admin/databases");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([FETCH_DATABASES])
+
+            const wrapper = app.find(DatabaseListApp)
+            const dbCount = wrapper.find('tr').length
+
+            const deleteButton = wrapper.find('.Button.Button--danger').first()
+
+            deleteButton.simulate('click')
+
+            const deleteModal = wrapper.find('.test-modal')
+            deleteModal.find('.Form-input').simulate('change', { target: { value: "DELETE" }})
+            deleteModal.find('.Button.Button--danger').simulate('click')
+
+            // test that the modal is gone
+            expect(wrapper.find('.test-modal').length).toEqual(0)
+
+            // we should now have a disabled db row during delete
+            expect(wrapper.find('tr.disabled').length).toEqual(1)
+
+            // db delete fails
+            await store.waitForActions([DELETE_DATABASE_FAILED])
+
+            // there should be no disabled db rows now
+            expect(wrapper.find('tr.disabled').length).toEqual(0)
+
+            // the db count should be same as before
+            expect(wrapper.find('tr').length).toEqual(dbCount)
+
+            expect(wrapper.find(FormMessage).text()).toBe(SERVER_ERROR_MESSAGE);
+        })
+    })
+
+    describe('editing', () => {
+        const newName = "Ex-Sample Data Set";
+
+        it('should be able to edit database name', async () => {
+            const store = await createTestStore()
+            store.pushPath("/admin/databases");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([FETCH_DATABASES])
+
+            const wrapper = app.find(DatabaseListApp)
+            const sampleDatasetEditLink = wrapper.find('a[children="Sample Dataset"]').first()
+            clickRouterLink(sampleDatasetEditLink);
+
+            expect(store.getPath()).toEqual("/admin/databases/1")
+            await store.waitForActions([INITIALIZE_DATABASE]);
+
+            const dbDetailsForm = app.find(DatabaseEditApp);
+            expect(dbDetailsForm.length).toBe(1);
+
+            const nameField = dbDetailsForm.find(`input[name="name"]`);
+            expect(nameField.props().value).toEqual("Sample Dataset")
+
+            nameField.simulate('change', { target: { value: newName } });
+
+            const saveButton = dbDetailsForm.find('button[children="Save"]')
+            saveButton.simulate("submit");
+
+            await store.waitForActions([UPDATE_DATABASE_STARTED]);
+            expect(saveButton.text()).toBe("Saving...");
+            expect(saveButton.props().disabled).toBe(true);
+
+            await store.waitForActions([UPDATE_DATABASE]);
+            expect(saveButton.props().disabled).toBe(undefined);
+            expect(dbDetailsForm.find(FormMessage).text()).toEqual("Successfully saved!");
+        })
+
+        it('should show the updated database name', async () => {
+            const store = await createTestStore()
+            store.pushPath("/admin/databases/1");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([INITIALIZE_DATABASE]);
+
+            const dbDetailsForm = app.find(DatabaseEditApp);
+            expect(dbDetailsForm.length).toBe(1);
+
+            const nameField = dbDetailsForm.find(`input[name="name"]`);
+            expect(nameField.props().value).toEqual(newName)
+        });
+
+        it('should show an error if saving fails', async () => {
+            const store = await createTestStore()
+            store.pushPath("/admin/databases/1");
+
+            const app = mount(store.getAppContainer())
+            await store.waitForActions([INITIALIZE_DATABASE]);
+
+            const dbDetailsForm = app.find(DatabaseEditApp);
+            expect(dbDetailsForm.length).toBe(1);
+
+            const tooLongName = "too long name ".repeat(100);
+            const nameField = dbDetailsForm.find(`input[name="name"]`);
+            nameField.simulate('change', { target: { value: tooLongName } });
+
+            const saveButton = dbDetailsForm.find('button[children="Save"]')
+            saveButton.simulate("submit");
+
+            await store.waitForActions([UPDATE_DATABASE_STARTED]);
+            expect(saveButton.text()).toBe("Saving...");
+            expect(saveButton.props().disabled).toBe(true);
+
+            await store.waitForActions([UPDATE_DATABASE_FAILED]);
+            expect(saveButton.props().disabled).toBe(undefined);
+            expect(dbDetailsForm.find(".Form-message.text-error").length).toBe(1);
+        });
+
+        afterAll(async () => {
+            const store = await createTestStore()
+            store.dispatch(initializeDatabase(1));
+            await store.waitForActions([INITIALIZE_DATABASE])
+            const sampleDatasetDb = getEditingDatabase(store.getState())
+
+            await MetabaseApi.db_update({
+                ...sampleDatasetDb,
+                name: "Sample Dataset"
+            });
+        });
+    })
+})
diff --git a/frontend/test/admin/datamodel/FieldApp.integ.spec.js b/frontend/test/admin/datamodel/FieldApp.integ.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..b946b36338e4fad393c53e5f6cf45aaf8f998246
--- /dev/null
+++ b/frontend/test/admin/datamodel/FieldApp.integ.spec.js
@@ -0,0 +1,378 @@
+import {
+    login,
+    createTestStore,
+} from "__support__/integrated_tests";
+
+import {
+    DELETE_FIELD_DIMENSION,
+    deleteFieldDimension,
+    FETCH_TABLE_METADATA,
+    fetchTableMetadata,
+    UPDATE_FIELD,
+    UPDATE_FIELD_DIMENSION,
+    UPDATE_FIELD_VALUES,
+    updateField,
+    updateFieldValues
+} from "metabase/redux/metadata"
+
+import { metadata as staticFixtureMetadata } from "__support__/sample_dataset_fixture"
+
+import React from 'react';
+import { mount } from "enzyme";
+import { FETCH_IDFIELDS } from "metabase/admin/datamodel/datamodel";
+import { delay } from "metabase/lib/promise"
+import FieldApp, {
+    FieldHeader, FieldRemapping, FieldValueMapping,
+    ValueRemappings
+} from "metabase/admin/datamodel/containers/FieldApp";
+import Input from "metabase/components/Input";
+import {
+    FieldVisibilityPicker,
+    SpecialTypeAndTargetPicker
+} from "metabase/admin/datamodel/components/database/ColumnItem";
+import { TestPopover } from "metabase/components/Popover";
+import Select from "metabase/components/Select";
+import SelectButton from "metabase/components/SelectButton";
+import ButtonWithStatus from "metabase/components/ButtonWithStatus";
+
+const getRawFieldWithId = (store, fieldId) => store.getState().metadata.fields[fieldId];
+
+// TODO: Should we use the metabase/lib/urls methods for constructing urls here as well?
+
+// TODO Atte Keinänen 7/10/17: Use fixtures after metabase-lib branch has been merged
+
+const CREATED_AT_ID = 1;
+const PRODUCT_ID_FK_ID = 3;
+const USER_ID_FK_ID = 7;
+// enumeration with values 1, 2, 3, 4 or 5
+const USER_SOURCE_TABLE_ID = 2;
+const USER_SOURCE_ID = 18;
+
+const PRODUCT_RATING_TABLE_ID = 4;
+const PRODUCT_RATING_ID = 33;
+
+const initFieldApp = async ({ tableId = 1, fieldId }) => {
+    const store = await createTestStore()
+    store.pushPath(`/admin/datamodel/database/1/table/${tableId}/${fieldId}`);
+    const fieldApp = mount(store.connectContainer(<FieldApp />));
+    await store.waitForActions([FETCH_IDFIELDS]);
+    store.resetDispatchedActions();
+    return { store, fieldApp }
+}
+
+describe("FieldApp", () => {
+    beforeAll(async () => {
+        await login()
+    })
+
+    describe("name settings", () => {
+        const newTitle = 'Brought Into Existence At'
+        const newDescription = 'The point in space-time when this order saw the light.'
+
+        it("lets you change field name and description", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+
+            const header = fieldApp.find(FieldHeader)
+            expect(header.length).toBe(1)
+
+            const nameInput = header.find(Input).at(0);
+            expect(nameInput.props().value).toBe(staticFixtureMetadata.fields['1'].display_name);
+            const descriptionInput = header.find(Input).at(1);
+            expect(descriptionInput.props().value).toBe(staticFixtureMetadata.fields['1'].description);
+
+            nameInput.simulate('change', {target: {value: newTitle}});
+            await store.waitForActions([UPDATE_FIELD])
+            store.resetDispatchedActions();
+
+            descriptionInput.simulate('change', {target: {value: newDescription}});
+            await store.waitForActions([UPDATE_FIELD])
+        })
+
+        it("should show the entered values after a page reload", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+
+            const header = fieldApp.find(FieldHeader)
+            expect(header.length).toBe(1)
+            const nameInput = header.find(Input).at(0);
+            const descriptionInput = header.find(Input).at(1);
+
+            expect(nameInput.props().value).toBe(newTitle);
+            expect(descriptionInput.props().value).toBe(newDescription);
+        })
+
+        afterAll(async () => {
+            const store = await createTestStore()
+            await store.dispatch(fetchTableMetadata(1));
+            const createdAtField = getRawFieldWithId(store, CREATED_AT_ID)
+
+            await store.dispatch(updateField({
+                ...createdAtField,
+                display_name: staticFixtureMetadata.fields[1].display_name,
+                description: staticFixtureMetadata.fields[1].description,
+            }))
+        })
+    })
+
+    describe("visibility settings", () => {
+        it("shows correct default visibility", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+            const visibilitySelect = fieldApp.find(FieldVisibilityPicker);
+            expect(visibilitySelect.text()).toMatch(/Everywhere/);
+        })
+
+        it("lets you change field visibility", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+
+            const visibilitySelect = fieldApp.find(FieldVisibilityPicker);
+            visibilitySelect.simulate('click');
+            visibilitySelect.find(TestPopover).find("li").at(1).children().first().simulate("click");
+
+            await store.waitForActions([UPDATE_FIELD])
+        })
+
+        it("should show the updated visibility setting after a page reload", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+
+            const picker = fieldApp.find(FieldVisibilityPicker);
+            expect(picker.text()).toMatch(/Only in Detail Views/);
+        })
+
+        afterAll(async () => {
+            const store = await createTestStore()
+            await store.dispatch(fetchTableMetadata(1));
+            const createdAtField = getRawFieldWithId(store, CREATED_AT_ID)
+
+            await store.dispatch(updateField({
+                ...createdAtField,
+                visibility_type: "normal",
+            }))
+        })
+    })
+
+    describe("special type and target settings", () => {
+        it("shows the correct default special type for a foreign key", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: PRODUCT_ID_FK_ID });
+            const picker = fieldApp.find(SpecialTypeAndTargetPicker).text()
+            expect(picker).toMatch(/Foreign KeyProducts → ID/);
+        })
+
+        it("lets you change the type to 'No special type'", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+            const picker = fieldApp.find(SpecialTypeAndTargetPicker)
+            const typeSelect = picker.find(Select).at(0)
+            typeSelect.simulate('click');
+
+            const noSpecialTypeButton = typeSelect.find(TestPopover).find("li").last().children().first()
+            noSpecialTypeButton.simulate("click");
+
+            await store.waitForActions([UPDATE_FIELD])
+            expect(picker.text()).toMatch(/Select a special type/);
+        })
+
+        it("lets you change the type to 'Number'", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+            const picker = fieldApp.find(SpecialTypeAndTargetPicker)
+            const typeSelect = picker.find(Select).at(0)
+            typeSelect.simulate('click');
+
+            const noSpecialTypeButton = typeSelect.find(TestPopover)
+                .find("li")
+                .filterWhere(li => li.text() === "Number").first()
+                .children().first();
+
+            noSpecialTypeButton.simulate("click");
+
+            await store.waitForActions([UPDATE_FIELD])
+            expect(picker.text()).toMatch(/Number/);
+        })
+
+        it("lets you change the type to 'Foreign key' and choose the target field", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+            const picker = fieldApp.find(SpecialTypeAndTargetPicker)
+            const typeSelect = picker.find(Select).at(0)
+            typeSelect.simulate('click');
+
+            const foreignKeyButton = typeSelect.find(TestPopover).find("li").at(2).children().first();
+            foreignKeyButton.simulate("click");
+            await store.waitForActions([UPDATE_FIELD])
+            store.resetDispatchedActions();
+
+            expect(picker.text()).toMatch(/Foreign KeySelect a target/);
+            const fkFieldSelect = picker.find(Select).at(1)
+            fkFieldSelect.simulate('click');
+
+            const productIdField = fkFieldSelect.find(TestPopover)
+                .find("li")
+                .filterWhere(li => /The numerical product number./.test(li.text()))
+                .first().children().first();
+
+            productIdField.simulate('click')
+            await store.waitForActions([UPDATE_FIELD])
+            expect(picker.text()).toMatch(/Foreign KeyProducts → ID/);
+        })
+
+        afterAll(async () => {
+            const store = await createTestStore()
+            await store.dispatch(fetchTableMetadata(1));
+            const createdAtField = getRawFieldWithId(store, CREATED_AT_ID)
+
+            await store.dispatch(updateField({
+                ...createdAtField,
+                special_type: null,
+                fk_target_field_id: null
+            }))
+        })
+    })
+
+    describe("display value / remapping settings", () => {
+        it("shows only 'Use original value' for fields without fk and values", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: CREATED_AT_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select).first();
+            expect(mappingTypePicker.text()).toBe('Use original value')
+
+            mappingTypePicker.simulate('click');
+            const pickerOptions = mappingTypePicker.find(TestPopover).find("li");
+            expect(pickerOptions.length).toBe(1);
+        })
+
+        it("lets you change to 'Use foreign key' and change the target for field with fk", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: USER_ID_FK_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+            expect(mappingTypePicker.text()).toBe('Use original value')
+
+            mappingTypePicker.simulate('click');
+            const pickerOptions = mappingTypePicker.find(TestPopover).find("li");
+            expect(pickerOptions.length).toBe(2);
+
+            const useFKButton = pickerOptions.at(1).children().first()
+            useFKButton.simulate('click');
+            store.waitForActions([UPDATE_FIELD_DIMENSION, FETCH_TABLE_METADATA])
+            store.resetDispatchedActions();
+            // TODO: Figure out a way to avoid using delay – the use of delays may lead to occasional CI failures
+            await delay(500);
+
+            const fkFieldSelect = section.find(SelectButton);
+
+            expect(fkFieldSelect.text()).toBe("Name");
+            fkFieldSelect.simulate('click');
+
+            const sourceField = fkFieldSelect.parent().find(TestPopover)
+                .find("li")
+                .filterWhere(li => /Source/.test(li.text()))
+                .first().children().first();
+
+            sourceField.simulate('click')
+            store.waitForActions([FETCH_TABLE_METADATA])
+            // TODO: Figure out a way to avoid using delay – the use of delays may lead to occasional CI failures
+            await delay(500);
+            expect(fkFieldSelect.text()).toBe("Source");
+        })
+
+        it("doesn't show date fields in fk options", async () => {
+            const { fieldApp } = await initFieldApp({ fieldId: USER_ID_FK_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+            expect(mappingTypePicker.text()).toBe('Use foreign key')
+
+            const fkFieldSelect = section.find(SelectButton);
+            fkFieldSelect.simulate('click');
+
+            const popover = fkFieldSelect.parent().find(TestPopover);
+            expect(popover.length).toBe(1);
+
+            const dateFieldIcons = popover.find("svg.Icon-calendar")
+            expect(dateFieldIcons.length).toBe(0);
+        })
+
+        it("lets you switch back to Use original value after changing to some other value", async () => {
+            const { store, fieldApp } = await initFieldApp({ fieldId: USER_ID_FK_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+            expect(mappingTypePicker.text()).toBe('Use foreign key')
+
+            mappingTypePicker.simulate('click');
+            const pickerOptions = mappingTypePicker.find(TestPopover).find("li");
+            const useOriginalValue = pickerOptions.first().children().first()
+            useOriginalValue.simulate('click');
+
+            store.waitForActions([DELETE_FIELD_DIMENSION, FETCH_TABLE_METADATA]);
+        })
+
+        it("doesn't let you enter custom remappings for a field with string values", async () => {
+            const { fieldApp } = await initFieldApp({ tableId: USER_SOURCE_TABLE_ID, fieldId: USER_SOURCE_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+
+            expect(mappingTypePicker.text()).toBe('Use original value')
+            mappingTypePicker.simulate('click');
+            const pickerOptions = mappingTypePicker.find(TestPopover).find("li");
+            expect(pickerOptions.length).toBe(1);
+        });
+
+        // TODO: Make sure that product rating is a Category and that a sync has been run
+        it("lets you enter custom remappings for a field with numeral values", async () => {
+            const { store, fieldApp } = await initFieldApp({ tableId: PRODUCT_RATING_TABLE_ID, fieldId: PRODUCT_RATING_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+
+            expect(mappingTypePicker.text()).toBe('Use original value')
+            mappingTypePicker.simulate('click');
+            const pickerOptions = mappingTypePicker.find(TestPopover).find("li");
+            expect(pickerOptions.length).toBe(2);
+
+            const customMappingButton = pickerOptions.at(1).children().first()
+            customMappingButton.simulate('click');
+
+            store.waitForActions([UPDATE_FIELD_DIMENSION, FETCH_TABLE_METADATA])
+            // TODO: Figure out a way to avoid using delay – using delays may lead to occasional CI failures
+            await delay(500);
+
+            const valueRemappingsSection = section.find(ValueRemappings);
+            expect(valueRemappingsSection.length).toBe(1);
+
+            const fieldValueMappings = valueRemappingsSection.find(FieldValueMapping);
+            expect(fieldValueMappings.length).toBe(5);
+
+            const firstMapping = fieldValueMappings.at(0);
+            expect(firstMapping.find("h3").text()).toBe("1");
+            expect(firstMapping.find(Input).props().value).toBe("1");
+            firstMapping.find(Input).simulate('change', {target: {value: "Terrible"}});
+
+            const lastMapping = fieldValueMappings.last();
+            expect(lastMapping.find("h3").text()).toBe("5");
+            expect(lastMapping.find(Input).props().value).toBe("5");
+            lastMapping.find(Input).simulate('change', {target: {value: "Extraordinarily awesome"}});
+
+            const saveButton = valueRemappingsSection.find(ButtonWithStatus)
+            saveButton.simulate("click");
+
+            store.waitForActions([UPDATE_FIELD_VALUES]);
+        });
+        
+        it("shows the updated values after page reload", async () => {
+            const { fieldApp } = await initFieldApp({ tableId: PRODUCT_RATING_TABLE_ID, fieldId: PRODUCT_RATING_ID });
+            const section = fieldApp.find(FieldRemapping)
+            const mappingTypePicker = section.find(Select);
+
+            expect(mappingTypePicker.text()).toBe('Custom mapping');
+            const fieldValueMappings = section.find(FieldValueMapping);
+            expect(fieldValueMappings.first().find(Input).props().value).toBe("Terrible");
+            expect(fieldValueMappings.last().find(Input).props().value).toBe("Extraordinarily awesome");
+        });
+
+        afterAll(async () => {
+            const store = await createTestStore()
+
+            await store.dispatch(deleteFieldDimension(USER_ID_FK_ID));
+            await store.dispatch(deleteFieldDimension(PRODUCT_RATING_ID));
+
+            // TODO: This is a little hacky – could there be a way to simply reset the user-defined values?
+            await store.dispatch(updateFieldValues(PRODUCT_RATING_ID, [
+                [1, '1'], [2, '2'], [3, '3'], [4, '4'], [5, '5']
+            ]));
+        })
+    })
+
+})
\ No newline at end of file
diff --git a/frontend/test/containers/DatabaseListApp.integ.spec.js b/frontend/test/containers/DatabaseListApp.integ.spec.js
deleted file mode 100644
index f75450408b8ef515c47786c38ee68c52bd4e2d66..0000000000000000000000000000000000000000
--- a/frontend/test/containers/DatabaseListApp.integ.spec.js
+++ /dev/null
@@ -1,70 +0,0 @@
-import {
-    login,
-    createTestStore,
-} from "__support__/integrated_tests";
-
-import { mount } from "enzyme";
-import { FETCH_DATABASES, DELETE_DATABASE } from "metabase/admin/databases/database"
-import DatabaseListApp from "metabase/admin/databases/containers/DatabaseListApp";
-import { delay } from "metabase/lib/promise"
-
-import { MetabaseApi } from 'metabase/services'
-
-describe('dashboard list', () => {
-
-    beforeAll(async () => {
-        await login()
-    })
-
-    it('should render', async () => {
-        const store = await createTestStore()
-        store.pushPath("/admin/databases");
-
-        const app = mount(store.getAppContainer())
-
-        await store.waitForActions([FETCH_DATABASES])
-
-        const wrapper = app.find(DatabaseListApp)
-        expect(wrapper.length).toEqual(1)
-
-    })
-
-    describe('deletes', () => {
-        it('should not block deletes', async () => {
-            // mock the db_delete method call to simulate a longer running delete
-            MetabaseApi.db_delete = () => delay(5000)
-
-            const store = await createTestStore()
-            store.pushPath("/admin/databases");
-
-            const app = mount(store.getAppContainer())
-            await store.waitForActions([FETCH_DATABASES])
-
-            const wrapper = app.find(DatabaseListApp)
-            const dbCount = wrapper.find('tr').length
-
-            const deleteButton = wrapper.find('.Button.Button--danger').first()
-
-            deleteButton.simulate('click')
-
-            const deleteModal = wrapper.find('.test-modal')
-            deleteModal.find('.Form-input').simulate('change', { target: { value: "DELETE" }})
-            deleteModal.find('.Button.Button--danger').simulate('click')
-
-            // test that the modal is gone
-            expect(wrapper.find('.test-modal').length).toEqual(0)
-
-            // we should now have a disabled db row during delete
-            expect(wrapper.find('tr.disabled').length).toEqual(1)
-
-            // db delete finishes
-            await store.waitForActions([DELETE_DATABASE])
-
-            // there should be no disabled db rows now
-            expect(wrapper.find('tr.disabled').length).toEqual(0)
-
-            // we should now have one less database in the list
-            expect(wrapper.find('tr').length).toEqual(dbCount - 1)
-        })
-    })
-})
diff --git a/frontend/test/containers/SaveQuestionModal.spec.js b/frontend/test/containers/SaveQuestionModal.spec.js
index fef148528a4af8001905d31e6cd04a7b98fabbc1..9ed2510b6e06a155ee7ee253961af25cb3b2e0dc 100644
--- a/frontend/test/containers/SaveQuestionModal.spec.js
+++ b/frontend/test/containers/SaveQuestionModal.spec.js
@@ -6,22 +6,31 @@ import Question from "metabase-lib/lib/Question";
 import {
     DATABASE_ID,
     ORDERS_TABLE_ID,
-    metadata, ORDERS_TOTAL_FIELD_ID
+    PEOPLE_TABLE_ID,
+    metadata,
+    ORDERS_TOTAL_FIELD_ID
 } from "__support__/sample_dataset_fixture";
 
 const createFnMock = jest.fn();
-const saveFnMock = jest.fn();
+let saveFnMock;
 
-const getSaveQuestionModal = (question, originalQuestion) => <SaveQuestionModal
-    card={question.card()}
-    originalCard={originalQuestion && originalQuestion.card()}
-    tableMetadata={question.tableMetadata()}
-    createFn={createFnMock}
-    saveFn={saveFnMock}
-    onClose={() => {}}
-/>
+const getSaveQuestionModal = (question, originalQuestion) =>
+    <SaveQuestionModal
+        card={question.card()}
+        originalCard={originalQuestion && originalQuestion.card()}
+        tableMetadata={question.tableMetadata()}
+        createFn={createFnMock}
+        saveFn={saveFnMock}
+        onClose={() => {}}
+    />
 
 describe('SaveQuestionModal', () => {
+    beforeEach(() => {
+        // we need to create a new save mock before each test to ensure that each
+        // test has its own instance
+        saveFnMock = jest.fn();
+    })
+
     it("should call createFn correctly for a new question", async () => {
         const newQuestion = Question.create({databaseId: DATABASE_ID, tableId: ORDERS_TABLE_ID, metadata})
             .query()
@@ -52,4 +61,26 @@ describe('SaveQuestionModal', () => {
         await component.instance().formSubmitted();
         expect(saveFnMock.mock.calls.length).toBe(1);
     });
-});
\ No newline at end of file
+
+    it("should preserve the collection_id of a question in overwrite mode", async () => {
+        let originalQuestion = Question.create({databaseId: DATABASE_ID, tableId: PEOPLE_TABLE_ID, metadata})
+            .query()
+            .addAggregation(["count"])
+            .question()
+
+        // set the collection_id of the original question
+        originalQuestion = originalQuestion.setCard({
+            ...originalQuestion.card(),
+            collection_id: 5
+        })
+
+        let dirtyQuestion = originalQuestion
+            .query()
+            .addBreakout(["field-id", ORDERS_TOTAL_FIELD_ID])
+            .question()
+
+        const component = shallow(getSaveQuestionModal(dirtyQuestion, originalQuestion))
+        await component.instance().formSubmitted();
+        expect(saveFnMock.mock.calls[0][0].collection_id).toEqual(5);
+    })
+});
diff --git a/frontend/test/dashboard/dashboard.integ.spec.js b/frontend/test/dashboard/dashboard.integ.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..e58343c0b0432b6d933e9bba4759724c10f90368
--- /dev/null
+++ b/frontend/test/dashboard/dashboard.integ.spec.js
@@ -0,0 +1,73 @@
+import { PublicApi } from "metabase/services";
+import { fetchDashboard } from "../../src/metabase/dashboard/dashboard"
+
+import {
+    createTestStore,
+    login
+} from "__support__/integrated_tests";
+
+import { getParameterFieldValues } from "metabase/selectors/metadata";
+import { ADD_PARAM_VALUES } from "metabase/redux/metadata";
+
+// TODO Atte Keinänen 7/17/17: When we have a nice way to create dashboards in tests, this could use a real saved dashboard
+// instead of mocking the API endpoint
+
+// Mock the dashboard endpoint using a real response of `public/dashboard/:dashId`
+const mockPublicDashboardResponse = {
+    "name": "Dashboard",
+    "description": "For testing parameter values",
+    "id": 40,
+    "parameters": [{"name": "Category", "slug": "category", "id": "598ab323", "type": "category"}],
+    "ordered_cards": [{
+        "sizeX": 6,
+        "series": [],
+        "card": {
+            "id": 25,
+            "name": "Orders over time",
+            "description": null,
+            "display": "line",
+            "dataset_query": {"type": "query"}
+        },
+        "col": 0,
+        "id": 105,
+        "parameter_mappings": [{
+            "parameter_id": "598ab323",
+            "card_id": 25,
+            "target": ["dimension", ["fk->", 3, 21]]
+        }],
+        "card_id": 25,
+        "visualization_settings": {},
+        "dashboard_id": 40,
+        "sizeY": 6,
+        "row": 0
+    }],
+    // Parameter values are self-contained in the public dashboard response
+    "param_values": {
+        "21": {
+            "values": ["Doohickey", "Gadget", "Gizmo", "Widget"],
+            "human_readable_values": {},
+            "field_id": 21
+        }
+    }
+}
+PublicApi.dashboard = async () => {
+    return mockPublicDashboardResponse;
+}
+
+describe("Dashboard redux actions", () => {
+    beforeAll(async () => {
+        await login();
+    })
+
+    describe("fetchDashboard(...)", () => {
+        it("should add the parameter values to state tree for public dashboards", async () => {
+            const store = await createTestStore();
+            // using hash as dashboard id should invoke the public API
+            await store.dispatch(fetchDashboard('6e59cc97-3b6a-4bb6-9e7a-5efeee27e40f'));
+            await store.waitForActions(ADD_PARAM_VALUES)
+
+            const fieldValues = await getParameterFieldValues(store.getState(), { parameter: { field_id: 21 }});
+            expect(fieldValues).toEqual([["Doohickey"], ["Gadget"], ["Gizmo"], ["Widget"]]);
+        })
+    })
+})
diff --git a/frontend/test/legacy-selenium/support/fixtures/metabase.db.h2.db b/frontend/test/legacy-selenium/support/fixtures/metabase.db.h2.db
index 525215b6019e31c1a16911b75bbb6753fa9b6f21..6b74b1287d6c16f4c494083d0a21a746958639f7 100644
Binary files a/frontend/test/legacy-selenium/support/fixtures/metabase.db.h2.db and b/frontend/test/legacy-selenium/support/fixtures/metabase.db.h2.db differ
diff --git a/frontend/test/lib/formatting.spec.js b/frontend/test/lib/formatting.spec.js
index be2aa4cbaf57ccfe0bee36e30d78af7fe2fdf36c..c8344508e9d270b13934d6892a6d6b3cf6470b4b 100644
--- a/frontend/test/lib/formatting.spec.js
+++ b/frontend/test/lib/formatting.spec.js
@@ -54,8 +54,8 @@ describe('formatting', () => {
             expect(formatValue(12345, { column: { base_type: TYPE.Number, special_type: TYPE.ZipCode }})).toEqual("12345");
         });
         it("should format latitude and longitude columns correctly", () => {
-            expect(formatValue(37.7749, { column: { base_type: TYPE.Number, special_type: TYPE.Latitude }})).toEqual("37.77490000");
-            expect(formatValue(-122.4194, { column: { base_type: TYPE.Number, special_type: TYPE.Longitude }})).toEqual("-122.41940000");
+            expect(formatValue(37.7749, { column: { base_type: TYPE.Number, special_type: TYPE.Latitude }})).toEqual("37.77490000° N");
+            expect(formatValue(-122.4194, { column: { base_type: TYPE.Number, special_type: TYPE.Longitude }})).toEqual("122.41940000° W");
         });
         it("should return a component for links in jsx mode", () => {
             expect(isElementOfType(formatValue("http://metabase.com/", { jsx: true }), ExternalLink)).toEqual(true);
diff --git a/frontend/test/metabase-lib/Dimension.integ.spec.js b/frontend/test/metabase-lib/Dimension.integ.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..075ce18d2f19dd0cec074e8163e34cf0fd4e2c01
--- /dev/null
+++ b/frontend/test/metabase-lib/Dimension.integ.spec.js
@@ -0,0 +1,395 @@
+import { createTestStore, login } from "__support__/integrated_tests";
+
+import {
+    ORDERS_TOTAL_FIELD_ID,
+    PRODUCT_CATEGORY_FIELD_ID,
+    ORDERS_CREATED_DATE_FIELD_ID,
+    ORDERS_PRODUCT_FK_FIELD_ID,
+    PRODUCT_TILE_FIELD_ID
+} from "__support__/sample_dataset_fixture";
+
+import {
+    fetchDatabaseMetadata,
+    fetchTableMetadata
+} from "metabase/redux/metadata";
+import { getMetadata } from "metabase/selectors/metadata";
+import Dimension from "metabase-lib/lib/Dimension";
+
+describe("Dimension classes", () => {
+    let metadata = null;
+
+    beforeAll(async () => {
+        await login();
+        const store = await createTestStore();
+        await store.dispatch(fetchDatabaseMetadata(1));
+        await store.dispatch(fetchTableMetadata(1));
+        await store.dispatch(fetchTableMetadata(2));
+        await store.dispatch(fetchTableMetadata(3));
+        metadata = getMetadata(store.getState());
+    });
+
+    describe("Dimension", () => {
+        describe("STATIC METHODS", () => {
+            describe("parseMBQL(mbql metadata)", () => {
+                it("parses and formats MBQL correctly", () => {
+                    expect(Dimension.parseMBQL(1, metadata).mbql()).toEqual([
+                        "field-id",
+                        1
+                    ]);
+                    expect(
+                        Dimension.parseMBQL(["field-id", 1], metadata).mbql()
+                    ).toEqual(["field-id", 1]);
+                    expect(
+                        Dimension.parseMBQL(["fk->", 1, 2], metadata).mbql()
+                    ).toEqual(["fk->", 1, 2]);
+                    expect(
+                        Dimension.parseMBQL(
+                            ["datetime-field", 1, "month"],
+                            metadata
+                        ).mbql()
+                    ).toEqual(["datetime-field", ["field-id", 1], "month"]);
+                    expect(
+                        Dimension.parseMBQL(
+                            ["datetime-field", ["field-id", 1], "month"],
+                            metadata
+                        ).mbql()
+                    ).toEqual(["datetime-field", ["field-id", 1], "month"]);
+                    expect(
+                        Dimension.parseMBQL(
+                            ["datetime-field", ["fk->", 1, 2], "month"],
+                            metadata
+                        ).mbql()
+                    ).toEqual(["datetime-field", ["fk->", 1, 2], "month"]);
+                });
+            });
+
+            describe("isEqual(other)", () => {
+                it("returns true for equivalent field-ids", () => {
+                    const d1 = Dimension.parseMBQL(1, metadata);
+                    const d2 = Dimension.parseMBQL(["field-id", 1], metadata);
+                    expect(d1.isEqual(d2)).toEqual(true);
+                    expect(d1.isEqual(["field-id", 1])).toEqual(true);
+                    expect(d1.isEqual(1)).toEqual(true);
+                });
+                it("returns false for different type clauses", () => {
+                    const d1 = Dimension.parseMBQL(["fk->", 1, 2], metadata);
+                    const d2 = Dimension.parseMBQL(["field-id", 1], metadata);
+                    expect(d1.isEqual(d2)).toEqual(false);
+                });
+                it("returns false for same type clauses with different arguments", () => {
+                    const d1 = Dimension.parseMBQL(["fk->", 1, 2], metadata);
+                    const d2 = Dimension.parseMBQL(["fk->", 1, 3], metadata);
+                    expect(d1.isEqual(d2)).toEqual(false);
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("dimensions()", () => {
+                it("returns `dimension_options` of the underlying field if available", () => {
+                    pending();
+                });
+                it("returns sub-dimensions for matching dimension if no `dimension_options`", () => {
+                    // just a single scenario should be sufficient here as we will test
+                    // `static dimensions()` individually for each dimension
+                    pending();
+                });
+            });
+
+            describe("isSameBaseDimension(other)", () => {
+                it("returns true if the base dimensions are same", () => {
+                    pending();
+                });
+                it("returns false if the base dimensions don't match", () => {
+                    pending();
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("dimensions()", () => {
+                it("returns `default_dimension_option` of the underlying field if available", () => {
+                    pending();
+                });
+                it("returns default dimension for matching dimension if no `default_dimension_option`", () => {
+                    // just a single scenario should be sufficient here as we will test
+                    // `static defaultDimension()` individually for each dimension
+                    pending();
+                });
+            });
+        });
+    });
+
+    describe("FieldIDDimension", () => {
+        let dimension = null;
+        let categoryDimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(
+                ["field-id", ORDERS_TOTAL_FIELD_ID],
+                metadata
+            );
+            categoryDimension = Dimension.parseMBQL(
+                ["field-id", PRODUCT_CATEGORY_FIELD_ID],
+                metadata
+            );
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns a "field-id" clause', () => {
+                    expect(dimension.mbql()).toEqual([
+                        "field-id",
+                        ORDERS_TOTAL_FIELD_ID
+                    ]);
+                });
+            });
+            describe("displayName()", () => {
+                it("returns the field name", () => {
+                    expect(dimension.displayName()).toEqual("Total");
+                });
+            });
+            describe("subDisplayName()", () => {
+                it("returns 'Default' for numeric fields", () => {
+                    expect(dimension.subDisplayName()).toEqual("Default");
+                });
+                it("returns 'Default' for non-numeric fields", () => {
+                    expect(
+                        Dimension.parseMBQL(
+                            ["field-id", PRODUCT_CATEGORY_FIELD_ID],
+                            metadata
+                        ).subDisplayName()
+                    ).toEqual("Default");
+                });
+            });
+            describe("subTriggerDisplayName()", () => {
+                it("returns 'Unbinned' if the dimension is a binnable number", () => {
+                    expect(dimension.subTriggerDisplayName()).toBe("Unbinned");
+                });
+                it("does not have a value if the dimension is a category", () => {
+                    expect(
+                        categoryDimension.subTriggerDisplayName()
+                    ).toBeFalsy();
+                });
+            });
+        });
+    });
+
+    describe("FKDimension", () => {
+        let dimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(
+                ["fk->", ORDERS_PRODUCT_FK_FIELD_ID, PRODUCT_TILE_FIELD_ID],
+                metadata
+            );
+        });
+
+        describe("STATIC METHODS", () => {
+            describe("dimensions(parentDimension)", () => {
+                it("should return array of FK dimensions for foreign key field dimension", () => {
+                    pending();
+                    // Something like this:
+                    // fieldsInProductsTable = metadata.tables[1].fields.length;
+                    // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
+                });
+                it("should return empty array for non-FK field dimension", () => {
+                    pending();
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns a "fk->" clause', () => {
+                    expect(dimension.mbql()).toEqual([
+                        "fk->",
+                        ORDERS_PRODUCT_FK_FIELD_ID,
+                        PRODUCT_TILE_FIELD_ID
+                    ]);
+                });
+            });
+            describe("displayName()", () => {
+                it("returns the field name", () => {
+                    expect(dimension.displayName()).toEqual("Title");
+                });
+            });
+            describe("subDisplayName()", () => {
+                it("returns the field name", () => {
+                    expect(dimension.subDisplayName()).toEqual("Title");
+                });
+            });
+            describe("subTriggerDisplayName()", () => {
+                it("does not have a value", () => {
+                    expect(dimension.subTriggerDisplayName()).toBeFalsy();
+                });
+            });
+        });
+    });
+
+    describe("DatetimeFieldDimension", () => {
+        let dimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(
+                ["datetime-field", ORDERS_CREATED_DATE_FIELD_ID, "month"],
+                metadata
+            );
+        });
+
+        describe("STATIC METHODS", () => {
+            describe("dimensions(parentDimension)", () => {
+                it("should return an array with dimensions for each datetime unit", () => {
+                    pending();
+                    // Something like this:
+                    // fieldsInProductsTable = metadata.tables[1].fields.length;
+                    // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
+                });
+                it("should return empty array for non-date field dimension", () => {
+                    pending();
+                });
+            });
+            describe("defaultDimension(parentDimension)", () => {
+                it("should return dimension with 'day' datetime unit", () => {
+                    pending();
+                });
+                it("should return null for non-date field dimension", () => {
+                    pending();
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns a "datetime-field" clause', () => {
+                    expect(dimension.mbql()).toEqual([
+                        "datetime-field",
+                        ["field-id", ORDERS_CREATED_DATE_FIELD_ID],
+                        "month"
+                    ]);
+                });
+            });
+            describe("displayName()", () => {
+                it("returns the field name", () => {
+                    expect(dimension.displayName()).toEqual("Created At");
+                });
+            });
+            describe("subDisplayName()", () => {
+                it("returns 'Month'", () => {
+                    expect(dimension.subDisplayName()).toEqual("Month");
+                });
+            });
+            describe("subTriggerDisplayName()", () => {
+                it("returns 'by month'", () => {
+                    expect(dimension.subTriggerDisplayName()).toEqual(
+                        "by month"
+                    );
+                });
+            });
+        });
+    });
+
+    describe("BinningStrategyDimension", () => {
+        let dimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(
+                ["field-id", ORDERS_TOTAL_FIELD_ID],
+                metadata
+            ).dimensions()[1];
+        });
+
+        describe("STATIC METHODS", () => {
+            describe("dimensions(parentDimension)", () => {
+                it("should return an array of dimensions based on default binning", () => {
+                    pending();
+                });
+                it("should return empty array for non-number field dimension", () => {
+                    pending();
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns a "binning-strategy" clause', () => {
+                    expect(dimension.mbql()).toEqual([
+                        "binning-strategy",
+                        ["field-id", ORDERS_TOTAL_FIELD_ID],
+                        "num-bins",
+                        10
+                    ]);
+                });
+            });
+            describe("displayName()", () => {
+                it("returns the field name", () => {
+                    expect(dimension.displayName()).toEqual("Total");
+                });
+            });
+            describe("subDisplayName()", () => {
+                it("returns '10 bins'", () => {
+                    expect(dimension.subDisplayName()).toEqual("10 bins");
+                });
+            });
+
+            describe("subTriggerDisplayName()", () => {
+                it("returns '10 bins'", () => {
+                    expect(dimension.subTriggerDisplayName()).toEqual(
+                        "10 bins"
+                    );
+                });
+            });
+        });
+    });
+
+    describe("ExpressionDimension", () => {
+        let dimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(
+                ["expression", "Hello World"],
+                metadata
+            );
+        });
+
+        describe("STATIC METHODS", () => {
+            describe("dimensions(parentDimension)", () => {
+                it("should return array of FK dimensions for foreign key field dimension", () => {
+                    pending();
+                    // Something like this:
+                    // fieldsInProductsTable = metadata.tables[1].fields.length;
+                    // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
+                });
+                it("should return empty array for non-FK field dimension", () => {
+                    pending();
+                });
+            });
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns an "expression" clause', () => {
+                    expect(dimension.mbql()).toEqual([
+                        "expression",
+                        "Hello World"
+                    ]);
+                });
+            });
+            describe("displayName()", () => {
+                it("returns the expression name", () => {
+                    expect(dimension.displayName()).toEqual("Hello World");
+                });
+            });
+        });
+    });
+
+    describe("AggregationDimension", () => {
+        let dimension = null;
+        beforeAll(() => {
+            dimension = Dimension.parseMBQL(["aggregation", 1], metadata);
+        });
+
+        describe("INSTANCE METHODS", () => {
+            describe("mbql()", () => {
+                it('returns an "aggregation" clause', () => {
+                    expect(dimension.mbql()).toEqual(["aggregation", 1]);
+                });
+            });
+        });
+    });
+});
diff --git a/frontend/test/metabase-lib/Dimension.spec.js b/frontend/test/metabase-lib/Dimension.spec.js
deleted file mode 100644
index c4409035bd606ee1e8d728434a9b04655750e4a9..0000000000000000000000000000000000000000
--- a/frontend/test/metabase-lib/Dimension.spec.js
+++ /dev/null
@@ -1,343 +0,0 @@
-import Dimension from "metabase-lib/lib/Dimension";
-
-import {
-    metadata,
-    ORDERS_TOTAL_FIELD_ID,
-    PRODUCT_CATEGORY_FIELD_ID,
-    ORDERS_CREATED_DATE_FIELD_ID,
-    ORDERS_PRODUCT_FK_FIELD_ID,
-    PRODUCT_TILE_FIELD_ID
-} from "__support__/sample_dataset_fixture";
-
-describe("Dimension", () => {
-    describe("STATIC METHODS", () => {
-        describe("parseMBQL(mbql metadata)", () => {
-            it("parses and format MBQL correctly", () => {
-                expect(Dimension.parseMBQL(1, metadata).mbql()).toEqual([
-                    "field-id",
-                    1
-                ]);
-                expect(
-                    Dimension.parseMBQL(["field-id", 1], metadata).mbql()
-                ).toEqual(["field-id", 1]);
-                expect(
-                    Dimension.parseMBQL(["fk->", 1, 2], metadata).mbql()
-                ).toEqual(["fk->", 1, 2]);
-                expect(
-                    Dimension.parseMBQL(
-                        ["datetime-field", 1, "month"],
-                        metadata
-                    ).mbql()
-                ).toEqual(["datetime-field", ["field-id", 1], "month"]);
-                expect(
-                    Dimension.parseMBQL(
-                        ["datetime-field", ["field-id", 1], "month"],
-                        metadata
-                    ).mbql()
-                ).toEqual(["datetime-field", ["field-id", 1], "month"]);
-                expect(
-                    Dimension.parseMBQL(
-                        ["datetime-field", ["fk->", 1, 2], "month"],
-                        metadata
-                    ).mbql()
-                ).toEqual(["datetime-field", ["fk->", 1, 2], "month"]);
-            });
-        });
-
-        describe("isEqual(other)", () => {
-            it("returns true for equivalent field-ids", () => {
-                const d1 = Dimension.parseMBQL(1, metadata);
-                const d2 = Dimension.parseMBQL(["field-id", 1], metadata);
-                expect(d1.isEqual(d2)).toEqual(true);
-                expect(d1.isEqual(["field-id", 1])).toEqual(true);
-                expect(d1.isEqual(1)).toEqual(true);
-            });
-            it("returns false for different type clauses", () => {
-                const d1 = Dimension.parseMBQL(["fk->", 1, 2], metadata);
-                const d2 = Dimension.parseMBQL(["field-id", 1], metadata);
-                expect(d1.isEqual(d2)).toEqual(false);
-            });
-            it("returns false for same type clauses with different arguments", () => {
-                const d1 = Dimension.parseMBQL(["fk->", 1, 2], metadata);
-                const d2 = Dimension.parseMBQL(["fk->", 1, 3], metadata);
-                expect(d1.isEqual(d2)).toEqual(false);
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("dimensions()", () => {
-            it("returns `dimension_options` of the underlying field if available", () => {
-                pending();
-            });
-            it("returns sub-dimensions for matching dimension if no `dimension_options`", () => {
-                // just a single scenario should be sufficient here as we will test
-                // `static dimensions()` individually for each dimension
-                pending();
-            });
-        });
-
-        describe("isSameBaseDimension(other)", () => {
-            it("returns true if the base dimensions are same", () => {
-                pending();
-            });
-            it("returns false if the base dimensions don't match", () => {
-                pending();
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("dimensions()", () => {
-            it("returns `default_dimension_option` of the underlying field if available", () => {
-                pending();
-            });
-            it("returns default dimension for matching dimension if no `default_dimension_option`", () => {
-                // just a single scenario should be sufficient here as we will test
-                // `static defaultDimension()` individually for each dimension
-                pending();
-            });
-        });
-    });
-});
-
-describe("FieldIDDimension", () => {
-    const dimension = Dimension.parseMBQL(
-        ["field-id", ORDERS_TOTAL_FIELD_ID],
-        metadata
-    );
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns a "field-id" clause', () => {
-                expect(dimension.mbql()).toEqual([
-                    "field-id",
-                    ORDERS_TOTAL_FIELD_ID
-                ]);
-            });
-        });
-        describe("displayName()", () => {
-            it("returns the field name", () => {
-                expect(dimension.displayName()).toEqual("Total");
-            });
-        });
-        describe("subDisplayName()", () => {
-            it("returns 'Continuous (no binning)' for numeric fields", () => {
-                expect(dimension.subDisplayName()).toEqual(
-                    "Continuous (no binning)"
-                );
-            });
-            it("returns 'Default' for non-numeric fields", () => {
-                expect(
-                    Dimension.parseMBQL(
-                        ["field-id", PRODUCT_CATEGORY_FIELD_ID],
-                        metadata
-                    ).subDisplayName()
-                ).toEqual("Default");
-            });
-        });
-        describe("subTriggerDisplayName()", () => {
-            it("does not have a value", () => {
-                expect(dimension.subTriggerDisplayName()).toBeFalsy();
-            });
-        });
-    });
-});
-
-describe("FKDimension", () => {
-    const dimension = Dimension.parseMBQL(
-        ["fk->", ORDERS_PRODUCT_FK_FIELD_ID, PRODUCT_TILE_FIELD_ID],
-        metadata
-    );
-
-    describe("STATIC METHODS", () => {
-        describe("dimensions(parentDimension)", () => {
-            it("should return array of FK dimensions for foreign key field dimension", () => {
-                pending();
-                // Something like this:
-                // fieldsInProductsTable = metadata.tables[1].fields.length;
-                // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
-            });
-            it("should return empty array for non-FK field dimension", () => {
-                pending();
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns a "fk->" clause', () => {
-                expect(dimension.mbql()).toEqual([
-                    "fk->",
-                    ORDERS_PRODUCT_FK_FIELD_ID,
-                    PRODUCT_TILE_FIELD_ID
-                ]);
-            });
-        });
-        describe("displayName()", () => {
-            it("returns the field name", () => {
-                expect(dimension.displayName()).toEqual("Title");
-            });
-        });
-        describe("subDisplayName()", () => {
-            it("returns the field name", () => {
-                expect(dimension.subDisplayName()).toEqual("Title");
-            });
-        });
-        describe("subTriggerDisplayName()", () => {
-            it("does not have a value", () => {
-                expect(dimension.subTriggerDisplayName()).toBeFalsy();
-            });
-        });
-    });
-});
-
-describe("DatetimeFieldDimension", () => {
-    const dimension = Dimension.parseMBQL(
-        ["datetime-field", ORDERS_CREATED_DATE_FIELD_ID, "month"],
-        metadata
-    );
-
-    describe("STATIC METHODS", () => {
-        describe("dimensions(parentDimension)", () => {
-            it("should return an array with dimensions for each datetime unit", () => {
-                pending();
-                // Something like this:
-                // fieldsInProductsTable = metadata.tables[1].fields.length;
-                // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
-            });
-            it("should return empty array for non-date field dimension", () => {
-                pending();
-            });
-        });
-        describe("defaultDimension(parentDimension)", () => {
-            it("should return dimension with 'day' datetime unit", () => {
-                pending();
-            });
-            it("should return null for non-date field dimension", () => {
-                pending();
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns a "datetime-field" clause', () => {
-                expect(dimension.mbql()).toEqual([
-                    "datetime-field",
-                    ["field-id", ORDERS_CREATED_DATE_FIELD_ID],
-                    "month"
-                ]);
-            });
-        });
-        describe("displayName()", () => {
-            it("returns the field name", () => {
-                expect(dimension.displayName()).toEqual("Created At");
-            });
-        });
-        describe("subDisplayName()", () => {
-            it("returns 'Month'", () => {
-                expect(dimension.subDisplayName()).toEqual("Month");
-            });
-        });
-        describe("subTriggerDisplayName()", () => {
-            it("returns 'by month'", () => {
-                expect(dimension.subTriggerDisplayName()).toEqual("by month");
-            });
-        });
-    });
-});
-
-describe("BinningStrategyDimension", () => {
-    const dimension = Dimension.parseMBQL(
-        ["binning-strategy", ORDERS_TOTAL_FIELD_ID, "default", 10],
-        metadata
-    );
-
-    describe("STATIC METHODS", () => {
-        describe("dimensions(parentDimension)", () => {
-            it("should return an array of dimensions based on default binning", () => {
-                pending();
-            });
-            it("should return empty array for non-number field dimension", () => {
-                pending();
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns a "binning-strategy" clause', () => {
-                expect(dimension.mbql()).toEqual([
-                    "binning-strategy",
-                    ["field-id", ORDERS_TOTAL_FIELD_ID],
-                    "default",
-                    10
-                ]);
-            });
-        });
-        describe("displayName()", () => {
-            it("returns the field name", () => {
-                expect(dimension.displayName()).toEqual("Total");
-            });
-        });
-        describe("subDisplayName()", () => {
-            it("returns 'Quantized into 10 bins'", () => {
-                expect(dimension.subDisplayName()).toEqual(
-                    "Quantized into 10 bins"
-                );
-            });
-        });
-        describe("subTriggerDisplayName()", () => {
-            it("returns '10 bins'", () => {
-                expect(dimension.subTriggerDisplayName()).toEqual("10 bins");
-            });
-        });
-    });
-});
-
-describe("ExpressionDimension", () => {
-    const dimension = Dimension.parseMBQL(
-        ["expression", "Hello World"],
-        metadata
-    );
-
-    describe("STATIC METHODS", () => {
-        describe("dimensions(parentDimension)", () => {
-            it("should return array of FK dimensions for foreign key field dimension", () => {
-                pending();
-                // Something like this:
-                // fieldsInProductsTable = metadata.tables[1].fields.length;
-                // expect(FKDimension.dimensions(fkFieldIdDimension).length).toEqual(fieldsInProductsTable);
-            });
-            it("should return empty array for non-FK field dimension", () => {
-                pending();
-            });
-        });
-    });
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns an "expression" clause', () => {
-                expect(dimension.mbql()).toEqual(["expression", "Hello World"]);
-            });
-        });
-        describe("displayName()", () => {
-            it("returns the expression name", () => {
-                expect(dimension.displayName()).toEqual("Hello World");
-            });
-        });
-    });
-});
-
-describe("AggregationDimension", () => {
-    const dimension = Dimension.parseMBQL(["aggregation", 1], metadata);
-
-    describe("INSTANCE METHODS", () => {
-        describe("mbql()", () => {
-            it('returns an "aggregation" clause', () => {
-                expect(dimension.mbql()).toEqual(["aggregation", 1]);
-            });
-        });
-    });
-});
diff --git a/frontend/test/metabase-lib/Question.spec.js b/frontend/test/metabase-lib/Question.spec.js
index 3c22093cc45d1cb2bca5aaa9cbb7ce280d0f30ed..c88abd0274d7c4169fa9bae11d64862bcf7f818c 100644
--- a/frontend/test/metabase-lib/Question.spec.js
+++ b/frontend/test/metabase-lib/Question.spec.js
@@ -337,7 +337,7 @@ describe("Question", () => {
                     query: {
                         source_table: ORDERS_TABLE_ID,
                         aggregation: [["count"]],
-                        breakout: [["field-id", ORDERS_CREATED_DATE_FIELD_ID]]
+                        breakout: ["field-id", ORDERS_CREATED_DATE_FIELD_ID]
                     }
                 });
                 // Make sure we haven't mutated the underlying query
@@ -360,7 +360,7 @@ describe("Question", () => {
                     query: {
                         source_table: ORDERS_TABLE_ID,
                         aggregation: [["count"]],
-                        breakout: [["field-id", ORDERS_PK_FIELD_ID]]
+                        breakout: ["field-id", ORDERS_PK_FIELD_ID]
                     }
                 });
                 // Make sure we haven't mutated the underlying query
diff --git a/frontend/test/modes/actions/CompoundQueryAction.spec.js b/frontend/test/modes/actions/CompoundQueryAction.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..36a6365c66e6b98e5f01325acdf9a1a732173488
--- /dev/null
+++ b/frontend/test/modes/actions/CompoundQueryAction.spec.js
@@ -0,0 +1,43 @@
+/* eslint-disable flowtype/require-valid-file-annotation */
+
+import CompoundQueryAction from "../../../src/metabase/qb/components/actions/CompoundQueryAction";
+
+import Question from "metabase-lib/lib/Question";
+
+import {
+    native_orders_count_card,
+    orders_count_card,
+    unsaved_native_orders_count_card,
+    metadata
+} from "__support__/sample_dataset_fixture";
+
+describe("CompoundQueryAction", () => {
+    it("should not suggest a compound query for an unsaved native query", () => {
+        const question = new Question(
+            metadata,
+            unsaved_native_orders_count_card
+        );
+        expect(CompoundQueryAction({ question })).toHaveLength(0);
+    });
+    it("should suggest a compound query for a mbql query", () => {
+        const question = new Question(metadata, orders_count_card);
+
+        const actions = CompoundQueryAction({ question });
+        expect(actions).toHaveLength(1);
+        const newCard = actions[0].question().card();
+        expect(newCard.dataset_query.query).toEqual({
+            source_table: "card__2"
+        });
+    });
+
+    it("should return a nested query for a saved native card", () => {
+        const question = new Question(metadata, native_orders_count_card);
+
+        const actions = CompoundQueryAction({ question });
+        expect(actions).toHaveLength(1);
+        const newCard = actions[0].question().card();
+        expect(newCard.dataset_query.query).toEqual({
+            source_table: "card__3"
+        });
+    });
+});
diff --git a/frontend/test/modes/actions/SummarizeBySegmentMetricAction.unit.spec.js b/frontend/test/modes/actions/SummarizeBySegmentMetricAction.unit.spec.js
index 871f456a6db2174d3b10c4bddaa87bbfa10c9af4..2298be260b0b652af9890de853f691ca4ae99d7a 100644
--- a/frontend/test/modes/actions/SummarizeBySegmentMetricAction.unit.spec.js
+++ b/frontend/test/modes/actions/SummarizeBySegmentMetricAction.unit.spec.js
@@ -16,6 +16,31 @@ const question = Question.create({
 });
 
 describe("SummarizeBySegmentMetricAction", () => {
+    describe("aggregation options", () => {
+        it("should show only a subset of all query aggregations", () => {
+            const hasAggregationOption = (popover, optionName) =>
+                popover.find(
+                    `.List-item-title[children="${optionName}"]`
+                ).length === 1;
+
+            const action = SummarizeBySegmentMetricAction({ question })[0];
+            const popover = mount(
+                action.popover({
+                    onClose: () => {},
+                    onChangeCardAndRun: () => {}
+                })
+            );
+
+            expect(hasAggregationOption(popover, "Count of rows")).toBe(true);
+            expect(hasAggregationOption(popover, "Average of ...")).toBe(true);
+            expect(hasAggregationOption(popover, "Raw data")).toBe(false);
+            expect(
+                hasAggregationOption(popover, "Cumulative count of rows")
+            ).toBe(false);
+            expect(popover.find(".List-section-title").length).toBe(0);
+        });
+    });
+
     describe("onChangeCardAndRun", async () => {
         it("should be called for 'Count of rows' choice", async () => {
             const action = SummarizeBySegmentMetricAction({ question })[0];
diff --git a/frontend/test/modes/drills/PivotByCategoryDrill.integ.spec.js b/frontend/test/modes/drills/PivotByCategoryDrill.integ.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..ac7ce1f60052095a7d71bb249fc5b161084866bc
--- /dev/null
+++ b/frontend/test/modes/drills/PivotByCategoryDrill.integ.spec.js
@@ -0,0 +1,32 @@
+/* eslint-disable flowtype/require-valid-file-annotation */
+
+import {
+    DATABASE_ID,
+    ORDERS_TABLE_ID,
+    metadata
+} from "__support__/sample_dataset_fixture";
+import Question from "metabase-lib/lib/Question";
+import { login } from "__support__/integrated_tests";
+
+describe("PivotByCategoryDrill", () => {
+    beforeAll(async () => {
+        await login();
+    });
+
+    it("should return a result for Order count pivoted by Subtotal", async () => {
+        // NOTE: Using the fixture metadata for now because trying to load the metadata involves a lot of Redux magic
+        const question = Question.create({
+            databaseId: DATABASE_ID,
+            tableId: ORDERS_TABLE_ID,
+            metadata
+        })
+            .query()
+            .addAggregation(["count"])
+            .question();
+
+        const pivotedQuestion = question.pivot([["field-id", 4]]);
+
+        const results = await pivotedQuestion.getResults();
+        expect(results[0]).toBeDefined();
+    });
+});
diff --git a/frontend/test/modes/lib/drilldown.spec.js b/frontend/test/modes/lib/drilldown.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..836950d7f3825545b347c82845a4f46168dec98c
--- /dev/null
+++ b/frontend/test/modes/lib/drilldown.spec.js
@@ -0,0 +1,208 @@
+/* eslint-disable flowtype/require-valid-file-annotation */
+
+import {
+    metadata,
+    ORDERS_CREATED_DATE_FIELD_ID,
+    ORDERS_TOTAL_FIELD_ID,
+    PEOPLE_LATITUDE_FIELD_ID,
+    PEOPLE_LONGITUDE_FIELD_ID,
+    PEOPLE_STATE_FIELD_ID
+} from "__support__/sample_dataset_fixture";
+
+import { drillDownForDimensions } from "../../../src/metabase/qb/lib/drilldown";
+
+const col = (fieldId, extra = {}) => ({
+    ...metadata.fields[fieldId],
+    ...extra
+});
+
+describe("drilldown", () => {
+    describe("drillDownForDimensions", () => {
+        it("should return null if there are no dimensions", () => {
+            const drillDown = drillDownForDimensions([], metadata);
+            expect(drillDown).toEqual(null);
+        });
+
+        // DATE/TIME:
+        it("should return breakout by quarter for breakout by year", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(ORDERS_CREATED_DATE_FIELD_ID, {
+                            unit: "year"
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "datetime-field",
+                        ["field-id", ORDERS_CREATED_DATE_FIELD_ID],
+                        "quarter"
+                    ]
+                ]
+            });
+        });
+        it("should return breakout by minute for breakout by hour", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(ORDERS_CREATED_DATE_FIELD_ID, {
+                            unit: "hour"
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "datetime-field",
+                        ["field-id", ORDERS_CREATED_DATE_FIELD_ID],
+                        "minute"
+                    ]
+                ]
+            });
+        });
+        it("should return null for breakout by minute", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(ORDERS_CREATED_DATE_FIELD_ID, {
+                            unit: "minute"
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual(null);
+        });
+
+        // NUMERIC:
+        it("should reset breakout to default binning for num-bins strategy", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(ORDERS_TOTAL_FIELD_ID, {
+                            binning_info: {
+                                binning_strategy: "num-bins",
+                                num_bins: 10
+                            }
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "binning-strategy",
+                        ["field-id", ORDERS_TOTAL_FIELD_ID],
+                        "default"
+                    ]
+                ]
+            });
+        });
+
+        it("should return breakout with bin-width of 1 for bin-width of 10", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(ORDERS_TOTAL_FIELD_ID, {
+                            binning_info: {
+                                binning_strategy: "bin-width",
+                                bin_width: 10
+                            }
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "binning-strategy",
+                        ["field-id", ORDERS_TOTAL_FIELD_ID],
+                        "bin-width",
+                        1
+                    ]
+                ]
+            });
+        });
+
+        // GEO:
+        it("should return breakout by lat/lon for breakout by state", () => {
+            const drillDown = drillDownForDimensions(
+                [{ column: col(PEOPLE_STATE_FIELD_ID) }],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "binning-strategy",
+                        ["field-id", PEOPLE_LATITUDE_FIELD_ID],
+                        "bin-width",
+                        1
+                    ],
+                    [
+                        "binning-strategy",
+                        ["field-id", PEOPLE_LONGITUDE_FIELD_ID],
+                        "bin-width",
+                        1
+                    ]
+                ]
+            });
+        });
+        it("should return breakout with 10 degree bin-width for lat/lon breakout with 30 degree bin-width", () => {
+            const drillDown = drillDownForDimensions(
+                [
+                    {
+                        column: col(PEOPLE_LATITUDE_FIELD_ID, {
+                            binning_info: {
+                                binning_strategy: "bin-width",
+                                bin_width: 30
+                            }
+                        })
+                    },
+                    {
+                        column: col(PEOPLE_LONGITUDE_FIELD_ID, {
+                            binning_info: {
+                                binning_strategy: "bin-width",
+                                bin_width: 30
+                            }
+                        })
+                    }
+                ],
+                metadata
+            );
+            expect(drillDown).toEqual({
+                breakouts: [
+                    [
+                        "binning-strategy",
+                        ["field-id", PEOPLE_LATITUDE_FIELD_ID],
+                        "bin-width",
+                        10
+                    ],
+                    [
+                        "binning-strategy",
+                        ["field-id", PEOPLE_LONGITUDE_FIELD_ID],
+                        "bin-width",
+                        10
+                    ]
+                ]
+            });
+        });
+
+        // it("should return breakout by state for breakout by country", () => {
+        //     const drillDown = drillDownForDimensions([
+        //         { column: col(PEOPLE_STATE_FIELD_ID) }
+        //     ], metadata);
+        //     expect(drillDown).toEqual({ breakouts: [
+        //         ["binning-strategy", ["field-id", PEOPLE_LATITUDE_FIELD_ID], "bin-width", 1],
+        //         ["binning-strategy", ["field-id", PEOPLE_LONGITUDE_FIELD_ID], "bin-width", 1],
+        //     ]});
+        // })
+    });
+});
diff --git a/frontend/test/query_builder/query_builder.integ.spec.js b/frontend/test/query_builder/query_builder.integ.spec.js
index ab7cb72ffafe60b6c0ff7689b92e0bc0ec703510..9a6fa2c0a9add6a5c43a44efc7f0d1bd44f63cfa 100644
--- a/frontend/test/query_builder/query_builder.integ.spec.js
+++ b/frontend/test/query_builder/query_builder.integ.spec.js
@@ -8,18 +8,68 @@ import {
 import React from 'react';
 import QueryBuilder from "metabase/query_builder/containers/QueryBuilder";
 import { mount } from "enzyme";
+import {
+    INITIALIZE_QB,
+    QUERY_COMPLETED,
+    QUERY_ERRORED,
+    RUN_QUERY,
+    CANCEL_QUERY,
+    SET_DATASET_QUERY,
+    setQueryDatabase,
+    setQuerySourceTable,
+    setDatasetQuery,
+    NAVIGATE_TO_NEW_CARD,
+    UPDATE_URL
+} from "metabase/query_builder/actions";
+import { SET_ERROR_PAGE } from "metabase/redux/app";
+
+import QueryHeader from "metabase/query_builder/components/QueryHeader";
+import { VisualizationEmptyState } from "metabase/query_builder/components/QueryVisualization";
+import { FETCH_TABLE_METADATA } from "metabase/redux/metadata";
+import FieldList, { DimensionPicker } from "metabase/query_builder/components/FieldList";
+import FilterPopover from "metabase/query_builder/components/filters/FilterPopover";
+
+import CheckBox from "metabase/components/CheckBox";
+import FilterWidget from "metabase/query_builder/components/filters/FilterWidget";
+import FieldName from "metabase/query_builder/components/FieldName";
+import RunButton from "metabase/query_builder/components/RunButton";
+
+import VisualizationSettings from "metabase/query_builder/components/VisualizationSettings";
+import Visualization from "metabase/visualizations/components/Visualization";
+import TableSimple from "metabase/visualizations/components/TableSimple";
+
 import {
     ORDERS_TOTAL_FIELD_ID,
     unsavedOrderCountQuestion
 } from "__support__/sample_dataset_fixture";
-import { CANCEL_QUERY, INITIALIZE_QB, QUERY_COMPLETED, QUERY_ERRORED, RUN_QUERY } from "metabase/query_builder/actions";
 import VisualizationError from "metabase/query_builder/components/VisualizationError";
+import OperatorSelector from "metabase/query_builder/components/filters/OperatorSelector";
+import BreakoutWidget from "metabase/query_builder/components/BreakoutWidget";
+import { getCard, getQueryResults } from "metabase/query_builder/selectors";
+import { TestTable } from "metabase/visualizations/visualizations/Table";
+import ChartClickActions from "metabase/visualizations/components/ChartClickActions";
 
-import { VisualizationEmptyState } from "metabase/query_builder/components/QueryVisualization";
-import Visualization from "metabase/visualizations/components/Visualization";
-import RunButton from "metabase/query_builder/components/RunButton";
-import { SET_ERROR_PAGE } from "metabase/redux/app";
-import QueryHeader from "metabase/query_builder/components/QueryHeader";
+import { delay } from "metabase/lib/promise";
+
+const initQbWithDbAndTable = (dbId, tableId) => {
+    return async () => {
+        const store = await createTestStore()
+        store.pushPath("/question");
+        const qb = mount(store.connectContainer(<QueryBuilder />));
+        await store.waitForActions([INITIALIZE_QB]);
+
+        // Point the new question at the requested database and table
+        store.dispatch(setQueryDatabase(dbId));
+        store.dispatch(setQuerySourceTable(tableId));
+        await store.waitForActions([FETCH_TABLE_METADATA]);
+        store.resetDispatchedActions();
+
+        return { store, qb }
+    }
+}
+
+const initQbWithOrdersTable = initQbWithDbAndTable(1, 1)
+const initQBWithReviewsTable = initQbWithDbAndTable(1, 4)
 
 describe("QueryBuilder", () => {
     beforeAll(async () => {
@@ -42,6 +92,41 @@ describe("QueryBuilder", () => {
         });
     });
 
+    describe("visualization settings", () => {
+        it("lets you hide a field for a raw data table", async () => {
+            const { store, qb } = await initQBWithReviewsTable();
+
+            // Run the raw data query
+            qb.find(RunButton).simulate("click");
+            await store.waitForActions([QUERY_COMPLETED]);
+
+            const vizSettings = qb.find(VisualizationSettings);
+            vizSettings.find(".Icon-gear").simulate("click");
+
+            const settingsModal = vizSettings.find(".test-modal")
+            const table = settingsModal.find(TableSimple);
+
+            expect(table.find('div[children="Created At"]').length).toBe(1);
+
+            const doneButton = settingsModal.find(".Button--primary")
+            expect(doneButton.length).toBe(1)
+
+            const fieldsToIncludeCheckboxes = settingsModal.find(CheckBox)
+            expect(fieldsToIncludeCheckboxes.length).toBe(6)
+
+            fieldsToIncludeCheckboxes.at(3).simulate("click");
+
+            expect(table.find('div[children="Created At"]').length).toBe(0);
+
+            // Save the settings
+            doneButton.simulate("click");
+            expect(vizSettings.find(".test-modal").length).toBe(0);
+
+            // Don't test the contents of actual table visualization here as react-virtualized doesn't seem to work
+            // very well together with Enzyme
+        })
+    })
+
     describe("for saved questions", async () => {
         let savedQuestion = null;
         beforeAll(async () => {
@@ -159,4 +244,484 @@ describe("QueryBuilder", () => {
             });
         });
     });
+
+    describe("editor bar", async() => {
+        describe("for filtering by Rating category field in Reviews table", () =>  {
+            let store = null;
+            let qb = null;
+            beforeAll(async () => {
+                ({ store, qb } = await initQBWithReviewsTable());
+            })
+
+            // NOTE: Sequential tests; these may fail in a cascading way but shouldn't affect other tests
+
+            it("lets you add Rating field as a filter", async () => {
+                // TODO Atte Keinänen 7/13/17: Extracting GuiQueryEditor's contents to smaller React components
+                // would make testing with selectors more natural
+                const filterSection = qb.find('.GuiBuilder-filtered-by');
+                const addFilterButton = filterSection.find('.AddButton');
+                addFilterButton.simulate("click");
+
+                const filterPopover = filterSection.find(FilterPopover);
+
+                const ratingFieldButton = filterPopover.find(FieldList).find('h4[children="Rating"]')
+                expect(ratingFieldButton.length).toBe(1);
+                ratingFieldButton.simulate('click');
+            })
+
+            it("lets you see its field values in filter popover", () => {
+                // Same as before applies to FilterPopover too: individual list items could be in their own components
+                const filterPopover = qb.find(FilterPopover);
+                const fieldItems = filterPopover.find('li');
+                expect(fieldItems.length).toBe(5);
+
+                // should be in alphabetical order
+                expect(fieldItems.first().text()).toBe("1")
+                expect(fieldItems.last().text()).toBe("5")
+            })
+
+            it("lets you set 'Rating is 5' filter", async () => {
+                const filterPopover = qb.find(FilterPopover);
+                const fieldItems = filterPopover.find('li');
+                const widgetFieldItem = fieldItems.last();
+                const widgetCheckbox = widgetFieldItem.find(CheckBox);
+
+                expect(widgetCheckbox.props().checked).toBe(false);
+                widgetFieldItem.children().first().simulate("click");
+                expect(widgetCheckbox.props().checked).toBe(true);
+
+                const addFilterButton = filterPopover.find('button[children="Add filter"]')
+                addFilterButton.simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+                store.resetDispatchedActions();
+
+                expect(qb.find(FilterPopover).length).toBe(0);
+                const filterWidget = qb.find(FilterWidget);
+                expect(filterWidget.length).toBe(1);
+                expect(filterWidget.text()).toBe("Rating is equal to5");
+            })
+
+            it("lets you set 'Rating is 5 or 4' filter", async () => {
+                // reopen the filter popover by clicking filter widget
+                const filterWidget = qb.find(FilterWidget);
+                filterWidget.find(FieldName).simulate('click');
+
+                const filterPopover = qb.find(FilterPopover);
+                const fieldItems = filterPopover.find('li');
+                const widgetFieldItem = fieldItems.at(3);
+                const gadgetCheckbox = widgetFieldItem.find(CheckBox);
+
+                expect(gadgetCheckbox.props().checked).toBe(false);
+                widgetFieldItem.children().first().simulate("click");
+                expect(gadgetCheckbox.props().checked).toBe(true);
+
+                const addFilterButton = filterPopover.find('button[children="Update filter"]')
+                addFilterButton.simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+
+                expect(qb.find(FilterPopover).length).toBe(0);
+                expect(filterWidget.text()).toBe("Rating is equal to2 selections");
+            })
+
+            it("lets you remove the added filter", async () => {
+                const filterWidget = qb.find(FilterWidget);
+                filterWidget.find(".Icon-close").simulate('click');
+                await store.waitForActions([SET_DATASET_QUERY])
+
+                expect(qb.find(FilterWidget).length).toBe(0);
+            })
+        })
+
+        describe("for filtering by ID number field in Reviews table", () => {
+            let store = null;
+            let qb = null;
+            beforeAll(async () => {
+                ({ store, qb } = await initQBWithReviewsTable());
+            })
+
+            it("lets you add ID field as a filter", async () => {
+                const filterSection = qb.find('.GuiBuilder-filtered-by');
+                const addFilterButton = filterSection.find('.AddButton');
+                addFilterButton.simulate("click");
+
+                const filterPopover = filterSection.find(FilterPopover);
+
+                const ratingFieldButton = filterPopover.find(FieldList).find('h4[children="ID"]')
+                expect(ratingFieldButton.length).toBe(1);
+                ratingFieldButton.simulate('click');
+            })
+
+            it("lets you see a correct number of operators in filter popover", () => {
+                const filterPopover = qb.find(FilterPopover);
+
+                const operatorSelector = filterPopover.find(OperatorSelector);
+                const moreOptionsIcon = operatorSelector.find(".Icon-chevrondown");
+                moreOptionsIcon.simulate("click");
+
+                expect(operatorSelector.find("button").length).toBe(9)
+            })
+
+            it("lets you set 'ID is 10' filter", async () => {
+                const filterPopover = qb.find(FilterPopover);
+                const filterInput = filterPopover.find("textarea");
+                filterInput.simulate('change', { target: { value: "10" }})
+
+                const addFilterButton = filterPopover.find('button[children="Add filter"]')
+                addFilterButton.simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+                store.resetDispatchedActions();
+
+                expect(qb.find(FilterPopover).length).toBe(0);
+                const filterWidget = qb.find(FilterWidget);
+                expect(filterWidget.length).toBe(1);
+                expect(filterWidget.text()).toBe("ID is equal to10");
+            })
+
+            it("lets you update the filter to 'ID is 10 or 11'", async () => {
+                const filterWidget = qb.find(FilterWidget);
+                filterWidget.find(FieldName).simulate('click');
+
+                const filterPopover = qb.find(FilterPopover);
+                const filterInput = filterPopover.find("textarea");
+
+                // Intentionally use a value with lots of extra spaces
+                filterInput.simulate('change', { target: { value: "  10,      11" }})
+
+                const addFilterButton = filterPopover.find('button[children="Update filter"]')
+                addFilterButton.simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+
+                expect(qb.find(FilterPopover).length).toBe(0);
+                expect(filterWidget.text()).toBe("ID is equal to2 selections");
+            });
+
+            it("lets you update the filter to 'ID is between 1 and 100'", async () => {
+                const filterWidget = qb.find(FilterWidget);
+                filterWidget.find(FieldName).simulate('click');
+
+                const filterPopover = qb.find(FilterPopover);
+                const operatorSelector = filterPopover.find(OperatorSelector);
+                operatorSelector.find('button[children="Between"]').simulate("click");
+
+                const betweenInputs = filterPopover.find("textarea");
+                expect(betweenInputs.length).toBe(2);
+
+                expect(betweenInputs.at(0).props().value).toBe("10, 11");
+
+                betweenInputs.at(1).simulate('change', { target: { value: "asdasd" }})
+                const updateFilterButton = filterPopover.find('button[children="Update filter"]')
+                expect(updateFilterButton.props().className).toMatch(/disabled/);
+
+                betweenInputs.at(0).simulate('change', { target: { value: "1" }})
+                betweenInputs.at(1).simulate('change', { target: { value: "100" }})
+
+                updateFilterButton.simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+                expect(qb.find(FilterPopover).length).toBe(0);
+                expect(filterWidget.text()).toBe("ID between1100");
+            });
+        })
+
+        describe("for grouping by Total in Orders table", async () => {
+            let store = null;
+            let qb = null;
+            beforeAll(async () => {
+                ({ store, qb } = await initQbWithOrdersTable());
+            })
+
+            it("lets you group by Total with the default binning option", async () => {
+                const breakoutSection = qb.find('.GuiBuilder-groupedBy');
+                const addBreakoutButton = breakoutSection.find('.AddButton');
+                addBreakoutButton.simulate("click");
+
+                const breakoutPopover = breakoutSection.find("#BreakoutPopover")
+                const subtotalFieldButton = breakoutPopover.find(FieldList).find('h4[children="Total"]')
+                expect(subtotalFieldButton.length).toBe(1);
+                subtotalFieldButton.simulate('click');
+
+                await store.waitForActions([SET_DATASET_QUERY])
+
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                expect(breakoutWidget.text()).toBe("Total: Auto binned");
+            });
+            it("produces correct results for default binning option", async () => {
+                // Run the raw data query
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                // We can use the visible row count as we have a low number of result rows
+                expect(qb.find(".ShownRowCount").text()).toBe("Showing 6 rows");
+
+                // Get the binning
+                const results = getQueryResults(store.getState())[0]
+                const breakoutBinningInfo = results.data.cols[0].binning_info;
+                expect(breakoutBinningInfo.binning_strategy).toBe("num-bins");
+                expect(breakoutBinningInfo.bin_width).toBe(20);
+                expect(breakoutBinningInfo.num_bins).toBe(8);
+            })
+            it("lets you change the binning strategy to 100 bins", async () => {
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                breakoutWidget.find(FieldName).children().first().simulate("click")
+                const breakoutPopover = qb.find("#BreakoutPopover")
+
+                const subtotalFieldButton = breakoutPopover.find(FieldList).find('.List-item--selected h4[children="Auto binned"]')
+                expect(subtotalFieldButton.length).toBe(1);
+                subtotalFieldButton.simulate('click');
+
+                qb.find(DimensionPicker).find('a[children="100 bins"]').simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+                expect(breakoutWidget.text()).toBe("Total: 100 bins");
+            });
+            it("produces correct results for 100 bins", async () => {
+                store.resetDispatchedActions();
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                expect(qb.find(".ShownRowCount").text()).toBe("Showing 95 rows");
+                const results = getQueryResults(store.getState())[0]
+                const breakoutBinningInfo = results.data.cols[0].binning_info;
+                expect(breakoutBinningInfo.binning_strategy).toBe("num-bins");
+                expect(breakoutBinningInfo.bin_width).toBe(1);
+                expect(breakoutBinningInfo.num_bins).toBe(100);
+            })
+            it("lets you disable the binning", async () => {
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                breakoutWidget.find(FieldName).children().first().simulate("click")
+                const breakoutPopover = qb.find("#BreakoutPopover")
+
+                const subtotalFieldButton = breakoutPopover.find(FieldList).find('.List-item--selected h4[children="100 bins"]')
+                expect(subtotalFieldButton.length).toBe(1);
+                subtotalFieldButton.simulate('click');
+
+                qb.find(DimensionPicker).find('a[children="Don\'t bin"]').simulate("click");
+            });
+            it("produces the expected count of rows when no binning", async () => {
+                store.resetDispatchedActions();
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                // We just want to see that there are a lot more rows than there would be if a binning was active
+                expect(qb.find(".ShownRowCount").text()).toBe("Showing first 2,000 rows");
+
+                const results = getQueryResults(store.getState())[0]
+                expect(results.data.cols[0].binning_info).toBe(undefined);
+            });
+        })
+
+        describe("for grouping by Latitude location field through Users FK in Orders table", async () => {
+            let store = null;
+            let qb = null;
+            beforeAll(async () => {
+                ({ store, qb } = await initQbWithOrdersTable());
+            })
+
+            it("lets you group by Latitude with the default binning option", async () => {
+                const breakoutSection = qb.find('.GuiBuilder-groupedBy');
+                const addBreakoutButton = breakoutSection.find('.AddButton');
+                addBreakoutButton.simulate("click");
+
+                const breakoutPopover = breakoutSection.find("#BreakoutPopover")
+
+                const userSectionButton = breakoutPopover.find(FieldList).find('h3[children="User"]')
+                expect(userSectionButton.length).toBe(1);
+                userSectionButton.simulate("click");
+
+                const subtotalFieldButton = breakoutPopover.find(FieldList).find('h4[children="Latitude"]')
+                expect(subtotalFieldButton.length).toBe(1);
+                subtotalFieldButton.simulate('click');
+
+                await store.waitForActions([SET_DATASET_QUERY])
+
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                expect(breakoutWidget.text()).toBe("Latitude: Auto binned");
+            });
+
+            it("produces correct results for default binning option", async () => {
+                // Run the raw data query
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                expect(qb.find(".ShownRowCount").text()).toBe("Showing 18 rows");
+
+                const results = getQueryResults(store.getState())[0]
+                const breakoutBinningInfo = results.data.cols[0].binning_info;
+                expect(breakoutBinningInfo.binning_strategy).toBe("bin-width");
+                expect(breakoutBinningInfo.bin_width).toBe(10);
+                expect(breakoutBinningInfo.num_bins).toBe(18);
+            })
+
+            it("lets you group by Latitude with the 'Bin every 1 degree'", async () => {
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                breakoutWidget.find(FieldName).children().first().simulate("click")
+                const breakoutPopover = qb.find("#BreakoutPopover")
+
+                const subtotalFieldButton = breakoutPopover.find(FieldList).find('.List-item--selected h4[children="Auto binned"]')
+                expect(subtotalFieldButton.length).toBe(1);
+                subtotalFieldButton.simulate('click');
+
+                qb.find(DimensionPicker).find('a[children="Bin every 1 degree"]').simulate("click");
+
+                await store.waitForActions([SET_DATASET_QUERY])
+                expect(breakoutWidget.text()).toBe("Latitude: 1°");
+            });
+            it("produces correct results for 'Bin every 1 degree'", async () => {
+                // Run the binned aggregation query (count grouped by Latitude in 1° bins)
+                store.resetDispatchedActions();
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                expect(qb.find(".ShownRowCount").text()).toBe("Showing 180 rows");
+
+                const results = getQueryResults(store.getState())[0]
+                const breakoutBinningInfo = results.data.cols[0].binning_info;
+                expect(breakoutBinningInfo.binning_strategy).toBe("bin-width");
+                expect(breakoutBinningInfo.bin_width).toBe(1);
+                expect(breakoutBinningInfo.num_bins).toBe(180);
+            })
+        });
+    })
+
+    describe("drill-through", () => {
+        describe("Zoom In action for broken out fields", () => {
+            it("works for Count of rows aggregation and Subtotal 50 Bins breakout", async () => {
+                const {store, qb} = await initQbWithOrdersTable();
+                await store.dispatch(setDatasetQuery({
+                    database: 1,
+                    type: 'query',
+                    query: {
+                        source_table: 1,
+                        breakout: [['binning-strategy', ['field-id', 6], 'num-bins', 50]],
+                        aggregation: [['count']]
+                    }
+                }));
+
+
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                const table = qb.find(TestTable);
+                const firstRowCells = table.find("tbody tr").first().find("td");
+                expect(firstRowCells.length).toBe(2);
+
+                expect(firstRowCells.first().text()).toBe("12  –  14");
+
+                const countCell = firstRowCells.last();
+                expect(countCell.text()).toBe("387");
+                countCell.children().first().simulate("click");
+
+                // Drill-through is delayed in handleVisualizationClick of Visualization.jsx by 100ms
+                await delay(150);
+
+                store.resetDispatchedActions();
+                qb.find(ChartClickActions).find('div[children="Zoom in"]').simulate("click");
+
+                // Must await here, otherwise the assertions below can run before the drill-through completes
+                await store.waitForActions([NAVIGATE_TO_NEW_CARD, UPDATE_URL, QUERY_COMPLETED]);
+
+                const breakoutWidget = qb.find(BreakoutWidget).first();
+                expect(breakoutWidget.text()).toBe("Total: Auto binned");
+
+                // Expecting to see the correct lineage (just a simple sanity check)
+                const title = qb.find(QueryHeader).find("h1")
+                expect(title.text()).toBe("New question")
+            })
+
+            it("works for Count of rows aggregation and FK State breakout", async () => {
+                const {store, qb} = await initQbWithOrdersTable();
+                await store.dispatch(setDatasetQuery({
+                    database: 1,
+                    type: 'query',
+                    query: {
+                        source_table: 1,
+                        breakout: [['fk->', 7, 19]],
+                        aggregation: [['count']]
+                    }
+                }));
+
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                const table = qb.find(TestTable);
+                const firstRowCells = table.find("tbody tr").first().find("td");
+                expect(firstRowCells.length).toBe(2);
+
+                expect(firstRowCells.first().text()).toBe("AA");
+
+                const countCell = firstRowCells.last();
+                expect(countCell.text()).toBe("417");
+                countCell.children().first().simulate("click");
+
+                // Drill-through is delayed in handleVisualizationClick of Visualization.jsx by 100ms
+                await delay(150);
+
+                store.resetDispatchedActions();
+                qb.find(ChartClickActions).find('div[children="Zoom in"]').simulate("click");
+
+                // Must await here, otherwise the assertions below can run before the drill-through completes
+                await store.waitForActions([NAVIGATE_TO_NEW_CARD, UPDATE_URL, QUERY_COMPLETED]);
+
+                // Zooming into a state should switch to 1°-binned lat/long breakouts
+                const breakoutWidgets = qb.find(BreakoutWidget);
+                expect(breakoutWidgets.length).toBe(3);
+                expect(breakoutWidgets.at(0).text()).toBe("Latitude: 1°");
+                expect(breakoutWidgets.at(1).text()).toBe("Longitude: 1°");
+
+                // Should have visualization type set to Pin map (temporary workaround until we have polished heat maps)
+                const card = getCard(store.getState())
+                expect(card.display).toBe("map");
+                expect(card.visualization_settings).toEqual({ "map.type": "pin" });
+            });
+
+            it("works for Count of rows aggregation and FK Latitude Auto binned breakout", async () => {
+                const {store, qb} = await initQbWithOrdersTable();
+                await store.dispatch(setDatasetQuery({
+                    database: 1,
+                    type: 'query',
+                    query: {
+                        source_table: 1,
+                        breakout: [["binning-strategy", ['fk->', 7, 14], "default"]],
+                        aggregation: [['count']]
+                    }
+                }));
+
+                qb.find(RunButton).simulate("click");
+                await store.waitForActions([QUERY_COMPLETED]);
+
+                const table = qb.find(TestTable);
+                const firstRowCells = table.find("tbody tr").first().find("td");
+                expect(firstRowCells.length).toBe(2);
+
+                // lat-long formatting should be improved when it comes to trailing zeros
+                expect(firstRowCells.first().text()).toBe("90° S  –  80° S");
+
+                const countCell = firstRowCells.last();
+                expect(countCell.text()).toBe("1,079");
+                countCell.children().first().simulate("click");
+
+                // Drill-through is delayed in handleVisualizationClick of Visualization.jsx by 100ms
+                await delay(150);
+
+                store.resetDispatchedActions();
+                qb.find(ChartClickActions).find('div[children="Zoom in"]').simulate("click");
+
+                // Must await here, otherwise the assertions below can run before the drill-through completes
+                await store.waitForActions([NAVIGATE_TO_NEW_CARD, UPDATE_URL, QUERY_COMPLETED]);
+
+                // Zooming in should narrow the latitude binning
+                const breakoutWidgets = qb.find(BreakoutWidget);
+                expect(breakoutWidgets.length).toBe(2);
+
+                // Default location binning uses a 10° bin width, so zooming in yields 1° bins
+                expect(breakoutWidgets.at(0).text()).toBe("Latitude: 1°");
+
+                // Should have visualization type set to the previous visualization
+                const card = getCard(store.getState())
+                expect(card.display).toBe("bar");
+            });
+        })
+    })
 });
diff --git a/frontend/test/reference/databases.integ.spec.js b/frontend/test/reference/databases.integ.spec.js
index ec5215fd528498d3e101475a74c554ac9be99264..3862934eb2563335662c80b9c271fde80c6d000d 100644
--- a/frontend/test/reference/databases.integ.spec.js
+++ b/frontend/test/reference/databases.integ.spec.js
@@ -1,6 +1,7 @@
 import {
     login,
-    createTestStore
+    createTestStore,
+    clickRouterLink
 } from "__support__/integrated_tests";
 
 import React from 'react';
@@ -10,7 +11,7 @@ import { CardApi } from 'metabase/services'
 
 import { 
     FETCH_DATABASE_METADATA,
-    FETCH_DATABASES
+    FETCH_REAL_DATABASES
 } from "metabase/redux/metadata";
 
 import { END_LOADING } from "metabase/reference/reference"
@@ -28,6 +29,10 @@ import List from "metabase/components/List.jsx";
 import ListItem from "metabase/components/ListItem.jsx";
 import ReferenceHeader from "metabase/reference/components/ReferenceHeader.jsx";
 import AdminAwareEmptyState from "metabase/components/AdminAwareEmptyState.jsx";
+import UsefulQuestions from "metabase/reference/components/UsefulQuestions";
+import QueryButton from "metabase/components/QueryButton";
+import { INITIALIZE_QB, QUERY_COMPLETED } from "metabase/query_builder/actions";
+import { getQuestion } from "metabase/query_builder/selectors";
 
 describe("The Reference Section", () => {
     // Test data
@@ -48,7 +53,7 @@ describe("The Reference Section", () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/");
             var container = mount(store.connectContainer(<DatabaseListContainer />));
-            await store.waitForActions([FETCH_DATABASES, END_LOADING])
+            await store.waitForActions([FETCH_REAL_DATABASES, END_LOADING])
             
             expect(container.find(ReferenceHeader).length).toBe(1)
             expect(container.find(DatabaseList).length).toBe(1)            
@@ -58,12 +63,34 @@ describe("The Reference Section", () => {
             expect(container.find(ListItem).length).toBeGreaterThanOrEqual(1)
         })
         
+        // database list
+        it("should not see saved questions in the database list", async () => {
+            var card = await CardApi.create(cardDef)
+            const store = await createTestStore()
+            store.pushPath("/reference/databases/");
+            var container = mount(store.connectContainer(<DatabaseListContainer />));
+            await store.waitForActions([FETCH_REAL_DATABASES, END_LOADING])
+
+            expect(container.find(ReferenceHeader).length).toBe(1)
+            expect(container.find(DatabaseList).length).toBe(1)
+            expect(container.find(AdminAwareEmptyState).length).toBe(0)
+
+            expect(container.find(List).length).toBe(1)
+            expect(container.find(ListItem).length).toBe(1)
+
+            // Sanity-check that the saved question was actually created even though it is not listed
+            expect(card.name).toBe(cardDef.name);
+
+            await CardApi.delete({cardId: card.id})
+
+        })
+        
         // database detail
         it("should see a the detail view for the sample database", async ()=>{
             const store = await createTestStore()
             store.pushPath("/reference/databases/1");
             mount(store.connectContainer(<DatabaseDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
 
         })
         
@@ -72,7 +99,7 @@ describe("The Reference Section", () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables");
             mount(store.connectContainer(<TableListContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
         // table detail
 
@@ -80,33 +107,33 @@ describe("The Reference Section", () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/1");
             mount(store.connectContainer(<TableDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
 
        it("should see the Reviews table", async  () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/2");
             mount(store.connectContainer(<TableDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
        it("should see the Products table", async  () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/3");
             mount(store.connectContainer(<TableDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
        it("should see the People table", async  () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/4");
             mount(store.connectContainer(<TableDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
         // field list
        it("should see the fields for the orders table", async  () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/1/fields");
             mount(store.connectContainer(<FieldListContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
 
         })
        it("should see the questions for the orders tables", async  () => {
@@ -114,7 +141,7 @@ describe("The Reference Section", () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/1/questions");
             mount(store.connectContainer(<TableQuestionsContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
             
             var card = await CardApi.create(cardDef)
 
@@ -129,16 +156,36 @@ describe("The Reference Section", () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/1/fields/1");
             mount(store.connectContainer(<FieldDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
+        })
+
+        it("should let you open a potentially useful question for created_at field without errors", async () => {
+            const store = await createTestStore()
+            store.pushPath("/reference/databases/1/tables/1/fields/1");
+
+            const app = mount(store.getAppContainer());
+
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
+            const fieldDetails = app.find(FieldDetailContainer);
+            expect(fieldDetails.length).toBe(1);
+
+            const usefulQuestionLink = fieldDetails.find(UsefulQuestions).find(QueryButton).first().find("a");
+            expect(usefulQuestionLink.text()).toBe("Number of Orders grouped by Created At")
+            clickRouterLink(usefulQuestionLink);
+
+            await store.waitForActions([INITIALIZE_QB, QUERY_COMPLETED]);
+
+            const qbQuery = getQuestion(store.getState()).query();
+
+            // the granularity/subdimension should be applied correctly to the breakout
+            expect(qbQuery.breakouts()).toEqual([["datetime-field", ["field-id", 1], "day"]]);
         })
 
        it("should see the orders id field", async () => {
             const store = await createTestStore()
             store.pushPath("/reference/databases/1/tables/1/fields/25");
             mount(store.connectContainer(<FieldDetailContainer />));
-            await store.waitForActions([FETCH_DATABASE_METADATA])
+            await store.waitForActions([FETCH_DATABASE_METADATA, END_LOADING])
         })
     });
-
-
 });
\ No newline at end of file
diff --git a/frontend/test/reference/metrics.integ.spec.js b/frontend/test/reference/metrics.integ.spec.js
index a257cc4a346ce97c873e1d3ae6fe10cec9f1c642..417e7ae33e5fcf82625994c9a44ea743e6e69397 100644
--- a/frontend/test/reference/metrics.integ.spec.js
+++ b/frontend/test/reference/metrics.integ.spec.js
@@ -105,15 +105,19 @@ describe("The Reference Section", () => {
 
             it("Should see a newly asked question in its questions list", async () => {
                     var card = await CardApi.create(metricCardDef)
-
                     expect(card.name).toBe(metricCardDef.name);
-                    // see that there is a new question on the metric's questions page
-                    const store = await createTestStore()    
-                    store.pushPath("/reference/metrics/"+metricIds[0]+'/questions');
-                    mount(store.connectContainer(<MetricQuestionsContainer />));
-                    await store.waitForActions([FETCH_METRICS, FETCH_METRIC_TABLE])
-                    
-                    await CardApi.delete({cardId: card.id})
+
+                    try {
+                        // see that there is a new question on the metric's questions page
+                        const store = await createTestStore()
+
+                        store.pushPath("/reference/metrics/"+metricIds[0]+'/questions');
+                        mount(store.connectContainer(<MetricQuestionsContainer />));
+                        await store.waitForActions([FETCH_METRICS, FETCH_METRIC_TABLE])
+                    } finally {
+                        // even if the code above results in an exception, try to delete the question
+                        await CardApi.delete({cardId: card.id})
+                    }
             })
 
                        
diff --git a/frontend/test/visualizations/components/LineAreaBarChart.spec.js b/frontend/test/visualizations/components/LineAreaBarChart.spec.js
new file mode 100644
index 0000000000000000000000000000000000000000..61e2e4b5f1ee1d1b160970729b6bee94a1794f64
--- /dev/null
+++ b/frontend/test/visualizations/components/LineAreaBarChart.spec.js
@@ -0,0 +1,625 @@
+// TODO: To be replaced by an integrated test which doesn't require hardcoding the card objects
+
+// HACK: Needed because of conflicts caused by circular dependencies
+import "metabase/visualizations/components/Visualization";
+
+import LineAreaBarChart from "metabase/visualizations/components/LineAreaBarChart"
+
+const millisecondCard = {
+    "card": {
+        "description": null,
+        "archived": false,
+        "table_id": 1784,
+        "result_metadata": [
+            {
+                "base_type": "type/BigInteger",
+                "display_name": "Timestamp",
+                "name": "timestamp",
+                "special_type": "type/UNIXTimestampMilliseconds",
+                "unit": "week"
+            },
+            {
+                "base_type": "type/Integer",
+                "display_name": "count",
+                "name": "count",
+                "special_type": "type/Number"
+            }
+        ],
+        "creator": {
+            "email": "atte@metabase.com",
+            "first_name": "Atte",
+            "last_login": "2017-07-21T17:51:23.181Z",
+            "is_qbnewb": false,
+            "is_superuser": true,
+            "id": 1,
+            "last_name": "Keinänen",
+            "date_joined": "2017-03-17T03:37:27.396Z",
+            "common_name": "Atte Keinänen"
+        },
+        "database_id": 5,
+        "enable_embedding": false,
+        "collection_id": null,
+        "query_type": "query",
+        "name": "Toucan Incidents",
+        "query_average_duration": 501,
+        "creator_id": 1,
+        "updated_at": "2017-07-24T22:15:33.343Z",
+        "made_public_by_id": null,
+        "embedding_params": null,
+        "cache_ttl": null,
+        "dataset_query": {
+            "database": 5,
+            "type": "query",
+            "query": {
+                "source_table": 1784,
+                "aggregation": [
+                    [
+                        "count"
+                    ]
+                ],
+                "breakout": [
+                    [
+                        "datetime-field",
+                        [
+                            "field-id",
+                            8159
+                        ],
+                        "week"
+                    ]
+                ]
+            }
+        },
+        "id": 83,
+        "display": "line",
+        "visualization_settings": {
+            "graph.dimensions": [
+                "timestamp"
+            ],
+            "graph.metrics": [
+                "severity"
+            ]
+        },
+        "created_at": "2017-07-21T19:40:40.102Z",
+        "public_uuid": null
+    },
+    "data": {
+        "rows": [
+            [
+                "2015-05-31T00:00:00.000-07:00",
+                46
+            ],
+            [
+                "2015-06-07T00:00:00.000-07:00",
+                47
+            ],
+            [
+                "2015-06-14T00:00:00.000-07:00",
+                40
+            ],
+            [
+                "2015-06-21T00:00:00.000-07:00",
+                60
+            ],
+            [
+                "2015-06-28T00:00:00.000-07:00",
+                7
+            ]
+        ],
+        "columns": [
+            "timestamp",
+            "count"
+        ],
+        "native_form": {
+            "query": "SELECT count(*) AS \"count\", (date_trunc('week', CAST((CAST((TIMESTAMP '1970-01-01T00:00:00Z' + ((\"schema_126\".\"sad_toucan_incidents_incidents\".\"timestamp\" / 1000) * INTERVAL '1 second')) AS timestamp) + INTERVAL '1 day') AS timestamp)) - INTERVAL '1 day') AS \"timestamp\" FROM \"schema_126\".\"sad_toucan_incidents_incidents\" GROUP BY (date_trunc('week', CAST((CAST((TIMESTAMP '1970-01-01T00:00:00Z' + ((\"schema_126\".\"sad_toucan_incidents_incidents\".\"timestamp\" / 1000) * INTERVAL '1 second')) AS timestamp) + INTERVAL '1 day') AS timestamp)) - INTERVAL '1 day') ORDER BY (date_trunc('week', CAST((CAST((TIMESTAMP '1970-01-01T00:00:00Z' + ((\"schema_126\".\"sad_toucan_incidents_incidents\".\"timestamp\" / 1000) * INTERVAL '1 second')) AS timestamp) + INTERVAL '1 day') AS timestamp)) - INTERVAL '1 day') ASC",
+            "params": null
+        },
+        "cols": [
+            {
+                "description": null,
+                "table_id": 1784,
+                "schema_name": "schema_126",
+                "special_type": "type/UNIXTimestampMilliseconds",
+                "unit": "week",
+                "name": "timestamp",
+                "source": "breakout",
+                "remapped_from": null,
+                "extra_info": {},
+                "fk_field_id": null,
+                "remapped_to": null,
+                "id": 8159,
+                "visibility_type": "normal",
+                "target": null,
+                "display_name": "Timestamp",
+                "base_type": "type/BigInteger"
+            },
+            {
+                "description": null,
+                "table_id": null,
+                "special_type": "type/Number",
+                "name": "count",
+                "source": "aggregation",
+                "remapped_from": null,
+                "extra_info": {},
+                "remapped_to": null,
+                "id": null,
+                "target": null,
+                "display_name": "count",
+                "base_type": "type/Integer"
+            }
+        ],
+        "results_metadata": {
+            "checksum": "H2XV8wuuBkFrxukvDt+Ehw==",
+            "columns": [
+                {
+                    "base_type": "type/BigInteger",
+                    "display_name": "Timestamp",
+                    "name": "timestamp",
+                    "special_type": "type/UNIXTimestampMilliseconds",
+                    "unit": "week"
+                },
+                {
+                    "base_type": "type/Integer",
+                    "display_name": "count",
+                    "name": "count",
+                    "special_type": "type/Number"
+                }
+            ]
+        }
+    }
+};
+
+const dateTimeCard = {
+    "card": {
+        "description": null,
+        "archived": false,
+        "table_id": 1,
+        "result_metadata": [
+            {
+                "base_type": "type/DateTime",
+                "display_name": "Created At",
+                "name": "CREATED_AT",
+                "description": "The date and time an order was submitted.",
+                "unit": "month"
+            },
+            {
+                "base_type": "type/Float",
+                "display_name": "sum",
+                "name": "sum",
+                "special_type": "type/Number"
+            }
+        ],
+        "creator": {
+            "email": "atte@metabase.com",
+            "first_name": "Atte",
+            "last_login": "2017-07-21T17:51:23.181Z",
+            "is_qbnewb": false,
+            "is_superuser": true,
+            "id": 1,
+            "last_name": "Keinänen",
+            "date_joined": "2017-03-17T03:37:27.396Z",
+            "common_name": "Atte Keinänen"
+        },
+        "database_id": 1,
+        "enable_embedding": false,
+        "collection_id": null,
+        "query_type": "query",
+        "name": "Orders over time",
+        "query_average_duration": 798,
+        "creator_id": 1,
+        "updated_at": "2017-07-24T22:15:33.603Z",
+        "made_public_by_id": null,
+        "embedding_params": null,
+        "cache_ttl": null,
+        "dataset_query": {
+            "database": 1,
+            "type": "query",
+            "query": {
+                "source_table": 1,
+                "aggregation": [
+                    [
+                        "sum",
+                        [
+                            "field-id",
+                            4
+                        ]
+                    ]
+                ],
+                "breakout": [
+                    [
+                        "datetime-field",
+                        [
+                            "field-id",
+                            1
+                        ],
+                        "month"
+                    ]
+                ]
+            }
+        },
+        "id": 25,
+        "display": "line",
+        "visualization_settings": {
+            "graph.colors": [
+                "#F1B556",
+                "#9cc177",
+                "#a989c5",
+                "#ef8c8c",
+                "#f9d45c",
+                "#F1B556",
+                "#A6E7F3",
+                "#7172AD",
+                "#7B8797",
+                "#6450e3",
+                "#55e350",
+                "#e35850",
+                "#77c183",
+                "#7d77c1",
+                "#c589b9",
+                "#bec589",
+                "#89c3c5",
+                "#c17777",
+                "#899bc5",
+                "#efce8c",
+                "#50e3ae",
+                "#be8cef",
+                "#8cefc6",
+                "#ef8cde",
+                "#b5f95c",
+                "#5cc2f9",
+                "#f95cd0",
+                "#c1a877",
+                "#f95c67"
+            ]
+        },
+        "created_at": "2017-04-13T21:47:08.360Z",
+        "public_uuid": null
+    },
+    "data": {
+        "rows": [
+            [
+                "2015-09-01T00:00:00.000-07:00",
+                533.45
+            ],
+            [
+                "2015-10-01T00:00:00.000-07:00",
+                4130.049999999998
+            ],
+            [
+                "2015-11-01T00:00:00.000-07:00",
+                6786.2599999999975
+            ],
+            [
+                "2015-12-01T00:00:00.000-08:00",
+                12494.039999999994
+            ],
+            [
+                "2016-01-01T00:00:00.000-08:00",
+                13594.169999999995
+            ],
+            [
+                "2016-02-01T00:00:00.000-08:00",
+                16607.429999999997
+            ],
+            [
+                "2016-03-01T00:00:00.000-08:00",
+                23600.45000000002
+            ],
+            [
+                "2016-04-01T00:00:00.000-07:00",
+                24051.120000000024
+            ],
+            [
+                "2016-05-01T00:00:00.000-07:00",
+                30163.87000000002
+            ],
+            [
+                "2016-06-01T00:00:00.000-07:00",
+                30547.53000000002
+            ],
+            [
+                "2016-07-01T00:00:00.000-07:00",
+                35808.49000000004
+            ],
+            [
+                "2016-08-01T00:00:00.000-07:00",
+                43856.760000000075
+            ],
+            [
+                "2016-09-01T00:00:00.000-07:00",
+                42831.96000000008
+            ],
+            [
+                "2016-10-01T00:00:00.000-07:00",
+                50299.75000000006
+            ],
+            [
+                "2016-11-01T00:00:00.000-07:00",
+                51861.37000000006
+            ],
+            [
+                "2016-12-01T00:00:00.000-08:00",
+                55982.590000000106
+            ],
+            [
+                "2017-01-01T00:00:00.000-08:00",
+                64462.70000000016
+            ],
+            [
+                "2017-02-01T00:00:00.000-08:00",
+                58228.17000000016
+            ],
+            [
+                "2017-03-01T00:00:00.000-08:00",
+                65618.70000000017
+            ],
+            [
+                "2017-04-01T00:00:00.000-07:00",
+                66682.43000000018
+            ],
+            [
+                "2017-05-01T00:00:00.000-07:00",
+                71817.04000000012
+            ],
+            [
+                "2017-06-01T00:00:00.000-07:00",
+                72691.63000000018
+            ],
+            [
+                "2017-07-01T00:00:00.000-07:00",
+                86210.1600000002
+            ],
+            [
+                "2017-08-01T00:00:00.000-07:00",
+                81121.41000000008
+            ],
+            [
+                "2017-09-01T00:00:00.000-07:00",
+                24811.320000000007
+            ]
+        ],
+        "columns": [
+            "CREATED_AT",
+            "sum"
+        ],
+        "native_form": {
+            "query": "SELECT sum(\"PUBLIC\".\"ORDERS\".\"SUBTOTAL\") AS \"sum\", parsedatetime(formatdatetime(\"PUBLIC\".\"ORDERS\".\"CREATED_AT\", 'yyyyMM'), 'yyyyMM') AS \"CREATED_AT\" FROM \"PUBLIC\".\"ORDERS\" GROUP BY parsedatetime(formatdatetime(\"PUBLIC\".\"ORDERS\".\"CREATED_AT\", 'yyyyMM'), 'yyyyMM') ORDER BY parsedatetime(formatdatetime(\"PUBLIC\".\"ORDERS\".\"CREATED_AT\", 'yyyyMM'), 'yyyyMM') ASC",
+            "params": null
+        },
+        "cols": [
+            {
+                "description": "The date and time an order was submitted.",
+                "table_id": 1,
+                "schema_name": "PUBLIC",
+                "special_type": null,
+                "unit": "month",
+                "name": "CREATED_AT",
+                "source": "breakout",
+                "remapped_from": null,
+                "extra_info": {},
+                "fk_field_id": null,
+                "remapped_to": null,
+                "id": 1,
+                "visibility_type": "normal",
+                "target": null,
+                "display_name": "Created At",
+                "base_type": "type/DateTime"
+            },
+            {
+                "description": null,
+                "table_id": null,
+                "special_type": "type/Number",
+                "name": "sum",
+                "source": "aggregation",
+                "remapped_from": null,
+                "extra_info": {},
+                "remapped_to": null,
+                "id": null,
+                "target": null,
+                "display_name": "sum",
+                "base_type": "type/Float"
+            }
+        ],
+        "results_metadata": {
+            "checksum": "XIqamTTUJ9nbWlTwKc8Bpg==",
+            "columns": [
+                {
+                    "base_type": "type/DateTime",
+                    "display_name": "Created At",
+                    "name": "CREATED_AT",
+                    "description": "The date and time an order was submitted.",
+                    "unit": "month"
+                },
+                {
+                    "base_type": "type/Float",
+                    "display_name": "sum",
+                    "name": "sum",
+                    "special_type": "type/Number"
+                }
+            ]
+        }
+    }
+};
+
+const numberCard = {
+    "card": {
+        "description": null,
+        "archived": false,
+        "labels": [],
+        "table_id": 4,
+        "result_metadata": [
+            {
+                "base_type": "type/Integer",
+                "display_name": "Ratings",
+                "name": "RATING",
+                "description": "The rating (on a scale of 1-5) the user left.",
+                "special_type": "type/Number"
+            },
+            {
+                "base_type": "type/Integer",
+                "display_name": "count",
+                "name": "count",
+                "special_type": "type/Number"
+            }
+        ],
+        "creator": {
+            "email": "atte@metabase.com",
+            "first_name": "Atte",
+            "last_login": "2017-07-21T17:51:23.181Z",
+            "is_qbnewb": false,
+            "is_superuser": true,
+            "id": 1,
+            "last_name": "Keinänen",
+            "date_joined": "2017-03-17T03:37:27.396Z",
+            "common_name": "Atte Keinänen"
+        },
+        "database_id": 1,
+        "enable_embedding": false,
+        "collection_id": 2,
+        "query_type": "query",
+        "name": "Reviews by Rating",
+        "creator_id": 1,
+        "updated_at": "2017-07-24T22:15:29.911Z",
+        "made_public_by_id": null,
+        "embedding_params": null,
+        "cache_ttl": null,
+        "dataset_query": {
+            "database": 1,
+            "type": "query",
+            "query": {
+                "source_table": 4,
+                "aggregation": [
+                    [
+                        "count"
+                    ]
+                ],
+                "breakout": [
+                    [
+                        "field-id",
+                        33
+                    ]
+                ]
+            }
+        },
+        "id": 86,
+        "display": "line",
+        "visualization_settings": {},
+        "collection": {
+            "id": 2,
+            "name": "Order Statistics",
+            "slug": "order_statistics",
+            "description": null,
+            "color": "#7B8797",
+            "archived": false
+        },
+        "favorite": false,
+        "created_at": "2017-07-24T22:15:29.911Z",
+        "public_uuid": null
+    },
+    "data": {
+        "rows": [
+            [
+                1,
+                59
+            ],
+            [
+                2,
+                77
+            ],
+            [
+                3,
+                64
+            ],
+            [
+                4,
+                550
+            ],
+            [
+                5,
+                328
+            ]
+        ],
+        "columns": [
+            "RATING",
+            "count"
+        ],
+        "native_form": {
+            "query": "SELECT count(*) AS \"count\", \"PUBLIC\".\"REVIEWS\".\"RATING\" AS \"RATING\" FROM \"PUBLIC\".\"REVIEWS\" GROUP BY \"PUBLIC\".\"REVIEWS\".\"RATING\" ORDER BY \"PUBLIC\".\"REVIEWS\".\"RATING\" ASC",
+            "params": null
+        },
+        "cols": [
+            {
+                "description": "The rating (on a scale of 1-5) the user left.",
+                "table_id": 4,
+                "schema_name": "PUBLIC",
+                "special_type": "type/Number",
+                "name": "RATING",
+                "source": "breakout",
+                "remapped_from": null,
+                "extra_info": {},
+                "fk_field_id": null,
+                "remapped_to": null,
+                "id": 33,
+                "visibility_type": "normal",
+                "target": null,
+                "display_name": "Ratings",
+                "base_type": "type/Integer"
+            },
+            {
+                "description": null,
+                "table_id": null,
+                "special_type": "type/Number",
+                "name": "count",
+                "source": "aggregation",
+                "remapped_from": null,
+                "extra_info": {},
+                "remapped_to": null,
+                "id": null,
+                "target": null,
+                "display_name": "count",
+                "base_type": "type/Integer"
+            }
+        ],
+        "results_metadata": {
+            "checksum": "jTfxUHHttR31J8lQBqJ/EA==",
+            "columns": [
+                {
+                    "base_type": "type/Integer",
+                    "display_name": "Ratings",
+                    "name": "RATING",
+                    "description": "The rating (on a scale of 1-5) the user left.",
+                    "special_type": "type/Number"
+                },
+                {
+                    "base_type": "type/Integer",
+                    "display_name": "count",
+                    "name": "count",
+                    "special_type": "type/Number"
+                }
+            ]
+        }
+    }
+}
+
+describe("LineAreaBarChart", () => {
+    it("should let you combine series with datetimes only", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(dateTimeCard, dateTimeCard)).toBe(true);
+    });
+    it("should let you combine series with UNIX millisecond timestamps only", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(dateTimeCard, dateTimeCard)).toBe(true);
+    });
+    it("should let you combine series with numbers only", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(numberCard, numberCard)).toBe(true);
+    });
+    it("should let you combine series with UNIX millisecond timestamps and datetimes", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(millisecondCard, dateTimeCard)).toBe(true);
+        expect(LineAreaBarChart.seriesAreCompatible(dateTimeCard, millisecondCard)).toBe(true);
+    })
+    it("should not let you combine series with UNIX millisecond timestamps and numbers", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(numberCard, millisecondCard)).toBe(false);
+        expect(LineAreaBarChart.seriesAreCompatible(millisecondCard, numberCard)).toBe(false);
+    })
+    it("should not let you combine series with datetimes and numbers", () => {
+        expect(LineAreaBarChart.seriesAreCompatible(numberCard, dateTimeCard)).toBe(false);
+        expect(LineAreaBarChart.seriesAreCompatible(dateTimeCard, numberCard)).toBe(false);
+    })
+})
\ No newline at end of file
diff --git a/package.json b/package.json
index 1d3dd522f795ca0ee215790cdbf129cec6e7b906..eb8ac86c86cc2125611e8df0f8935e500af006eb 100644
--- a/package.json
+++ b/package.json
@@ -29,6 +29,7 @@
     "js-cookie": "^2.1.2",
     "jsrsasign": "^7.1.0",
     "leaflet": "^1.0.1",
+    "leaflet.heat": "^0.2.0",
     "leaflet-draw": "^0.4.9",
     "moment": "2.14.1",
     "node-libs-browser": "^2.0.0",
diff --git a/project.clj b/project.clj
index 504c2d9d5979520b628a92dab9694b26c470faf4..7b098407bccaac136209bea490c1be24ea4ff137 100644
--- a/project.clj
+++ b/project.clj
@@ -17,7 +17,7 @@
                  [org.clojure/core.memoize "0.5.9"]                   ; needed by core.match; has useful FIFO, LRU, etc. caching mechanisms
                  [org.clojure/data.csv "0.1.3"]                       ; CSV parsing / generation
                  [org.clojure/java.classpath "0.2.3"]                 ; examine the Java classpath from Clojure programs
-                 [org.clojure/java.jdbc "0.6.1"]                      ; basic JDBC access from Clojure
+                 [org.clojure/java.jdbc "0.7.0"]                      ; basic JDBC access from Clojure
                  [org.clojure/math.numeric-tower "0.0.4"]             ; math functions like `ceil`
                  [org.clojure/tools.logging "0.3.1"]                  ; logging framework
                  [org.clojure/tools.namespace "0.2.10"]
@@ -26,6 +26,7 @@
                                org.clojure/clojurescript]]            ; fixed length queue implementation, used in log buffering
                  [amalloy/ring-gzip-middleware "0.1.3"]               ; Ring middleware to GZIP responses if client can handle it
                  [aleph "0.4.3"]                                      ; Async HTTP library; WebSockets
+                 [bigml/histogram "4.1.3"]                            ; Streaming one-pass Histogram data structure
                  [buddy/buddy-core "1.2.0"]                           ; various cryptograhpic functions
                  [buddy/buddy-sign "1.5.0"]                           ; JSON Web Tokens; High-Level message signing library
                  [cheshire "5.7.0"]                                   ; fast JSON encoding (used by Ring JSON middleware)
@@ -58,6 +59,7 @@
                  [environ "1.1.0"]                                    ; easy environment management
                  [hiccup "1.0.5"]                                     ; HTML templating
                  [honeysql "0.8.2"]                                   ; Transform Clojure data structures to SQL
+                 [kixi/stats "0.3.8"]                                 ; Various statistic measures implemented as transducers
                  [log4j/log4j "1.2.17"                                ; logging framework
                   :exclusions [javax.mail/mail
                                javax.jms/jms
@@ -69,6 +71,7 @@
                  [net.sf.cssbox/cssbox "4.12"                         ; HTML / CSS rendering
                   :exclusions [org.slf4j/slf4j-api]]
                  [net.sourceforge.jtds/jtds "1.3.1"]                  ; Open Source SQL Server driver
+                 [com.clearspring.analytics/stream "2.9.5"]           ; Various sketching algorithms
                  [org.clojars.pntblnk/clj-ldap "0.0.12"]              ; LDAP client
                  [org.liquibase/liquibase-core "3.5.3"]               ; migration management (Java lib)
                  [org.slf4j/slf4j-log4j12 "1.7.25"]                   ; abstraction for logging frameworks -- allows end user to plug in desired logging framework at deployment time
@@ -77,10 +80,12 @@
                  [postgresql "9.3-1102.jdbc41"]                       ; Postgres driver
                  [io.crate/crate-jdbc "2.1.6"]                        ; Crate JDBC driver
                  [prismatic/schema "1.1.5"]                           ; Data schema declaration and validation library
+                 [redux "0.1.4"]                                      ; Utility functions for building and composing transducers
                  [ring/ring-core "1.6.0"]
                  [ring/ring-jetty-adapter "1.6.0"]                    ; Ring adapter using Jetty webserver (used to run a Ring server for unit tests)
                  [ring/ring-json "0.4.0"]                             ; Ring middleware for reading/writing JSON automatically
                  [stencil "0.5.0"]                                    ; Mustache templates for Clojure
+                 [tide "0.1.0-SNAPSHOT"]                              ; Various algorithms for working with timeseries
                  [toucan "1.0.3"                                      ; Model layer, hydration, and DB utilities
                   :exclusions [honeysql]]]
   :repositories [["bintray" "https://dl.bintray.com/crate/crate"]]    ; Repo for Crate JDBC driver
diff --git a/resources/frontend_client/index_template.html b/resources/frontend_client/index_template.html
index c2c0924cc3ddadc6bb3b0702e99f00e40767b781..1986da16223f528586ea445f25a4973f3de42419 100644
--- a/resources/frontend_client/index_template.html
+++ b/resources/frontend_client/index_template.html
@@ -57,7 +57,7 @@
                     document.body.appendChild(script);
                 }
                 loadScript('https://ajax.googleapis.com/ajax/libs/webfont/1.4.7/webfont.js', function () {
-                    WebFont.load({ google: { families: ["Lato:n3,n4,n9"] } });
+                    WebFont.load({ google: { families: ["Lato:n3,n4,n9,i9"] } });
                 });
                 var googleAuthClientID = window.MetabaseBootstrap.google_auth_client_id;
                 if (googleAuthClientID) {
diff --git a/resources/log4j.properties b/resources/log4j.properties
index 774e5d5886b6af9a12f9a4f4a5a131a91f0ef78f..075a52ccf78243543acee507eaa88aaabea9c9a4 100644
--- a/resources/log4j.properties
+++ b/resources/log4j.properties
@@ -22,7 +22,8 @@ log4j.logger.metabase.middleware=DEBUG
 log4j.logger.metabase.models.permissions=DEBUG
 log4j.logger.metabase.query-processor.permissions=DEBUG
 log4j.logger.metabase.query-processor=DEBUG
-log4j.logger.metabase.sync-database=DEBUG
+log4j.logger.metabase.sync=DEBUG
+log4j.logger.metabase.models.field-values=DEBUG
 log4j.logger.metabase=INFO
 # c3p0 connection pools tend to log useless warnings way too often; only log actual errors
 log4j.logger.com.mchange=ERROR
diff --git a/resources/migrations/000_migrations.yaml b/resources/migrations/000_migrations.yaml
index ce0bff1cc3e164e82fc416f1d77b7371459507e2..1927dce96d75ea0166612ce6beb37fbcd8df1d99 100644
--- a/resources/migrations/000_migrations.yaml
+++ b/resources/migrations/000_migrations.yaml
@@ -3670,3 +3670,85 @@ databaseChangeLog:
                   name: result_metadata
                   type: text
                   remarks: 'Serialized JSON containing metadata about the result columns from running the query.'
+  - changeSet:
+      id: 58
+      author: senior
+      changes:
+        - createTable:
+            tableName: dimension
+            remarks: 'Stores references to alternate views of existing fields, such as remapping an integer to a description, like an enum'
+            columns:
+              - column:
+                  name: id
+                  type: int
+                  autoIncrement: true
+                  constraints:
+                    primaryKey: true
+                    nullable: false
+              - column:
+                  name: field_id
+                  type: int
+                  remarks: 'ID of the field this dimension row applies to'
+                  constraints:
+                    deferrable: false
+                    foreignKeyName: fk_dimension_ref_field_id
+                    initiallyDeferred: false
+                    nullable: false
+                    references: metabase_field(id)
+                    deleteCascade: true
+              - column:
+                  name: name
+                  type: VARCHAR(254)
+                  remarks: 'Short description used as the display name of this new column'
+                  constraints:
+                    nullable: false
+              - column:
+                  name: type
+                  type: varchar(254)
+                  remarks: 'Either internal for a user defined remapping or external for a foreign key based remapping'
+                  constraints:
+                    nullable: false
+              - column:
+                  name: human_readable_field_id
+                  type: int
+                  remarks: 'Only used with external type remappings. Indicates which field on the FK related table to use for display'
+                  constraints:
+                    deferrable: false
+                    foreignKeyName: fk_dimension_displayfk_ref_field_id
+                    initiallyDeferred: false
+                    nullable: true
+                    references: metabase_field(id)
+                    deleteCascade: true
+              - column:
+                  name: created_at
+                  type: DATETIME
+                  remarks: 'The timestamp of when the dimension was created.'
+                  constraints:
+                    nullable: false
+              - column:
+                  name: updated_at
+                  type: DATETIME
+                  remarks: 'The timestamp of when this dimension was last updated.'
+                  constraints:
+                    nullable: false
+        - addUniqueConstraint:
+            tableName: dimension
+            columnNames: field_id, name
+            constraintName: unique_dimension_field_id_name
+        - createIndex:
+            tableName: dimension
+            indexName: idx_dimension_field_id
+            columns:
+              - column:
+                  name: field_id
+  - changeSet:
+      id: 59
+      author: camsaul
+      changes:
+        - addColumn:
+            tableName: metabase_field
+            columns:
+              - column:
+                  name: fingerprint
+                  type: text
+                  remarks: 'Serialized JSON containing non-identifying information about this Field, such as min, max, and percent JSON. Used for classification.'
diff --git a/src/metabase/api/card.clj b/src/metabase/api/card.clj
index 69b37ea253e11aaca871e5d52cc6f7bf916d9e13..413d7f3c73901c7f2d6e7998f513fbbd4ab9b1bd 100644
--- a/src/metabase/api/card.clj
+++ b/src/metabase/api/card.clj
@@ -252,7 +252,7 @@
   (when collection_id
     (api/check-403 (perms/set-has-full-permissions? @api/*current-user-permissions-set* (perms/collection-readwrite-path collection_id))))
   ;; everything is g2g, now save the card
-  (->> (db/insert! Card
+  (let [card (db/insert! Card
          :creator_id             api/*current-user-id*
          :dataset_query          dataset_query
          :description            description
@@ -260,8 +260,10 @@
          :name                   name
          :visualization_settings visualization_settings
          :collection_id          collection_id
-         :result_metadata        (result-metadata dataset_query result_metadata metadata_checksum))
-       (events/publish-event! :card-create)))
+         :result_metadata        (result-metadata dataset_query result_metadata metadata_checksum))]
+       (events/publish-event! :card-create card)
+       ;; include same information returned by GET /api/card/:id since frontend replaces the Card it currently has with returned one -- See #4283
+       (hydrate card :creator :dashboard_count :labels :can_write :collection)))
 
 
 ;;; ------------------------------------------------------------ Updating Cards ------------------------------------------------------------
diff --git a/src/metabase/api/database.clj b/src/metabase/api/database.clj
index 6de82e7a9b365cffa78122d0155f63088f75990a..0ffef4e1e77d473706e2e3c26bb15ae6b8843f63 100644
--- a/src/metabase/api/database.clj
+++ b/src/metabase/api/database.clj
@@ -12,6 +12,7 @@
             [metabase.api
              [common :as api]
              [table :as table-api]]
+            [metabase.public-settings :as public-settings]
             [metabase.models
              [card :refer [Card]]
              [database :as database :refer [Database protected-password]]
@@ -110,12 +111,13 @@
     (table-api/card->virtual-table card :include-fields? include-fields?)))
 
 (defn- saved-cards-virtual-db-metadata [& {:keys [include-fields?]}]
-  (when-let [virtual-tables (seq (cards-virtual-tables :include-fields? include-fields?))]
-    {:name               "Saved Questions"
-     :id                 database/virtual-id
-     :features           #{:basic-aggregations}
-     :tables             virtual-tables
-     :is_saved_questions true}))
+  (when (public-settings/enable-nested-queries)
+    (when-let [virtual-tables (seq (cards-virtual-tables :include-fields? include-fields?))]
+      {:name               "Saved Questions"
+       :id                 database/virtual-id
+       :features           #{:basic-aggregations}
+       :tables             virtual-tables
+       :is_saved_questions true})))
 
 ;; "Virtual" tables for saved cards simulate the db->schema->table hierarchy by doing fake-db->collection->card
 (defn- add-virtual-tables-for-saved-cards [dbs]
@@ -135,7 +137,7 @@
   [include_tables include_cards]
   {include_tables (s/maybe su/BooleanString)
    include_cards  (s/maybe su/BooleanString)}
-  (or (dbs-list include_tables include_cards)
+  (or (dbs-list (Boolean/parseBoolean include_tables) (Boolean/parseBoolean include_cards))
       []))
 
 
@@ -163,12 +165,12 @@
 (defn- db-metadata [id]
   (-> (api/read-check Database id)
       (hydrate [:tables [:fields :target :values] :segments :metrics])
-      (update :tables   (fn [tables]
-                          (for [table tables
-                                :when (mi/can-read? table)]
-                            (-> table
-                                (update :segments (partial filter mi/can-read?))
-                                (update :metrics  (partial filter mi/can-read?))))))))
+      (update :tables (fn [tables]
+                        (for [table tables
+                              :when (mi/can-read? table)]
+                          (-> table
+                              (update :segments (partial filter mi/can-read?))
+                              (update :metrics  (partial filter mi/can-read?))))))))
 
 (api/defendpoint GET "/:id/metadata"
   "Get metadata about a `Database`, including all of its `Tables` and `Fields`.
@@ -184,8 +186,7 @@
     {:where    [:and [:= :db_id db-id]
                      [:= :active true]
                      [:like :%lower.name (str (str/lower-case prefix) "%")]
-                     [:or [:= :visibility_type nil]
-                          [:not= :visibility_type "hidden"]]]
+                     [:= :visibility_type nil]]
      :order-by [[:%lower.name :asc]]}))
 
 (defn- autocomplete-fields [db-id prefix]
diff --git a/src/metabase/api/dataset.clj b/src/metabase/api/dataset.clj
index 22e476c51aac6a9c5ca20aeb0c85e364ff1a5106..27c8c73b7e4ad19033af5408d0e58412a14d4bf8 100644
--- a/src/metabase/api/dataset.clj
+++ b/src/metabase/api/dataset.clj
@@ -19,7 +19,8 @@
             [metabase.query-processor.util :as qputil]
             [metabase.util.schema :as su]
             [schema.core :as s])
-  (:import [java.io ByteArrayInputStream ByteArrayOutputStream]))
+  (:import [java.io ByteArrayInputStream ByteArrayOutputStream]
+           org.apache.poi.ss.usermodel.Cell))
 
 ;;; ------------------------------------------------------------ Constants ------------------------------------------------------------
 
@@ -65,18 +66,30 @@
 
 ;;; ------------------------------------------------------------ Downloading Query Results in Other Formats ------------------------------------------------------------
 
-(defn- export-to-csv [columns rows]
-  (with-out-str
-    ;; turn keywords into strings, otherwise we get colons in our output
-    (csv/write-csv *out* (into [(mapv name columns)] rows))))
+;; add a generic implementation for the method that writes values to XLSX cells that just piggybacks off the implementations
+;; we've already defined for encoding things as JSON. These implementations live in `metabase.middleware`.
+(defmethod spreadsheet/set-cell! Object [^Cell cell, value]
+  (when (= (.getCellType cell) Cell/CELL_TYPE_FORMULA)
+    (.setCellType cell Cell/CELL_TYPE_STRING))
+  ;; stick the object in a JSON map and encode it, which will force conversion to a string. Then unparse that JSON and use the resulting value
+  ;; as the cell's new String value.
+  ;; There might be some more efficient way of doing this but I'm not sure what it is.
+  (.setCellValue cell (str (-> (json/generate-string {:v value})
+                               (json/parse-string keyword)
+                               :v))))
 
 (defn- export-to-xlsx [columns rows]
-  (let [wb  (spreadsheet/create-workbook "Query result" (conj rows (mapv name columns)))
+  (let [wb  (spreadsheet/create-workbook "Query result" (cons (mapv name columns) rows))
         ;; note: byte array streams don't need to be closed
         out (ByteArrayOutputStream.)]
     (spreadsheet/save-workbook! out wb)
     (ByteArrayInputStream. (.toByteArray out))))
 
+(defn- export-to-csv [columns rows]
+  (with-out-str
+    ;; turn keywords into strings, otherwise we get colons in our output
+    (csv/write-csv *out* (into [(mapv name columns)] rows))))
+
 (defn- export-to-json [columns rows]
   (for [row rows]
     (zipmap columns row)))
diff --git a/src/metabase/api/field.clj b/src/metabase/api/field.clj
index d513a6d80618b605d56b423a8c2a47822634e829..751a4b691de8dd44f6170fc2b642b8dd8fd49007 100644
--- a/src/metabase/api/field.clj
+++ b/src/metabase/api/field.clj
@@ -1,10 +1,11 @@
 (ns metabase.api.field
-  (:require [compojure.core :refer [GET POST PUT]]
+  (:require [compojure.core :refer [GET POST PUT DELETE]]
             [metabase.api.common :as api]
             [metabase.db.metadata-queries :as metadata]
             [metabase.models
+             [dimension :refer [Dimension]]
              [field :as field :refer [Field]]
-             [field-values :refer [create-field-values-if-needed! field-should-have-field-values? FieldValues]]]
+             [field-values :refer [create-field-values-if-needed! field-should-have-field-values? field-values->pairs FieldValues]]]
             [metabase.util :as u]
             [metabase.util.schema :as su]
             [schema.core :as s]
@@ -28,6 +29,33 @@
   (-> (api/read-check Field id)
       (hydrate [:table :db])))
 
+(defn- clear-dimension-on-fk-change! [{{dimension-id :id dimension-type :type} :dimensions :as field}]
+  (when (and dimension-id (= :external dimension-type))
+    (db/delete! Dimension :id dimension-id))
+  true)
+
+(defn- removed-fk-special-type? [old-special-type new-special-type]
+  (and (not= old-special-type new-special-type)
+       (isa? old-special-type :type/FK)
+       (or (nil? new-special-type)
+           (not (isa? new-special-type :type/FK)))))
+
+(defn- internal-remapping-allowed? [base-type special-type]
+  (and (isa? base-type :type/Integer)
+       (or
+        (nil? special-type)
+        (isa? special-type :type/Category)
+        (isa? special-type :type/Enum))))
+
+(defn- clear-dimension-on-type-change!
+  "Removes a related dimension if the field is moving to a type that
+  does not support remapping"
+  [{{old-dim-id :id, old-dim-type :type} :dimensions, :as old-field} base-type new-special-type]
+  (when (and old-dim-id
+             (= :internal old-dim-type)
+             (not (internal-remapping-allowed? base-type new-special-type)))
+    (db/delete! Dimension :id old-dim-id))
+  true)
 
 (api/defendpoint PUT "/:id"
   "Update `Field` with ID."
@@ -35,27 +63,34 @@
   {caveats            (s/maybe su/NonBlankString)
    description        (s/maybe su/NonBlankString)
    display_name       (s/maybe su/NonBlankString)
-   fk_target_field_id (s/maybe s/Int)
+   fk_target_field_id (s/maybe su/IntGreaterThanZero)
    points_of_interest (s/maybe su/NonBlankString)
    special_type       (s/maybe FieldType)
    visibility_type    (s/maybe FieldVisibilityType)}
-  (let [field              (api/write-check Field id)
-        special_type       (keyword (or special_type (:special_type field)))
-        ;; only let target field be set for :type/FK type fields, and if it's not in the payload then leave the current value
-        fk_target_field_id (when (isa? special_type :type/FK)
-                             (or fk_target_field_id (:fk_target_field_id field)))]
+  (let [field              (hydrate (api/write-check Field id) :dimensions)
+        new-special-type   (keyword (get body :special_type (:special_type field)))
+        removed-fk?        (removed-fk-special-type? (:special_type field) new-special-type)
+        fk-target-field-id (get body :fk_target_field_id (:fk_target_field_id field))]
+
     ;; validate that fk_target_field_id is a valid Field
     ;; TODO - we should also check that the Field is within the same database as our field
-    (when fk_target_field_id
-      (api/checkp (db/exists? Field :id fk_target_field_id)
+    (when fk-target-field-id
+      (api/checkp (db/exists? Field :id fk-target-field-id)
         :fk_target_field_id "Invalid target field"))
     ;; everything checks out, now update the field
-    (api/check-500 (db/update! Field id
-                     (u/select-keys-when (assoc body :fk_target_field_id fk_target_field_id)
-                       :present #{:caveats :description :fk_target_field_id :points_of_interest :special_type :visibility_type}
-                       :non-nil #{:display_name})))
+    (api/check-500
+     (db/transaction
+       (and
+        (if removed-fk?
+          (clear-dimension-on-fk-change! field)
+          true)
+        (clear-dimension-on-type-change! field (:base_type field) new-special-type)
+        (db/update! Field id
+          (u/select-keys-when (assoc body :fk_target_field_id (when-not removed-fk? fk-target-field-id))
+            :present #{:caveats :description :fk_target_field_id :points_of_interest :special_type :visibility_type}
+            :non-nil #{:display_name})))))
     ;; return updated field
-    (Field id)))
+    (hydrate (Field id) :dimensions)))
 
 (api/defendpoint GET "/:id/summary"
   "Get the count and distinct count of `Field` with ID."
@@ -65,16 +100,49 @@
      [:distincts (metadata/field-distinct-count field)]]))
 
 (def ^:private empty-field-values
-  {:values {} :human_readable_values {}})
+  {:values []})
 
 (api/defendpoint GET "/:id/values"
   "If `Field`'s special type derives from `type/Category`, or its base type is `type/Boolean`, return
    all distinct values of the field, and a map of human-readable values defined by the user."
   [id]
   (let [field (api/read-check Field id)]
-    (if-not (field-should-have-field-values? field)
-      empty-field-values
-      (create-field-values-if-needed! field))))
+    (if-let [field-values (and (field-should-have-field-values? field)
+                               (create-field-values-if-needed! field))]
+      (-> field-values
+          (assoc :values (field-values->pairs field-values))
+          (dissoc :human_readable_values))
+      {:values []})))
+
+(api/defendpoint POST "/:id/dimension"
+  "Sets the dimension for the given field at ID"
+  [id :as {{dimension-type :type dimension-name :name human_readable_field_id :human_readable_field_id} :body}]
+  {dimension-type         (s/enum "internal" "external")
+   dimension-name         su/NonBlankString
+   human_readable_field_id (s/maybe su/IntGreaterThanZero)}
+  (let [field (api/write-check Field id)]
+    (api/check (or (= dimension-type "internal")
+                   (and (= dimension-type "external")
+                        human_readable_field_id))
+      [400 "Foreign key based remappings require a human readable field id"])
+    (if-let [dimension (Dimension :field_id id)]
+      (db/update! Dimension (u/get-id dimension)
+        {:type dimension-type
+         :name dimension-name
+         :human_readable_field_id human_readable_field_id})
+      (db/insert! Dimension
+        {:field_id id
+         :type dimension-type
+         :name dimension-name
+         :human_readable_field_id human_readable_field_id}))
+    (Dimension :field_id id)))
+
+(api/defendpoint DELETE "/:id/dimension"
+  "Remove the dimension associated to field at ID"
+  [id]
+  (let [field (api/write-check Field id)]
+    (db/delete! Dimension :field_id id)
+    api/generic-204-no-content))
 
 ;; match things like GET /field-literal%2Ccreated_at%2Ctype%2FDatetime/values
 ;; (this is how things like [field-literal,created_at,type/Datetime] look when URL-encoded)
@@ -86,21 +154,45 @@
   [_ _]
   empty-field-values)
 
+(defn- validate-human-readable-pairs
+  "Human readable values are optional, but if present they must be
+  present for each field value. Throws if invalid, returns a boolean
+  indicating whether human readable values were found."
+  [value-pairs]
+  (let [human-readable-missing? #(= ::not-found (get % 1 ::not-found))
+        has-human-readable-values? (not-any? human-readable-missing? value-pairs)]
+    (api/check (or has-human-readable-values?
+                   (every? human-readable-missing? value-pairs))
+      [400 "If remapped values are specified, they must be specified for all field values"])
+    has-human-readable-values?))
 
-;; TODO - not sure this is used anymore
-(api/defendpoint POST "/:id/value_map_update"
-  "Update the human-readable values for a `Field` whose special type is `category`/`city`/`state`/`country`
-   or whose base type is `type/Boolean`."
-  [id :as {{:keys [values_map]} :body}]
-  {values_map su/Map}
+(defn- update-field-values! [field-value-id value-pairs]
+  (let [human-readable-values? (validate-human-readable-pairs value-pairs)]
+    (api/check-500 (db/update! FieldValues field-value-id
+                     :values (map first value-pairs)
+                     :human_readable_values (when human-readable-values?
+                                              (map second value-pairs))))))
+
+(defn- create-field-values!
+  [field-or-id value-pairs]
+  (let [human-readable-values? (validate-human-readable-pairs value-pairs)]
+    (db/insert! FieldValues
+      :field_id (u/get-id field-or-id)
+      :values (map first value-pairs)
+      :human_readable_values (when human-readable-values?
+                               (map second value-pairs)))))
+
+(api/defendpoint POST "/:id/values"
+  "Update the field values and human-readable values for a `Field` whose special type is `category`/`city`/`state`/`country`
+   or whose base type is `type/Boolean`. The human-readable values are optional."
+  [id :as {{value-pairs :values} :body}]
+  {value-pairs [[(s/one s/Num "value") (s/optional su/NonBlankString "human readable value")]]}
   (let [field (api/write-check Field id)]
     (api/check (field-should-have-field-values? field)
-      [400 "You can only update the mapped values of a Field whose 'special_type' is 'category'/'city'/'state'/'country' or whose 'base_type' is 'type/Boolean'."])
-    (if-let [field-values-id (db/select-one-id FieldValues, :field_id id)]
-      (api/check-500 (db/update! FieldValues field-values-id
-                       :human_readable_values values_map))
-      (create-field-values-if-needed! field values_map)))
+      [400 "You can only update the human readable values of a Field whose 'special_type' is 'category'/'city'/'state'/'country' or whose 'base_type' is 'type/Boolean'."])
+    (if-let [field-value-id (db/select-one-id FieldValues, :field_id id)]
+      (update-field-values! field-value-id value-pairs)
+      (create-field-values! field value-pairs)))
   {:status :success})
 
-
 (api/define-routes)
diff --git a/src/metabase/api/fingerprint.clj b/src/metabase/api/fingerprint.clj
new file mode 100644
index 0000000000000000000000000000000000000000..70c28be3599523af761886770021605bc875cd5c
--- /dev/null
+++ b/src/metabase/api/fingerprint.clj
@@ -0,0 +1,134 @@
+(ns metabase.api.fingerprint
+  (:require [compojure.core :refer [GET]]
+            [metabase.api.common :as api]
+            [metabase.fingerprinting.core :as f]
+            [metabase.models
+             [card :refer [Card]]
+             [field :refer [Field]]
+             [metric :refer [Metric]]
+             [segment :refer [Segment]]
+             [table :refer [Table]]]
+            [schema.core :as s]))
+
+;; See metabase.fingerprinting.core/fingerprint for description of these settings.
+(def ^:private ^:const MaxQueryCost
+  (s/maybe (s/enum "cache"
+                   "sample"
+                   "full-scan"
+                   "joins")))
+
+(def ^:private ^:const MaxComputationCost
+  (s/maybe (s/enum "linear"
+                   "unbounded"
+                   "yolo")))
+
+(def ^:private ^:const Scale
+  (s/maybe (s/enum "month"
+                   "week"
+                   "day")))
+
+(defn- max-cost
+  [query computation]
+  {:query       (keyword query)
+   :computation (keyword computation)})
+
+(api/defendpoint GET "/field/:id"
+  "Get fingerprint for a `Field` with ID."
+  [id max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> id
+       (api/read-check Field)
+       (f/fingerprint {:max-cost (max-cost max_query_cost
+                                           max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/table/:id"
+  "Get fingerprint for a `Table` with ID."
+  [id max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> id
+       (api/read-check Table)
+       (f/fingerprint {:max-cost (max-cost max_query_cost
+                                           max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/segment/:id"
+  "Get fingerprint for a `Segment` with ID."
+  [id max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> id
+       (api/read-check Segment)
+       (f/fingerprint {:max-cost (max-cost max_query_cost
+                                           max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/card/:id"
+  "Get fingerprint for a `Card` with ID."
+  [id max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> id
+       (api/read-check Card)
+       (f/fingerprint {:max-cost (max-cost max_query_cost
+                                           max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/compare/fields/:id1/:id2"
+  "Get comparison fingerprints for `Field`s with ID1 and ID2."
+  [id1 id2 max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> [id1 id2]
+       (map (partial api/read-check Field))
+       (apply f/compare-fingerprints
+              {:max-cost (max-cost max_query_cost max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/compare/tables/:id1/:id2"
+  "Get comparison fingerprints for `Table`s with ID1 and ID2."
+  [id1 id2 max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> [id1 id2]
+       (map (partial api/read-check Table))
+       (apply f/compare-fingerprints
+              {:max-cost (max-cost max_query_cost max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/compare/cards/:id1/:id2"
+  "Get comparison fingerprints for `Card`s with ID1 and ID2."
+  [id1 id2 max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> [id1 id2]
+       (map (partial api/read-check Card))
+       (apply f/compare-fingerprints
+              {:max-cost (max-cost max_query_cost max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/compare/segments/:id1/:id2"
+  "Get comparison fingerprints for `Segment`s with ID1 and ID2."
+  [id1 id2 max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (->> [id1 id2]
+       (map (partial api/read-check Segment))
+       (apply f/compare-fingerprints
+              {:max-cost (max-cost max_query_cost max_computation_cost)})
+       f/x-ray))
+
+(api/defendpoint GET "/compare/segment/:sid/table/:tid"
+  "Compare `Segment` with `Table`."
+  [sid tid max_query_cost max_computation_cost]
+  {max_query_cost       MaxQueryCost
+   max_computation_cost MaxComputationCost}
+  (f/x-ray
+   (f/compare-fingerprints
+    {:max-cost (max-cost max_query_cost max_computation_cost)}
+    (api/read-check Segment sid)
+    (api/read-check Table tid))))
+
+(api/define-routes)
diff --git a/src/metabase/api/notify.clj b/src/metabase/api/notify.clj
index 894a441fe5278e905a5589ba2da148edfac7966d..afcf6ce12062f14fc8c2f5408b5fe96bd2ea21fd 100644
--- a/src/metabase/api/notify.clj
+++ b/src/metabase/api/notify.clj
@@ -5,7 +5,7 @@
             [metabase.models
              [database :refer [Database]]
              [table :refer [Table]]]
-            [metabase.sync-database :as sync-database]))
+            [metabase.sync :as sync]))
 
 (api/defendpoint POST "/db/:id"
   "Notification about a potential schema change to one of our `Databases`.
@@ -14,10 +14,10 @@
   (api/let-404 [database (Database id)]
     (cond
       table_id (when-let [table (Table :db_id id, :id (int table_id))]
-                 (future (sync-database/sync-table! table)))
+                 (future (sync/sync-table! table)))
       table_name (when-let [table (Table :db_id id, :name table_name)]
-                   (future (sync-database/sync-table! table)))
-      :else (future (sync-database/sync-database! database))))
+                   (future (sync/sync-table! table)))
+      :else (future (sync/sync-database! database))))
   {:success true})
 
 
diff --git a/src/metabase/api/public.clj b/src/metabase/api/public.clj
index 996fbd43e7a0a959c7bb7aa722790da0ad29a051..db1c9f5c8271c99791ee7b2f943564826a7ad0e0 100644
--- a/src/metabase/api/public.clj
+++ b/src/metabase/api/public.clj
@@ -16,7 +16,7 @@
              [dashboard-card :refer [DashboardCard]]
              [dashboard-card-series :refer [DashboardCardSeries]]
              [field-values :refer [FieldValues]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.util
              [embed :as embed]
              [schema :as su]]
diff --git a/src/metabase/api/routes.clj b/src/metabase/api/routes.clj
index 90b1b20751da34b986d2c1ab3526e4239a6eb504..216991dff13b601f91a6c2cc7846088595a990b7 100644
--- a/src/metabase/api/routes.clj
+++ b/src/metabase/api/routes.clj
@@ -12,6 +12,7 @@
              [email :as email]
              [embed :as embed]
              [field :as field]
+             [fingerprint :as fingerprint]
              [geojson :as geojson]
              [getting-started :as getting-started]
              [label :as label]
@@ -61,6 +62,8 @@
   (context "/email"           [] (+auth email/routes))
   (context "/embed"           [] (+message-only-exceptions embed/routes))
   (context "/field"           [] (+auth field/routes))
+  ;; TODO - fingerprint and comparison should be split out?
+  (context "/fingerprint"     [] (+auth fingerprint/routes))
   (context "/getting_started" [] (+auth getting-started/routes))
   (context "/geojson"         [] (+auth geojson/routes))
   (context "/label"           [] (+auth label/routes))
diff --git a/src/metabase/api/session.clj b/src/metabase/api/session.clj
index dfe3116f2e33ac4a45f23dd8b42f56eebcc9e68c..443b0da0c8b90636e1fd1a715e007ca299221de6 100644
--- a/src/metabase/api/session.clj
+++ b/src/metabase/api/session.clj
@@ -40,6 +40,28 @@
   {:username   (throttle/make-throttler :username)
    :ip-address (throttle/make-throttler :username, :attempts-threshold 50)}) ; IP Address doesn't have an actual UI field so just show error by username
 
+(defn- ldap-login
+  "If LDAP is enabled and a matching user exists, return a new Session for them, or `nil` if they couldn't be authenticated."
+  [username password]
+  (when (ldap/ldap-configured?)
+    (try
+      (when-let [user-info (ldap/find-user username)]
+        (when-not (ldap/verify-password user-info password)
+          ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly outdated password
+          (throw (ex-info "Password did not match stored password." {:status-code 400
+                                                                     :errors      {:password "did not match stored password"}})))
+        ;; password is ok, return new session
+        {:id (create-session! (ldap/fetch-or-create-user! user-info password))})
+      (catch com.unboundid.util.LDAPSDKException e
+        (log/error (u/format-color 'red "Problem connecting to LDAP server, will fallback to local authentication") (.getMessage e))))))
+
+(defn- email-login
+  "Find a matching `User` if one exists and return a new Session for them, or `nil` if they couldn't be authenticated."
+  [username password]
+  (when-let [user (db/select-one [User :id :password_salt :password :last_login], :email username, :is_active true)]
+    (when (pass/verify-password password (:password_salt user) (:password user))
+      {:id (create-session! user)})))
+
 (api/defendpoint POST "/"
   "Login."
   [:as {{:keys [username password]} :body, remote-address :remote-addr}]
@@ -48,28 +70,13 @@
   (throttle/check (login-throttlers :ip-address) remote-address)
   (throttle/check (login-throttlers :username)   username)
   ;; Primitive "strategy implementation", should be reworked for modular providers in #3210
-  (or
-    ;; First try LDAP if it's enabled
-    (when (ldap/ldap-configured?)
-      (try
-        (when-let [user-info (ldap/find-user username)]
-          (if (ldap/verify-password user-info password)
-            {:id (create-session! (ldap/fetch-or-create-user! user-info password))}
-            ;; Since LDAP knows about the user, fail here to prevent the local strategy to be tried with a possibly outdated password
-            (throw (ex-info "Password did not match stored password." {:status-code 400
-                                                                       :errors      {:password "did not match stored password"}}))))
-        (catch com.unboundid.util.LDAPSDKException e
-          (log/error (u/format-color 'red "Problem connecting to LDAP server, will fallback to local authentication") (.getMessage e)))))
-
-    ;; Then try local authentication
-    (when-let [user (db/select-one [User :id :password_salt :password :last_login], :email username, :is_active true)]
-      (when (pass/verify-password password (:password_salt user) (:password user))
-        {:id (create-session! user)}))
-
-    ;; If nothing succeeded complain about it
-    ;; Don't leak whether the account doesn't exist or the password was incorrect
-    (throw (ex-info "Password did not match stored password." {:status-code 400
-                                                               :errors      {:password "did not match stored password"}}))))
+  (or (ldap-login username password)  ; First try LDAP if it's enabled
+      (email-login username password) ; Then try local authentication
+      ;; If nothing succeeded complain about it
+      ;; Don't leak whether the account doesn't exist or the password was incorrect
+      (throw (ex-info "Password did not match stored password." {:status-code 400
+                                                                 :errors      {:password "did not match stored password"}}))))
+
 
 (api/defendpoint DELETE "/"
   "Logout."
diff --git a/src/metabase/api/table.clj b/src/metabase/api/table.clj
index 6c5acaa84d14d5fe8466c75ad13035d21f04777f..c59e6e7326ce77ef6635896031ad2ec59173fbc2 100644
--- a/src/metabase/api/table.clj
+++ b/src/metabase/api/table.clj
@@ -4,13 +4,15 @@
             [compojure.core :refer [GET PUT]]
             [medley.core :as m]
             [metabase
-             [sync-database :as sync-database]
+             [driver :as driver]
+             [sync :as sync]
              [util :as u]]
             [metabase.api.common :as api]
             [metabase.models
              [card :refer [Card]]
-             [database :as database]
-             [field :refer [Field]]
+             [database :as database :refer [Database]]
+             [field :refer [Field with-normal-values]]
+             [field-values :as fv]
              [interface :as mi]
              [table :as table :refer [Table]]]
             [metabase.util.schema :as su]
@@ -44,41 +46,161 @@
   (-> (api/read-check Table id)
       (hydrate :db :pk_field)))
 
-(defn- visible-state?
-  "only the nil state is considered visible."
-  [state]
-  {:pre [(or (nil? state) (table/visibility-types state))]}
-  (if (nil? state)
-    :show
-    :hide))
 
 (api/defendpoint PUT "/:id"
   "Update `Table` with ID."
-  [id :as {{:keys [display_name entity_type visibility_type description caveats points_of_interest show_in_getting_started]} :body}]
-  {display_name    (s/maybe su/NonBlankString)
-   entity_type     (s/maybe TableEntityType)
-   visibility_type (s/maybe TableVisibilityType)}
+  [id :as {{:keys [display_name entity_type visibility_type description caveats points_of_interest show_in_getting_started], :as body} :body}]
+  {display_name            (s/maybe su/NonBlankString)
+   entity_type             (s/maybe TableEntityType)
+   visibility_type         (s/maybe TableVisibilityType)
+   description             (s/maybe su/NonBlankString)
+   caveats                 (s/maybe su/NonBlankString)
+   points_of_interest      (s/maybe su/NonBlankString)
+   show_in_getting_started (s/maybe s/Bool)}
   (api/write-check Table id)
-  (let [original-visibility-type (:visibility_type (Table :id id))]
-    (api/check-500 (db/update-non-nil-keys! Table id
-                     :display_name            display_name
-                     :caveats                 caveats
-                     :points_of_interest      points_of_interest
-                     :show_in_getting_started show_in_getting_started
-                     :entity_type             entity_type
-                     :description             description))
-    (api/check-500 (db/update! Table id, :visibility_type visibility_type))
-    (let [updated-table (Table id)
-          new-visibility (visible-state? (:visibility_type updated-table))
-          old-visibility (visible-state? original-visibility-type)
-          visibility-changed? (and (not= new-visibility
-                                         old-visibility)
-                                   (= :show new-visibility))]
-      (when visibility-changed?
-        (log/debug (u/format-color 'green "Table visibility changed, resyncing %s -> %s : %s") original-visibility-type visibility_type visibility-changed?)
-        (sync-database/sync-table! updated-table))
+  (let [original-visibility-type (db/select-one-field :visibility_type Table :id id)]
+    ;; always update visibility type; update display_name, show_in_getting_started, entity_type if non-nil; update description and related fields if passed in
+    (api/check-500
+     (db/update! Table id
+       (assoc (u/select-keys-when body
+                :non-nil [:display_name :show_in_getting_started :entity_type]
+                :present [:description :caveats :points_of_interest])
+         :visibility_type visibility_type)))
+    (let [updated-table   (Table id)
+          now-visible?    (nil? (:visibility_type updated-table)) ; only Tables with `nil` visibility type are visible
+          was-visible?    (nil? original-visibility-type)
+          became-visible? (and now-visible? (not was-visible?))]
+      (when became-visible?
+        (log/info (u/format-color 'green "Table '%s' is now visible. Resyncing." (:name updated-table)))
+        (sync/sync-table! updated-table))
       updated-table)))
 
+(def ^:private dimension-options
+  (let [default-entry ["Auto bin" ["default"]]]
+    (zipmap (range)
+            (concat
+             (map (fn [[name param]]
+                    {:name name
+                     :mbql ["datetime-field" nil param]
+                     :type "type/DateTime"})
+                  [["Minute" "minute"]
+                   ["Minute of Hour" "minute-of-hour"]
+                   ["Hour" "hour"]
+                   ["Hour of Day" "hour-of-day"]
+                   ["Day" "day"]
+                   ["Day of Week" "day-of-week"]
+                   ["Day of Month" "day-of-month"]
+                   ["Day of Year" "day-of-year"]
+                   ["Week" "week"]
+                   ["Week of Year" "week-of-year"]
+                   ["Month" "month"]
+                   ["Month of Year" "month-of-year"]
+                   ["Quarter" "quarter"]
+                   ["Quarter of Year" "quarter-of-year"]
+                   ["Year" "year"]])
+             (conj
+               (mapv (fn [[name params]]
+                      {:name name
+                       :mbql (apply vector "binning-strategy" nil params)
+                       :type "type/Number"})
+                    [default-entry
+                     ["10 bins" ["num-bins" 10]]
+                     ["50 bins" ["num-bins" 50]]
+                     ["100 bins" ["num-bins" 100]]])
+               {:name "Don't bin"
+                :mbql nil
+                :type "type/Number"}
+               )
+             (conj
+               (mapv (fn [[name params]]
+                      {:name name
+                       :mbql (apply vector "binning-strategy" nil params)
+                       :type "type/Coordinate"})
+                    [default-entry
+                     ["Bin every 1 degree" ["bin-width" 1.0]]
+                     ["Bin every 10 degrees" ["bin-width" 10.0]]
+                     ["Bin every 20 degrees" ["bin-width" 20.0]]
+                     ["Bin every 50 degrees" ["bin-width" 50.0]]])
+               {:name "Don't bin"
+                :mbql nil
+                :type "type/Coordinate"}
+              )))))
+
+(def ^:private dimension-options-for-response
+  (m/map-kv (fn [k v]
+              [(str k) v]) dimension-options))
+
+(defn- create-dim-index-seq [dim-type]
+  (->> dimension-options
+       (m/filter-kv (fn [k v] (= (:type v) dim-type)))
+       keys
+       sort
+       (map str)))
+
+(def ^:private datetime-dimension-indexes
+  (create-dim-index-seq "type/DateTime"))
+
+(def ^:private numeric-dimension-indexes
+  (create-dim-index-seq "type/Number"))
+
+(def ^:private coordinate-dimension-indexes
+  (create-dim-index-seq "type/Coordinate"))
+
+(defn- dimension-index-for-type [dim-type pred]
+  (first (m/find-first (fn [[k v]]
+                         (and (= dim-type (:type v))
+                              (pred v))) dimension-options-for-response)))
+
+(def ^:private date-default-index
+  (dimension-index-for-type "type/DateTime" #(= "Day" (:name %))))
+
+(def ^:private numeric-default-index
+  (dimension-index-for-type "type/Number" #(.contains ^String (:name %) "Auto bin")))
+
+(def ^:private coordinate-default-index
+  (dimension-index-for-type "type/Coordinate" #(.contains ^String (:name %) "Auto bin")))
+
+(defn- assoc-field-dimension-options [{:keys [base_type special_type fingerprint] :as field}]
+  (let [{min_value :min, max_value :max} (get-in fingerprint [:type :type/Number])
+        [default-option all-options] (cond
+
+                                       (isa? base_type :type/DateTime)
+                                       [date-default-index datetime-dimension-indexes]
+
+                                       (and min_value max_value
+                                            (isa? special_type :type/Coordinate))
+                                       [coordinate-default-index coordinate-dimension-indexes]
+
+                                       (and min_value max_value
+                                            (isa? base_type :type/Number)
+                                            (or (nil? special_type) (isa? special_type :type/Number)))
+                                       [numeric-default-index numeric-dimension-indexes]
+
+                                       :else
+                                       [nil []])]
+    (assoc field
+      :default_dimension_option default-option
+      :dimension_options all-options)))
+
+(defn- assoc-dimension-options [resp driver]
+  (if (and driver (contains? (driver/features driver) :binning))
+    (-> resp
+        (assoc :dimension_options dimension-options-for-response)
+        (update :fields #(mapv assoc-field-dimension-options %)))
+    (-> resp
+        (assoc :dimension_options [])
+        (update :fields (fn [fields]
+                          (mapv #(assoc %
+                                   :dimension_options []
+                                   :default_dimension_option nil) fields))))))
+
+(defn- format-fields-for-response [resp]
+  (update resp :fields
+          (fn [fields]
+            (for [{:keys [values] :as field} fields]
+              (if (seq values)
+                (update field :values fv/field-values->pairs)
+                field)))))
 
 (api/defendpoint GET "/:id/query_metadata"
   "Get metadata about a `Table` useful for running queries.
@@ -88,15 +210,20 @@
   will any of its corresponding values be returned. (This option is provided for use in the Admin Edit Metadata page)."
   [id include_sensitive_fields]
   {include_sensitive_fields (s/maybe su/BooleanString)}
-  (-> (api/read-check Table id)
-      (hydrate :db [:fields :target] :field_values :segments :metrics)
-      (m/dissoc-in [:db :details])
-      (update-in [:fields] (if (Boolean/parseBoolean include_sensitive_fields)
-                             ;; If someone passes include_sensitive_fields return hydrated :fields as-is
-                             identity
-                             ;; Otherwise filter out all :sensitive fields
-                             (partial filter (fn [{:keys [visibility_type]}]
-                                               (not= (keyword visibility_type) :sensitive)))))))
+  (let [table (api/read-check Table id)
+        driver (driver/engine->driver (db/select-one-field :engine Database :id (:db_id table)))]
+    (-> table
+        (hydrate :db [:fields :target :dimensions] :segments :metrics)
+        (update :fields with-normal-values)
+        (m/dissoc-in [:db :details])
+        (assoc-dimension-options driver)
+        format-fields-for-response
+        (update-in [:fields] (if (Boolean/parseBoolean include_sensitive_fields)
+                               ;; If someone passes include_sensitive_fields return hydrated :fields as-is
+                               identity
+                               ;; Otherwise filter out all :sensitive fields
+                               (partial filter (fn [{:keys [visibility_type]}]
+                                                 (not= (keyword visibility_type) :sensitive))))))))
 
 (defn- card-result-metadata->virtual-fields
   "Return a sequence of 'virtual' fields metadata for the 'virtual' table for a Card in the Saved Questions 'virtual' database."
@@ -119,7 +246,7 @@
     (cond-> {:id           (str "card__" (u/get-id card))
              :db_id        database/virtual-id
              :display_name (:name card)
-             :schema       (get-in card [:collection :name] "All questions")
+             :schema       (get-in card [:collection :name] "Everything else")
              :description  (:description card)}
       include-fields? (assoc :fields (card-result-metadata->virtual-fields (u/get-id card) (:result_metadata card))))))
 
diff --git a/src/metabase/cmd/load_from_h2.clj b/src/metabase/cmd/load_from_h2.clj
index 3fa41d4596abf8c1bcee0918eb80a5fd1d7e2247..c232068fe9ef386af399850954765f11063e2465 100644
--- a/src/metabase/cmd/load_from_h2.clj
+++ b/src/metabase/cmd/load_from_h2.clj
@@ -35,6 +35,7 @@
              [dashboard-favorite :refer [DashboardFavorite]]
              [database :refer [Database]]
              [dependency :refer [Dependency]]
+             [dimension :refer [Dimension]]
              [field :refer [Field]]
              [field-values :refer [FieldValues]]
              [label :refer [Label]]
@@ -48,8 +49,6 @@
              [pulse-card :refer [PulseCard]]
              [pulse-channel :refer [PulseChannel]]
              [pulse-channel-recipient :refer [PulseChannelRecipient]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]
              [revision :refer [Revision]]
              [segment :refer [Segment]]
              [session :refer [Session]]
@@ -67,8 +66,6 @@
    This is done so we make sure that we load load instances of entities before others
    that might depend on them, e.g. `Databases` before `Tables` before `Fields`."
   [Database
-   RawTable
-   RawColumn
    User
    Setting
    Dependency
@@ -100,6 +97,7 @@
    Collection
    CollectionRevision
    DashboardFavorite
+   Dimension
    ;; migrate the list of finished DataMigrations as the very last thing (all models to copy over should be listed above this line)
    DataMigrations])
 
diff --git a/src/metabase/db.clj b/src/metabase/db.clj
index 5a33009f5329cd3e3a9f01bd597fba0ee5a016b2..464d6b0ff8bd2983aa7368e96f684b2efbac60b1 100644
--- a/src/metabase/db.clj
+++ b/src/metabase/db.clj
@@ -397,8 +397,8 @@
 (defn join
   "Convenience for generating a HoneySQL `JOIN` clause.
 
-     (db/select-ids Table
-       (mdb/join [Table :raw_table_id] [RawTable :id])
+     (db/select-ids FieldValues
+       (mdb/join [FieldValues :field_id] [Field :id])
        :active true)"
   [[source-entity fk] [dest-entity pk]]
   {:left-join [(db/resolve-model dest-entity) [:= (db/qualify source-entity fk)
diff --git a/src/metabase/db/metadata_queries.clj b/src/metabase/db/metadata_queries.clj
index 1d0ff0b45297c6c5dfdabaf1491d854cfa65fb7c..a4773b26a542e7903df58acb75432724a1cde9f2 100644
--- a/src/metabase/db/metadata_queries.clj
+++ b/src/metabase/db/metadata_queries.clj
@@ -1,13 +1,17 @@
 (ns metabase.db.metadata-queries
   "Predefined MBQL queries for getting metadata about an external database."
-  (:require [metabase
+  (:require [clojure.tools.logging :as log]
+            [metabase
              [query-processor :as qp]
              [util :as u]]
-            [metabase.models.table :refer [Table]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.models
+             [field-values :as field-values]
+             [table :refer [Table]]]
+            [metabase.query-processor.middleware.expand :as ql]
             [toucan.db :as db]))
 
 (defn- qp-query [db-id query]
+  {:pre [(integer? db-id)]}
   (-> (qp/process-query
        {:type     :query
         :database db-id
@@ -18,6 +22,7 @@
 (defn- field-query [{table-id :table_id} query]
   {:pre [(integer? table-id)]}
   (qp-query (db/select-one-field :db_id Table, :id table-id)
+            ;; this seemingly useless `merge` statement IS in fact doing something important. `ql/query` is a threading macro for building queries. Do not remove
             (ql/query (merge query)
                       (ql/source-table table-id))))
 
@@ -26,15 +31,21 @@
   [table]
   {:pre  [(map? table)]
    :post [(integer? %)]}
-  (-> (qp-query (:db_id table) (ql/query (ql/source-table (:id table))
-                                         (ql/aggregation (ql/count))))
-      first first long))
+  (let [results (qp-query (:db_id table) (ql/query (ql/source-table (u/get-id table))
+                                                   (ql/aggregation (ql/count))))]
+    (try (-> results first first long)
+         (catch Throwable e
+           (log/error "Error fetching table row count. Query returned:\n"
+                      (u/pprint-to-str results))
+           (throw e)))))
 
 (defn field-distinct-values
   "Return the distinct values of FIELD.
    This is used to create a `FieldValues` object for `:type/Category` Fields."
   ([field]
-   (field-distinct-values field @(resolve 'metabase.sync-database.analyze/low-cardinality-threshold)))
+   ;; fetch up to one more value than allowed for FieldValues. e.g. if the max is 100 distinct values fetch up to 101.
+   ;; That way we will know if we're over the limit
+   (field-distinct-values field (inc field-values/low-cardinality-threshold)))
   ([field max-results]
    {:pre [(integer? max-results)]}
    (mapv first (field-query field (-> {}
@@ -43,14 +54,41 @@
 
 (defn field-distinct-count
   "Return the distinct count of FIELD."
-  [{field-id :id, :as field} & [limit]]
+  [field & [limit]]
   (-> (field-query field (-> {}
-                             (ql/aggregation (ql/distinct (ql/field-id field-id)))
+                             (ql/aggregation (ql/distinct (ql/field-id (u/get-id field))))
                              (ql/limit limit)))
       first first int))
 
 (defn field-count
   "Return the count of FIELD."
-  [{field-id :id :as field}]
-  (-> (field-query field (ql/aggregation {} (ql/count (ql/field-id field-id))))
+  [field]
+  (-> (field-query field (ql/aggregation {} (ql/count (ql/field-id (u/get-id field)))))
       first first int))
+
+(defn db-id
+  "Fetch the database ID of a given entity."
+  [x]
+  (db/select-one-field :db_id 'Table :id (:table_id x)))
+
+(defn field-values
+  "Return all the values of FIELD for QUERY."
+  [field query]
+  (->> (qp/process-query
+         {:type     :query
+          :database (db-id field)
+          :query    (merge {:fields       [[:field-id (:id field)]]
+                            :source-table (:table_id field)}
+                           query)})
+       :data
+       :rows
+       (map first)))
+
+(defn query-values
+  "Return all values for QUERY."
+  [db-id query]
+  (-> (qp/process-query
+        {:type     :query
+         :database db-id
+         :query    query})
+      :data))
diff --git a/src/metabase/db/migrations.clj b/src/metabase/db/migrations.clj
index 716c65bdd6c556f9eb98424b4730798adafd6cd6..d89a9a23396ce9a65a20823241cc1885eb799461 100644
--- a/src/metabase/db/migrations.clj
+++ b/src/metabase/db/migrations.clj
@@ -25,8 +25,6 @@
              [permissions-group :as perm-group]
              [permissions-group-membership :as perm-membership :refer [PermissionsGroupMembership]]
              [query-execution :as query-execution :refer [QueryExecution]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]
              [setting :as setting :refer [Setting]]
              [table :as table :refer [Table]]
              [user :refer [User]]]
@@ -152,60 +150,6 @@
       :visibility_type "normal")))
 
 
-;; populate RawTable and RawColumn information
-;; NOTE: we only handle active Tables/Fields and we skip any FK relationships (they can safely populate later)
-;; TODO - this function is way to big and hard to read -- See https://github.com/metabase/metabase/wiki/Metabase-Clojure-Style-Guide#break-up-larger-functions
-(defmigration ^{:author "agilliland",:added "0.17.0"} create-raw-tables
-  (when (zero? (db/count RawTable))
-    (binding [db/*disable-db-logging* true]
-      (db/transaction
-       (doseq [{database-id :id, :keys [name engine]} (db/select Database)]
-         (when-let [tables (not-empty (db/select Table, :db_id database-id, :active true))]
-           (log/info (format "Migrating raw schema information for %s database '%s'" engine name))
-           (let [processed-tables (atom #{})]
-             (doseq [{table-id :id, table-schema :schema, table-name :name} tables]
-               ;; this check gaurds against any table that appears in the schema multiple times
-               (if (contains? @processed-tables {:schema table-schema, :name table-name})
-                 ;; this is a dupe of this table, retire it and it's fields
-                 (table/retire-tables! #{table-id})
-                 ;; this is the first time we are encountering this table, so migrate it
-                 (do
-                   ;; add this table to the set of tables we've processed
-                   (swap! processed-tables conj {:schema table-schema, :name table-name})
-                   ;; create the RawTable
-                   (let [{raw-table-id :id} (db/insert! RawTable
-                                              :database_id database-id
-                                              :schema      table-schema
-                                              :name        table-name
-                                              :details     {}
-                                              :active      true)]
-                     ;; update the Table and link it with the RawTable
-                     (db/update! Table table-id
-                       :raw_table_id raw-table-id)
-                     ;; migrate all Fields in the Table (skipping :dynamic-schema dbs)
-                     (when-not (driver/driver-supports? (driver/engine->driver engine) :dynamic-schema)
-                       (let [processed-fields (atom #{})]
-                         (doseq [{field-id :id, column-name :name, :as field} (db/select Field, :table_id table-id, :visibility_type [:not= "retired"])]
-                           ;; guard against duplicate fields with the same name
-                           (if (contains? @processed-fields column-name)
-                             ;; this is a dupe, disable it
-                             (db/update! Field field-id
-                               :visibility_type "retired")
-                             ;; normal unmigrated field, so lets use it
-                             (let [{raw-column-id :id} (db/insert! RawColumn
-                                                         :raw_table_id raw-table-id
-                                                         :name         column-name
-                                                         :is_pk        (= :id (:special_type field))
-                                                         :details      {:base-type (:base_type field)}
-                                                         :active       true)]
-                               ;; update the Field and link it with the RawColumn
-                               (db/update! Field field-id
-                                 :raw_column_id raw-column-id
-                                 :last_analyzed (u/new-sql-timestamp))
-                               ;; add this column to the set we've processed already
-                               (swap! processed-fields conj column-name)))))))))))))))))
-
-
 ;;; +------------------------------------------------------------------------------------------------------------------------+
 ;;; |                                                     PERMISSIONS v1                                                     |
 ;;; +------------------------------------------------------------------------------------------------------------------------+
@@ -369,3 +313,15 @@
 (defmigration ^{:author "camsaul", :added "0.23.0"} drop-old-query-execution-table
   ;; DROP TABLE IF EXISTS should work on Postgres, MySQL, and H2
   (jdbc/execute! (db/connection) [(format "DROP TABLE IF EXISTS %s;" ((db/quote-fn) "query_queryexecution"))]))
+
+;; There's a window in the 0.23.0 and 0.23.1 releases where the
+;; site-url could be persisted without a protocol specified. Other
+;; areas of the application expect that site-url will always include
+;; http/https. This migration ensures that if we have a site-url
+;; stored it has the current defaulting logic applied to it
+(defmigration ^{:author "senior", :added "0.25.1"} ensure-protocol-specified-in-site-url
+  (let [stored-site-url (db/select-one-field :value Setting :key "site-url")
+        defaulted-site-url (public-settings/site-url stored-site-url)]
+    (when (and stored-site-url
+               (not= stored-site-url defaulted-site-url))
+      (setting/set! "site-url" stored-site-url))))
diff --git a/src/metabase/driver.clj b/src/metabase/driver.clj
index ae8af15930d621d83489f1aa91a64aa04306185a..06d5200b0328efa654eb21f5f401cf90c50b54a8 100644
--- a/src/metabase/driver.clj
+++ b/src/metabase/driver.clj
@@ -1,6 +1,5 @@
 (ns metabase.driver
-  (:require [clojure.math.numeric-tower :as math]
-            [clojure.tools.logging :as log]
+  (:require [clojure.tools.logging :as log]
             [medley.core :as m]
             [metabase.models
              [database :refer [Database]]
@@ -50,10 +49,6 @@
 
    This name should be a \"nice-name\" that we'll display to the user."
 
-  (analyze-table ^java.util.Map [this, ^TableInstance table, ^java.util.Set new-field-ids]
-    "*OPTIONAL*. Return a map containing information that provides optional analysis values for TABLE.
-     Output should match the `AnalyzeTable` schema.")
-
   (can-connect? ^Boolean [this, ^java.util.Map details-map]
     "Check whether we can connect to a `Database` with DETAILS-MAP and perform a simple query. For example, a SQL database might
      try running a query like `SELECT 1;`. This function should return `true` or `false`.")
@@ -68,16 +63,16 @@
   (describe-database ^java.util.Map [this, ^DatabaseInstance database]
     "Return a map containing information that describes all of the schema settings in DATABASE, most notably a set of tables.
      It is expected that this function will be peformant and avoid draining meaningful resources of the database.
-     Results should match the `DescribeDatabase` schema.")
+     Results should match the `DatabaseMetadata` schema.")
 
-  (describe-table ^java.util.Map [this, ^DatabaseInstance database, ^java.util.Map table]
+  (describe-table ^java.util.Map [this, ^DatabaseInstance database, ^TableInstance table]
     "Return a map containing information that describes the physical schema of TABLE.
      It is expected that this function will be peformant and avoid draining meaningful resources of the database.
-     Results should match the `DescribeTable` schema.")
+     Results should match the `TableMetadata` schema.")
 
-  (describe-table-fks ^java.util.Set [this, ^DatabaseInstance database, ^java.util.Map table]
+  (describe-table-fks ^java.util.Set [this, ^DatabaseInstance database, ^TableInstance table]
     "*OPTIONAL*, BUT REQUIRED FOR DRIVERS THAT SUPPORT `:foreign-keys`*
-     Results should match the `DescribeTableFKs` schema.")
+     Results should match the `FKMetadata` schema.")
 
   (details-fields ^clojure.lang.Sequential [this]
     "A vector of maps that contain information about connection properties that should
@@ -198,51 +193,14 @@
 
   (table-rows-seq ^clojure.lang.Sequential [this, ^DatabaseInstance database, ^java.util.Map table]
     "*OPTIONAL*. Return a sequence of *all* the rows in a given TABLE, which is guaranteed to have at least `:name` and `:schema` keys.
+     (It is guaranteed to satisfy the `DatabaseMetadataTable` schema in `metabase.sync.interface`.)
      Currently, this is only used for iterating over the values in a `_metabase_metadata` table. As such, the results are not expected to be returned lazily.
      There is no expectation that the results be returned in any given order."))
 
 
-(defn- percent-valid-urls
-  "Recursively count the values of non-nil values in VS that are valid URLs, and return it as a percentage."
-  [vs]
-  (loop [valid-count 0, non-nil-count 0, [v & more :as vs] vs]
-    (cond (not (seq vs)) (if (zero? non-nil-count) 0.0
-                             (float (/ valid-count non-nil-count)))
-          (nil? v)       (recur valid-count non-nil-count more)
-          :else          (let [valid? (and (string? v)
-                                           (u/is-url? v))]
-                           (recur (if valid? (inc valid-count) valid-count)
-                                  (inc non-nil-count)
-                                  more)))))
-
-(defn default-field-percent-urls
-  "Default implementation for optional driver fn `field-percent-urls` that calculates percentage in Clojure-land."
-  [driver field]
-  (->> (field-values-lazy-seq driver field)
-       (filter identity)
-       (take max-sync-lazy-seq-results)
-       percent-valid-urls))
-
-(defn default-field-avg-length
-  "Default implementation of optional driver fn `field-avg-length` that calculates the average length in Clojure-land via `field-values-lazy-seq`."
-  [driver field]
-  (let [field-values        (->> (field-values-lazy-seq driver field)
-                                 (filter identity)
-                                 (take max-sync-lazy-seq-results))
-        field-values-count (count field-values)]
-    (if (zero? field-values-count)
-      0
-      (int (math/round (/ (->> field-values
-                               (map str)
-                               (map count)
-                               (reduce +))
-                          field-values-count))))))
-
-
 (def IDriverDefaultsMixin
   "Default implementations of `IDriver` methods marked *OPTIONAL*."
-  {:analyze-table                     (constantly nil)
-   :date-interval                     (u/drop-first-arg u/relative-date)
+  {:date-interval                     (u/drop-first-arg u/relative-date)
    :describe-table-fks                (constantly nil)
    :features                          (constantly nil)
    :format-custom-field-name          (u/drop-first-arg identity)
@@ -278,16 +236,19 @@
                  :features       (features driver)})
               @registered-drivers))
 
+(defn- init-driver-in-namespace! [ns-symb]
+  (require ns-symb)
+  (if-let [register-driver-fn (ns-resolve ns-symb (symbol "-init-driver"))]
+    (register-driver-fn)
+    (log/warn (format "No -init-driver function found for '%s'" (name ns-symb)))))
+
 (defn find-and-load-drivers!
   "Search Classpath for namespaces that start with `metabase.driver.`, then `require` them and look for the `driver-init`
    function which provides a uniform way for Driver initialization to be done."
   []
   (doseq [ns-symb @u/metabase-namespace-symbols
           :when   (re-matches #"^metabase\.driver\.[a-z0-9_]+$" (name ns-symb))]
-    (require ns-symb)
-    (if-let [register-driver-fn (ns-resolve ns-symb (symbol "-init-driver"))]
-      (register-driver-fn)
-      (log/warn (format "No -init-driver function found for '%s'" (name ns-symb))))))
+    (init-driver-in-namespace! ns-symb)))
 
 (defn is-engine?
   "Is ENGINE a valid driver name?"
@@ -351,8 +312,9 @@
   [engine]
   {:pre [engine]}
   (or ((keyword engine) @registered-drivers)
-      (let [namespce (symbol (format "metabase.driver.%s" (name engine)))]
-        (u/ignore-exceptions (require namespce))
+      (let [namespace-symb (symbol (format "metabase.driver.%s" (name engine)))]
+        ;; TODO - Maybe this should throw the Exception instead of swallowing it?
+        (u/ignore-exceptions (init-driver-in-namespace! namespace-symb))
         ((keyword engine) @registered-drivers))))
 
 
@@ -368,6 +330,15 @@
       (when-let [engine (db-id->engine db-id)]
         (engine->driver engine)))))
 
+(defn ->driver
+  "Return an appropriate driver for ENGINE-OR-DATABASE-OR-DB-ID.
+   Offered since this is somewhat more flexible in the arguments it accepts."
+  ;; TODO - we should make `engine->driver` and `database-id->driver` private and just use this for everything
+  [engine-or-database-or-db-id]
+  (if (keyword? engine-or-database-or-db-id)
+    (engine->driver engine-or-database-or-db-id)
+    (database-id->driver (u/get-id engine-or-database-or-db-id))))
+
 
 ;; ## Implementation-Agnostic Driver API
 
diff --git a/src/metabase/driver/bigquery.clj b/src/metabase/driver/bigquery.clj
index 9e3d556d811790956412e5e5333c42bf0f681ce6..d63e228bb82e28ab6a2acf1c49bf2196a9ea6e59 100644
--- a/src/metabase/driver/bigquery.clj
+++ b/src/metabase/driver/bigquery.clj
@@ -21,7 +21,6 @@
              [field :as field]
              [table :as table]]
             [metabase.query-processor.util :as qputil]
-            [metabase.sync-database.analyze :as analyze]
             [metabase.util.honeysql-extensions :as hx]
             [toucan.db :as db])
   (:import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
@@ -470,8 +469,7 @@
 
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table            analyze/generic-analyze-table
-          :can-connect?             (u/drop-first-arg can-connect?)
+         {:can-connect?             (u/drop-first-arg can-connect?)
           :date-interval            (u/drop-first-arg (comp prepare-value u/relative-date))
           :describe-database        (u/drop-first-arg describe-database)
           :describe-table           (u/drop-first-arg describe-table)
@@ -503,7 +501,8 @@
           :features                 (constantly (set/union #{:basic-aggregations
                                                              :standard-deviation-aggregations
                                                              :native-parameters
-                                                             :expression-aggregations}
+                                                             :expression-aggregations
+                                                             :binning}
                                                            (when-not config/is-test?
                                                              ;; during unit tests don't treat bigquery as having FK support
                                                              #{:foreign-keys})))
diff --git a/src/metabase/driver/druid.clj b/src/metabase/driver/druid.clj
index 1570bfeadba3d0dc9e81f23de74065a8bdf4d2ac..297290189732ab2d18398cc980c0417491fe1940 100644
--- a/src/metabase/driver/druid.clj
+++ b/src/metabase/driver/druid.clj
@@ -10,7 +10,6 @@
             [metabase.models
              [field :as field]
              [table :as table]]
-            [metabase.sync-database.analyze :as analyze]
             [metabase.util.ssh :as ssh]))
 
 ;;; ### Request helper fns
@@ -144,15 +143,6 @@
                          (field-values-lazy-seq details table-name field-name total-items-fetched paging-identifiers)))))))
 
 
-(defn- analyze-table
-  "Implementation of `analyze-table` for Druid driver."
-  [driver table new-table-ids]
-  ((analyze/make-analyze-table driver
-     :field-avg-length-fn   (constantly 0) ; TODO implement this?
-     :field-percent-urls-fn (constantly 0)
-     :calculate-row-count?  false) driver table new-table-ids))
-
-
 ;;; ### DruidrDriver Class Definition
 
 (defrecord DruidDriver []
@@ -163,7 +153,6 @@
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
          {:can-connect?          (u/drop-first-arg can-connect?)
-          :analyze-table         analyze-table
           :describe-database     (u/drop-first-arg describe-database)
           :describe-table        (u/drop-first-arg describe-table)
           :details-fields        (constantly (ssh/with-tunnel-config
diff --git a/src/metabase/driver/druid/query_processor.clj b/src/metabase/driver/druid/query_processor.clj
index 205bc2a43cc1a93526498cd308ee716e5dae1f49..f72d43e1b3cafe48af4c3f85ee25b6b48eb64e45 100644
--- a/src/metabase/driver/druid/query_processor.clj
+++ b/src/metabase/driver/druid/query_processor.clj
@@ -92,10 +92,10 @@
 
 ;;; ### handle-source-table
 
-(defn- handle-source-table [_ {{source-table-name :name} :source-table} druid-query]
+(defn- handle-source-table [_ {{source-table-name :name} :source-table} query-context]
   {:pre [(or (string? source-table-name)
              (keyword? source-table-name))]}
-  (assoc druid-query :dataSource source-table-name))
+  (assoc-in query-context [:query :dataSource] source-table-name))
 
 
 ;;; ### handle-aggregation
@@ -107,7 +107,7 @@
       (instance? DateTimeField arg)))
 
 (defn- expression->field-names [{:keys [args]}]
-  {:post [(every? u/string-or-keyword? %)]}
+  {:post [(every? (some-fn keyword? string?) %)]}
   (flatten (for [arg   args
                  :when (or (field? arg)
                            (instance? Expression arg))]
@@ -216,36 +216,43 @@
   ([field output-name] (ag:filtered (filter:not (filter:nil? field))
                                     (ag:count output-name))))
 
-
-(defn- handle-aggregation [query-type {ag-type :aggregation-type, ag-field :field, output-name :output-name, custom-name :custom-name, :as ag} druid-query]
+(defn- create-aggregation-clause [output-name ag-type ag-field]
+  (let [output-name-kwd (keyword output-name)]
+    (match [ag-type ag-field]
+      ;; For 'distinct values' queries (queries with a breakout but no aggregation) just aggregate by count, but name it :___count so it gets discarded automatically
+      [nil     nil] [[(or output-name-kwd :___count)] {:aggregations [(ag:count (or output-name :___count))]}]
+
+      [:count  nil] [[(or output-name-kwd :count)] {:aggregations [(ag:count (or output-name :count))]}]
+
+      [:count    _] [[(or output-name-kwd :count)] {:aggregations [(ag:count ag-field (or output-name :count))]}]
+
+      [:avg      _] (let [count-name (name (gensym "___count_"))
+                          sum-name   (name (gensym "___sum_"))]
+                      [[(keyword count-name) (keyword sum-name) (or output-name-kwd :avg)]
+                       {:aggregations     [(ag:count ag-field count-name)
+                                           (ag:doubleSum ag-field sum-name)]
+                        :postAggregations [{:type   :arithmetic
+                                            :name   (or output-name :avg)
+                                            :fn     :/
+                                            :fields [{:type :fieldAccess, :fieldName sum-name}
+                                                     {:type :fieldAccess, :fieldName count-name}]}]}])
+      [:distinct _] [[(or output-name-kwd :distinct___count)]
+                     {:aggregations [{:type       :cardinality
+                                      :name       (or output-name :distinct___count)
+                                      :fieldNames [(->rvalue ag-field)]}]}]
+      [:sum      _] [[(or output-name-kwd :sum)] {:aggregations [(ag:doubleSum ag-field (or output-name :sum))]}]
+      [:min      _] [[(or output-name-kwd :min)] {:aggregations [(ag:doubleMin ag-field (or output-name :min))]}]
+      [:max      _] [[(or output-name-kwd :max)] {:aggregations [(ag:doubleMax ag-field (or output-name :max))]}])))
+
+(defn- handle-aggregation [query-type {ag-type :aggregation-type, ag-field :field, output-name :output-name, custom-name :custom-name, :as ag} query-context]
   (let [output-name (or custom-name output-name)]
-    (when (isa? query-type ::ag-query)
-      (merge-with concat
-        druid-query
-        (let [ag-type (when-not (= ag-type :rows) ag-type)]
-          (match [ag-type ag-field]
-            ;; For 'distinct values' queries (queries with a breakout by no aggregation) just aggregate by count, but name it :___count so it gets discarded automatically
-            [nil     nil] {:aggregations [(ag:count (or output-name :___count))]}
-
-            [:count  nil] {:aggregations [(ag:count (or output-name :count))]}
-
-            [:count    _] {:aggregations [(ag:count ag-field (or output-name :count))]}
-
-            [:avg      _] (let [count-name (name (gensym "___count_"))
-                                sum-name   (name (gensym "___sum_"))]
-                            {:aggregations     [(ag:count ag-field count-name)
-                                                (ag:doubleSum ag-field sum-name)]
-                             :postAggregations [{:type   :arithmetic
-                                                 :name   (or output-name :avg)
-                                                 :fn     :/
-                                                 :fields [{:type :fieldAccess, :fieldName sum-name}
-                                                          {:type :fieldAccess, :fieldName count-name}]}]})
-            [:distinct _] {:aggregations [{:type       :cardinality
-                                           :name       (or output-name :distinct___count)
-                                           :fieldNames [(->rvalue ag-field)]}]}
-            [:sum      _] {:aggregations [(ag:doubleSum ag-field (or output-name :sum))]}
-            [:min      _] {:aggregations [(ag:doubleMin ag-field (or output-name :min))]}
-            [:max      _] {:aggregations [(ag:doubleMax ag-field (or output-name :max))]}))))))
+    (if-not (isa? query-type ::ag-query)
+      query-context
+      (let [ag-type (when-not (= ag-type :rows) ag-type)
+            [projections ag-clauses] (create-aggregation-clause output-name ag-type ag-field)]
+        (-> query-context
+            (update :projections #(vec (concat % projections)))
+            (update :query #(merge-with concat % ag-clauses)))))))
 
 (defn- add-expression-aggregation-output-names [args]
   (for [arg args]
@@ -276,19 +283,20 @@
                     (expression->actual-ags arg)
                     [arg]))))
 
-(defn- handle-expression-aggregation [query-type {:keys [operator args], :as expression} druid-query]
+(defn- handle-expression-aggregation [query-type {:keys [operator args], :as expression} query-context]
   ;; filter out constants from the args list
-  (let [expression  (update expression :args add-expression-aggregation-output-names)
-        ags         (expression->actual-ags expression)
-        druid-query (handle-aggregations query-type {:aggregation ags} druid-query)]
-    (merge-with concat
-      druid-query
-      {:postAggregations [(expression-post-aggregation expression)]})))
-
-(defn- handle-aggregations [query-type {aggregations :aggregation} druid-query]
-  (loop [[ag & more] aggregations, query druid-query]
+  (let [expression    (update expression :args add-expression-aggregation-output-names)
+        ags           (expression->actual-ags expression)
+        query-context (handle-aggregations query-type {:aggregation ags} query-context)
+        post-agg      (expression-post-aggregation expression)]
+    (-> query-context
+        (update :projections conj (keyword (:name post-agg)))
+        (update :query #(merge-with concat % {:postAggregations [post-agg]})))))
+
+(defn- handle-aggregations [query-type {aggregations :aggregation} query-context]
+  (loop [[ag & more] aggregations, query query-context]
     (if (instance? Expression ag)
-      (handle-expression-aggregation query-type ag druid-query)
+      (handle-expression-aggregation query-type ag query-context)
       (let [query (handle-aggregation query-type ag query)]
         (if-not (seq more)
           query
@@ -397,16 +405,31 @@
 
 (defmulti ^:private handle-breakout query-type-dispatch-fn)
 
-(defmethod handle-breakout ::query [_ _ _]) ; only topN , grouped-timeseries & groupBy handle breakouts
-
-(defmethod handle-breakout ::grouped-timeseries [_ {[breakout-field] :breakout} druid-query]
-  (assoc druid-query :granularity (unit->granularity (:unit breakout-field))))
-
-(defmethod handle-breakout ::topN [_ {[breakout-field] :breakout} druid-query]
-  (assoc druid-query :dimension (->dimension-rvalue breakout-field)))
-
-(defmethod handle-breakout ::groupBy [_ {breakout-fields :breakout} druid-query]
-  (assoc druid-query :dimensions (mapv ->dimension-rvalue breakout-fields)))
+(defmethod handle-breakout ::query [_ _ query-context] ; only topN , grouped-timeseries & groupBy handle breakouts
+  query-context)
+
+(defmethod handle-breakout ::grouped-timeseries [_ {[breakout-field] :breakout} query-context]
+  (assoc-in query-context [:query :granularity] (unit->granularity (:unit breakout-field))))
+
+(defmethod handle-breakout ::topN [_ {[breakout-field] :breakout} query-context]
+  (let [dim-rvalue (->dimension-rvalue breakout-field)]
+    (-> query-context
+        (update :projections conj (keyword (if (and (map? dim-rvalue)
+                                                    (contains? dim-rvalue :outputName))
+                                             (:outputName dim-rvalue)
+                                             (name breakout-field))))
+        (assoc-in [:query :dimension] dim-rvalue))))
+
+(defmethod handle-breakout ::groupBy [_ {breakout-fields :breakout} query-context]
+  (-> query-context
+      (update :projections into (map (fn [breakout-field]
+                                       (let [dim-rvalue (->dimension-rvalue breakout-field)]
+                                         (keyword (if (and (map? dim-rvalue)
+                                                           (contains? dim-rvalue :outputName))
+                                                    (:outputName dim-rvalue)
+                                                    (name breakout-field)))))
+                                     breakout-fields))
+      (assoc-in [:query :dimensions] (mapv ->dimension-rvalue breakout-fields))))
 
 
 ;;; ### handle-filter
@@ -544,103 +567,113 @@
           :not (log/warn (u/format-color 'red "WARNING: Don't know how to negate: %s" clause)))))))
 
 
-(defn- handle-filter [_ {filter-clause :filter} druid-query]
-  (when filter-clause
+(defn- handle-filter [_ {filter-clause :filter} query-context]
+  (if-not filter-clause
+    query-context
     (let [filter    (parse-filter-clause:filter    filter-clause)
           intervals (parse-filter-clause:intervals filter-clause)]
-      (cond-> druid-query
-        (seq filter)    (assoc :filter filter)
-        (seq intervals) (assoc :intervals intervals)))))
+      (cond-> query-context
+        (seq filter)    (assoc-in [:query :filter] filter)
+        (seq intervals) (assoc-in [:query :intervals] intervals)))))
 
 
 ;;; ### handle-order-by
 
 (defmulti ^:private handle-order-by query-type-dispatch-fn)
 
-(defmethod handle-order-by ::query [_ _ _]
-  (log/warn (u/format-color 'red "Sorting with Druid is only allowed in queries that have one or more breakout columns. Ignoring :order-by clause.")))
+(defmethod handle-order-by ::query [_ _ query-context]
+  (log/warn (u/format-color 'red "Sorting with Druid is only allowed in queries that have one or more breakout columns. Ignoring :order-by clause."))
+  query-context)
 
 
-(defmethod handle-order-by ::topN [_ {[{ag-type :aggregation-type}] :aggregation, [breakout-field] :breakout, [{field :field, direction :direction}] :order-by} druid-query]
+(defmethod handle-order-by ::topN [_ {[{ag-type :aggregation-type}] :aggregation, [breakout-field] :breakout, [{field :field, direction :direction}] :order-by} query-context]
   (let [field             (->rvalue field)
         breakout-field    (->rvalue breakout-field)
         sort-by-breakout? (= field breakout-field)
         ag-field          (if (= ag-type :distinct) :distinct___count ag-type)]
-    (assoc druid-query :metric (match [sort-by-breakout? direction]
-                                 [true  :ascending]  {:type :alphaNumeric}
-                                 [true  :descending] {:type :inverted, :metric {:type :alphaNumeric}}
-                                 [false :ascending]  {:type :inverted, :metric ag-field}
-                                 [false :descending] ag-field))))
+    (assoc-in query-context [:query :metric] (match [sort-by-breakout? direction]
+                                               [true  :ascending]  {:type :alphaNumeric}
+                                               [true  :descending] {:type :inverted, :metric {:type :alphaNumeric}}
+                                               [false :ascending]  {:type :inverted, :metric ag-field}
+                                               [false :descending] ag-field))))
 
-(defmethod handle-order-by ::groupBy [_ {:keys [order-by]} druid-query]
-  (assoc-in druid-query [:limitSpec :columns] (vec (for [{:keys [field direction]} order-by]
-                                                     {:dimension (->rvalue field)
-                                                      :direction direction}))))
+(defmethod handle-order-by ::groupBy [_ {:keys [order-by]} query-context]
+  (assoc-in query-context [:query :limitSpec :columns] (vec (for [{:keys [field direction]} order-by]
+                                                              {:dimension (->rvalue field)
+                                                               :direction direction}))))
 
 ;; Handle order by timstamp field
-(defn- handle-order-by-timestamp [field direction druid-query]
-  (assoc druid-query :descending (and (instance? DateTimeField field)
-                                      (= direction :descending))))
+(defn- handle-order-by-timestamp [field direction query-context]
+  (assoc-in query-context [:query :descending] (and (instance? DateTimeField field)
+                                                    (= direction :descending))))
 
-(defmethod handle-order-by ::grouped-timeseries [_ {[{field :field, direction :direction}] :order-by} druid-query]
-  (handle-order-by-timestamp field direction druid-query))
+(defmethod handle-order-by ::grouped-timeseries [_ {[{field :field, direction :direction}] :order-by} query-context]
+  (handle-order-by-timestamp field direction query-context))
 
-(defmethod handle-order-by ::select [_ {[{field :field, direction :direction}] :order-by} druid-query]
-  (handle-order-by-timestamp field direction druid-query))
+(defmethod handle-order-by ::select [_ {[{field :field, direction :direction}] :order-by} query-context]
+  (handle-order-by-timestamp field direction query-context))
 
 ;;; ### handle-fields
 
 (defmulti ^:private handle-fields query-type-dispatch-fn)
 
-(defmethod handle-fields ::query [_ {fields :fields} _]
+(defmethod handle-fields ::query [_ {fields :fields} query-context]
   (when fields
-    (log/warn (u/format-color 'red "WARNING: It only makes sense to specify :fields for a bare rows query. Ignoring the clause."))))
+    (log/warn (u/format-color 'red "WARNING: It only makes sense to specify :fields for a bare rows query. Ignoring the clause.")))
+  query-context)
 
-(defmethod handle-fields ::select [_ {fields :fields} druid-query]
-  (when (seq fields)
-    (loop [dimensions [], metrics [], [field & more] fields]
+(defmethod handle-fields ::select [_ {fields :fields} query-context]
+  (if-not (seq fields)
+    query-context
+    (loop [dimensions [], metrics [], projections (:projections query-context), [field & more] fields]
       (cond
         ;; If you specify nil or empty `:dimensions` or `:metrics` Druid will just return all of the ones available. In cases where we don't
         ;; want anything to be returned in one or the other, we'll ask for a `:___dummy` column instead. Druid happily returns `nil` for the
         ;; column in every row, and it will get auto-filtered out of the results so the User will never see it.
-        (not field)                                 (assoc druid-query
-                                                           :dimensions (or (seq dimensions) [:___dummy])
-                                                           :metrics    (or (seq metrics)    [:___dummy]))
-        (instance? DateTimeField field)             (recur dimensions metrics more)
-        (= (dimension-or-metric? field) :dimension) (recur (conj dimensions (->rvalue field)) metrics more)
-        (= (dimension-or-metric? field) :metric)    (recur dimensions (conj metrics (->rvalue field)) more)))))
+        (not field)                                 (-> query-context
+                                                        (assoc :projections (conj projections :timestamp))
+                                                        (assoc-in [:query :dimensions] (or (seq dimensions) [:___dummy]))
+                                                        (assoc-in [:query :metrics]    (or (seq metrics)    [:___dummy])))
+        (instance? DateTimeField field)             (recur dimensions metrics projections more)
+        (= (dimension-or-metric? field) :dimension) (recur (conj dimensions (->rvalue field)) metrics (conj projections (keyword (name field))) more)
+        (= (dimension-or-metric? field) :metric)    (recur dimensions (conj metrics (->rvalue field)) (conj projections (keyword (name field))) more)))))
 
 
 ;;; ### handle-limit
 
 (defmulti ^:private handle-limit query-type-dispatch-fn)
 
-(defmethod handle-limit ::select [_ {limit :limit} druid-query]
-  (when limit
-    (assoc-in druid-query [:pagingSpec :threshold] limit)))
+(defmethod handle-limit ::select [_ {limit :limit} query-context]
+  (if-not limit
+    query-context
+    (assoc-in query-context [:query :pagingSpec :threshold] limit)))
 
-(defmethod handle-limit ::timeseries [_ {limit :limit} _]
+(defmethod handle-limit ::timeseries [_ {limit :limit} query-context]
   (when limit
-    (log/warn (u/format-color 'red "WARNING: Druid doenst allow limitSpec in timeseries queries. Ignoring the LIMIT clause."))))
+    (log/warn (u/format-color 'red "WARNING: Druid doenst allow limitSpec in timeseries queries. Ignoring the LIMIT clause.")))
+  query-context)
 
-(defmethod handle-limit ::topN [_ {limit :limit} druid-query]
-  (when limit
-    (assoc druid-query :threshold limit)))
+(defmethod handle-limit ::topN [_ {limit :limit} query-context]
+  (if-not limit
+    query-context
+    (assoc-in query-context [:query :threshold] limit)))
 
-(defmethod handle-limit ::groupBy [_ {limit :limit} druid-query]
-  (when limit
-    (-> druid-query
-        (assoc-in [:limitSpec :type]  :default)
-        (assoc-in [:limitSpec :limit] limit))))
+(defmethod handle-limit ::groupBy [_ {limit :limit} query-context]
+  (if-not limit
+    query-context
+    (-> query-context
+        (assoc-in [:query :limitSpec :type]  :default)
+        (assoc-in [:query :limitSpec :limit] limit))))
 
 
 ;;; ### handle-page TODO - no real way to implement this DB side, probably have to do Clojure-side w/ `take`/`drop`
 
 (defmulti ^:private handle-page query-type-dispatch-fn)
 
-(defmethod handle-page ::query [_ {page-clause :page} druid-query]
+(defmethod handle-page ::query [_ {page-clause :page} query-context]
   (when page-clause
-    (log/warn (u/format-color 'red "WARNING: 'page' is not yet implemented."))))
+    (log/warn (u/format-color 'red "WARNING: 'page' is not yet implemented.")))
+  query-context)
 
 
 ;;; ## Build + Log + Process Query
@@ -670,20 +703,17 @@
 (defn- build-druid-query [query]
   {:pre [(map? query)]}
   (let [query-type (druid-query-type query)]
-    (loop [druid-query (query-type->default-query query-type), [f & more] [handle-source-table
-                                                                           handle-aggregations
-                                                                           handle-breakout
-                                                                           handle-filter
-                                                                           handle-order-by
-                                                                           handle-fields
-                                                                           handle-limit
-                                                                           handle-page]]
-      (let [druid-query (or (f query-type query druid-query)
-                            druid-query)]
-        (if (seq more)
-          (recur druid-query more)
-          ;; Return pair of [query-type druid-query]
-          [query-type druid-query])))))
+    (reduce (fn [query-context f]
+              (f query-type query query-context))
+            {:projections [] :query (query-type->default-query query-type) :query-type query-type :mbql? true}
+            [handle-source-table
+             handle-aggregations
+             handle-breakout
+             handle-filter
+             handle-order-by
+             handle-fields
+             handle-limit
+             handle-page])))
 
 
 ;;;  ### post-processing
@@ -707,18 +737,15 @@
   (post-process (keyword "metabase.driver.druid.query-processor" (name queryType))
                 results))
 
-
 (defn- remove-bonus-keys
   "Remove keys that start with `___` from the results -- they were temporary, and we don't want to return them."
-  [[first-row :as results]]
-  (let [keys-to-remove (for [k     (keys first-row)
+  [columns]
+  (let [keys-to-remove (for [k     columns
                              :when (re-find #"^___" (name k))]
                          k)]
     (if-not (seq keys-to-remove)
-      results
-      (for [result results]
-        (apply dissoc result keys-to-remove)))))
-
+      columns
+      (filterv (complement (set keys-to-remove)) columns))))
 
 ;;; ### MBQL Processor
 
@@ -729,9 +756,7 @@
   (let [mbql-query (assoc (:query query)
                      :settings (:settings query))]
     (binding [*query* mbql-query]
-      (let [[query-type druid-query] (build-druid-query mbql-query)]
-        {:query      druid-query
-         :query-type query-type}))))
+      (build-druid-query mbql-query))))
 
 
 (defn- columns->getter-fns
@@ -751,17 +776,21 @@
 
 (defn execute-query
   "Execute a query for a Druid DB."
-  [do-query {database :database, {:keys [query query-type mbql?]} :native}]
+  [do-query {database :database, {:keys [query query-type mbql? projections]} :native}]
   {:pre [database query]}
   (let [details    (:details database)
         query      (if (string? query)
                      (json/parse-string query keyword)
                      query)
         query-type (or query-type (keyword "metabase.driver.druid.query-processor" (name (:queryType query))))
-        results    (->> (do-query details query)
-                        (post-process query-type)
-                        remove-bonus-keys)
-        columns    (keys (first results))
+        results     (->> query
+                         (do-query details)
+                         (post-process query-type))
+        columns    (if mbql?
+                     (->> projections
+                          remove-bonus-keys
+                          vec)
+                     (keys (first results)))
         getters    (columns->getter-fns columns)]
     ;; rename any occurances of `:timestamp___int` to `:timestamp` in the results so the user doesn't know about our behind-the-scenes conversion
     ;; and apply any other post-processing on the value such as parsing some units to int and rounding up approximate cardinality values.
diff --git a/src/metabase/driver/generic_sql.clj b/src/metabase/driver/generic_sql.clj
index eadfb16ddd560e89c2359bd327b43fd7edbf9357..d446edee7e2e9de03422eff4fb3a1e13e3390ab2 100644
--- a/src/metabase/driver/generic_sql.clj
+++ b/src/metabase/driver/generic_sql.clj
@@ -15,7 +15,6 @@
             [metabase.models
              [field :as field]
              [table :as table]]
-            [metabase.sync-database.analyze :as analyze]
             [metabase.util
              [honeysql-extensions :as hx]
              [ssh :as ssh]])
@@ -75,11 +74,6 @@
      Other drivers like BigQuery need to do additional qualification, e.g. the dataset name as well.
      (At the time of this writing, this is only used by the SQL parameters implementation; in the future it will probably be used in more places as well.)")
 
-  (field-percent-urls [this field]
-    "*OPTIONAL*. Implementation of the `:field-percent-urls-fn` to be passed to `make-analyze-table`.
-     The default implementation is `fast-field-percent-urls`, which avoids a full table scan. Substitue this with `slow-field-percent-urls` for databases
-     where this doesn't work, such as SQL Server.")
-
   (field->alias ^String [this, ^Field field]
     "*OPTIONAL*. Return the alias that should be used to for FIELD, i.e. in an `AS` clause. The default implementation calls `name`, which
      returns the *unqualified* name of `Field`.
@@ -142,7 +136,7 @@
 (defn- create-connection-pool
   "Create a new C3P0 `ComboPooledDataSource` for connecting to the given DATABASE."
   [{:keys [id engine details]}]
-  (log/debug (u/format-color 'magenta "Creating new connection pool for database %d ..." id))
+  (log/debug (u/format-color 'cyan "Creating new connection pool for database %d ..." id))
   (let [details-with-tunnel (ssh/include-ssh-tunnel details) ;; If the tunnel is disabled this returned unchanged
         spec (connection-details->spec (driver/engine->driver engine) details-with-tunnel)]
     (assoc (db/connection-pool (assoc spec
@@ -281,54 +275,6 @@
 (defn- table-rows-seq [driver database table]
   (query driver database table {:select [:*]}))
 
-(defn- field-avg-length [driver field]
-  (let [table (field/table field)
-        db    (table/database table)]
-    (or (some-> (query driver db table {:select [[(hsql/call :avg (string-length-fn driver (qualify+escape table field))) :len]]})
-                first
-                :len
-                math/round
-                int)
-        0)))
-
-(defn- url-percentage [url-count total-count]
-  (double (if (and total-count (pos? total-count) url-count)
-            ;; make sure to coerce to Double before dividing because if it's a BigDecimal division can fail for non-terminating floating-point numbers
-            (/ (double url-count)
-               (double total-count))
-            0.0)))
-
-;; TODO - Full table scan!?! Maybe just fetch first N non-nil values and do in Clojure-land instead
-(defn slow-field-percent-urls
-  "Slow implementation of `field-percent-urls` that (probably) requires a full table scan.
-   Only use this for DBs where `fast-field-percent-urls` doesn't work correctly, like SQLServer."
-  [driver field]
-  (let [table       (field/table field)
-        db          (table/database table)
-        field-k     (qualify+escape table field)
-        total-count (:count (first (query driver db table {:select [[:%count.* :count]]
-                                                           :where  [:not= field-k nil]})))
-        url-count   (:count (first (query driver db table {:select [[:%count.* :count]]
-                                                           :where  [:like field-k (hx/literal "http%://_%.__%")]})))]
-    (url-percentage url-count total-count)))
-
-
-(defn fast-field-percent-urls
-  "Fast, default implementation of `field-percent-urls` that avoids a full table scan."
-  [driver field]
-  (let [table       (field/table field)
-        db          (table/database table)
-        field-k     (qualify+escape table field)
-        pk-field    (field/Field (table/pk-field-id table))
-        results     (map :is_url (query driver db table (merge {:select [[(hsql/call :like field-k (hx/literal "http%://_%.__%")) :is_url]]
-                                                                :where  [:not= field-k nil]
-                                                                :limit  driver/max-sync-lazy-seq-results}
-                                                               (when pk-field
-                                                                 {:order-by [[(qualify+escape table pk-field) :asc]]}))))
-        total-count (count results)
-        url-count   (count (filter #(or (true? %) (= % 1)) results))]
-    (url-percentage url-count total-count)))
-
 
 (defn features
   "Default implementation of `IDriver` `features` for SQL drivers."
@@ -339,7 +285,8 @@
             :expressions
             :expression-aggregations
             :native-parameters
-            :nested-queries}
+            :nested-queries
+            :binning}
     (set-timezone-sql driver) (conj :set-timezone)))
 
 
@@ -427,17 +374,6 @@
             :dest-column-name (:pkcolumn_name result)}))))
 
 
-(defn analyze-table
-  "Default implementation of `analyze-table` for SQL drivers."
-  [driver table new-table-ids]
-  ((analyze/make-analyze-table driver
-     :field-avg-length-fn   (partial field-avg-length driver)
-     :field-percent-urls-fn (partial field-percent-urls driver))
-   driver
-   table
-   new-table-ids))
-
-
 (defn ISQLDriverDefaultsMixin
   "Default implementations for methods in `ISQLDriver`."
   []
@@ -456,7 +392,6 @@
    :excluded-schemas     (constantly nil)
    :field->identifier    (u/drop-first-arg (comp (partial apply hsql/qualify) field/qualified-name-components))
    :field->alias         (u/drop-first-arg name)
-   :field-percent-urls   fast-field-percent-urls
    :prepare-sql-param    (u/drop-first-arg identity)
    :prepare-value        (u/drop-first-arg :value)
    :quote-style          (constantly :ansi)
@@ -469,8 +404,7 @@
   []
   (require 'metabase.driver.generic-sql.query-processor)
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table           analyze-table
-          :can-connect?            can-connect?
+         {:can-connect?            can-connect?
           :describe-database       describe-database
           :describe-table          describe-table
           :describe-table-fks      describe-table-fks
diff --git a/src/metabase/driver/generic_sql/query_processor.clj b/src/metabase/driver/generic_sql/query_processor.clj
index cdb3391ed95ef699ea7799509dc72c978f95d4ed..8f1876938a95807418f2ab99feed9bd89268ccbd 100644
--- a/src/metabase/driver/generic_sql/query_processor.clj
+++ b/src/metabase/driver/generic_sql/query_processor.clj
@@ -18,7 +18,7 @@
             [metabase.util.honeysql-extensions :as hx])
   (:import clojure.lang.Keyword
            java.sql.SQLException
-           [metabase.query_processor.interface AgFieldRef DateTimeField DateTimeValue Expression ExpressionRef Field FieldLiteral RelativeDateTimeValue Value]))
+           [metabase.query_processor.interface AgFieldRef BinnedField DateTimeField DateTimeValue Expression ExpressionRef Field FieldLiteral RelativeDateTimeValue Value]))
 
 (def ^:dynamic *query*
   "The outer query currently being processed."
@@ -41,11 +41,18 @@
 
 ;;; ## Formatting
 
+(defn- qualified-alias
+  "Convert the given `FIELD` to a stringified alias"
+  [field]
+  (some->> field
+           (sql/field->alias (driver))
+           hx/qualify-and-escape-dots))
+
 (defn as
   "Generate a FORM `AS` FIELD alias using the name information of FIELD."
   [form field]
-  (if-let [alias (sql/field->alias (driver) field)]
-    [form (hx/qualify-and-escape-dots alias)]
+  (if-let [alias (qualified-alias field)]
+    [form alias]
     form))
 
 ;; TODO - Consider moving this into query processor interface and making it a method on `ExpressionRef` instead ?
@@ -104,6 +111,21 @@
   (formatted [{unit :unit, field :field}]
     (sql/date (driver) unit (formatted field)))
 
+  BinnedField
+  (formatted [{:keys [bin-width min-value max-value field]}]
+    (let [formatted-field (formatted field)]
+      ;;
+      ;; Equation is | (value - min) |
+      ;;             | ------------- | * bin-width + min-value
+      ;;             |_  bin-width  _|
+      ;;
+      (-> formatted-field
+          (hx/- min-value)
+          (hx// bin-width)
+          hx/floor
+          (hx/* bin-width)
+          (hx/+ min-value))))
+
   ;; e.g. the ["aggregation" 0] fields we allow in order-by
   AgFieldRef
   (formatted [{index :index}]
@@ -181,17 +203,14 @@
         form
         (recur form more)))))
 
-
 (defn apply-breakout
   "Apply a `breakout` clause to HONEYSQL-FORM. Default implementation of `apply-breakout` for SQL drivers."
-  [_ honeysql-form {breakout-fields :breakout, fields-fields :fields}]
-  (-> honeysql-form
-      ;; Group by all the breakout fields
-      ((partial apply h/group) (map formatted breakout-fields))
-      ;; Add fields form only for fields that weren't specified in :fields clause -- we don't want to include it twice, or HoneySQL will barf
-      ((partial apply h/merge-select) (for [field breakout-fields
-                                            :when (not (contains? (set fields-fields) field))]
-                                        (as (formatted field) field)))))
+  [_ honeysql-form {breakout-fields :breakout, fields-fields :fields :as query}]
+  (as-> honeysql-form new-hsql
+    (apply h/merge-select new-hsql (for [field breakout-fields
+                                         :when (not (contains? (set fields-fields) field))]
+                                     (as (formatted field) field)))
+    (apply h/group new-hsql (map formatted breakout-fields))))
 
 (defn apply-fields
   "Apply a `fields` clause to HONEYSQL-FORM. Default implementation of `apply-fields` for SQL drivers."
@@ -250,14 +269,15 @@
 
 (defn apply-order-by
   "Apply `order-by` clause to HONEYSQL-FORM. Default implementation of `apply-order-by` for SQL drivers."
-  [_ honeysql-form {subclauses :order-by}]
-  (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses]
-    (let [honeysql-form (h/merge-order-by honeysql-form [(formatted field) (case direction
-                                                                             :ascending  :asc
-                                                                             :descending :desc)])]
-      (if (seq more)
-        (recur honeysql-form more)
-        honeysql-form))))
+  [_ honeysql-form {subclauses :order-by breakout-fields :breakout}]
+  (let [[{:keys [special-type] :as first-breakout-field}] breakout-fields]
+    (loop [honeysql-form honeysql-form, [{:keys [field direction]} & more] subclauses]
+      (let [honeysql-form (h/merge-order-by honeysql-form [(formatted field) (case direction
+                                                                               :ascending  :asc
+                                                                               :descending :desc)])]
+        (if (seq more)
+          (recur honeysql-form more)
+          honeysql-form)))))
 
 (defn apply-page
   "Apply `page` clause to HONEYSQL-FORM. Default implementation of `apply-page` for SQL drivers."
diff --git a/src/metabase/driver/googleanalytics/query_processor.clj b/src/metabase/driver/googleanalytics/query_processor.clj
index cc0990c380f4708b0309793958bdfd670b8c1f31..71ab7c07497b74ff89527b6bc66d2972eba2e286 100644
--- a/src/metabase/driver/googleanalytics/query_processor.clj
+++ b/src/metabase/driver/googleanalytics/query_processor.clj
@@ -57,7 +57,7 @@
 ;;; ### source-table
 
 (defn- handle-source-table [{{source-table-name :name} :source-table}]
-  {:pre [(u/string-or-keyword? source-table-name)]}
+  {:pre [((some-fn keyword? string?) source-table-name)]}
   {:ids (str "ga:" source-table-name)})
 
 
@@ -265,7 +265,7 @@
 
 (defn- filter-type ^clojure.lang.Keyword [filter-clause]
   (when (and (sequential? filter-clause)
-             (u/string-or-keyword? (first filter-clause)))
+             ((some-fn keyword? string?) (first filter-clause)))
     (qputil/normalize-token (first filter-clause))))
 
 (defn- compound-filter? [filter-clause]
diff --git a/src/metabase/driver/mongo.clj b/src/metabase/driver/mongo.clj
index 176593d4f20820a750d7d7786e142f6cefbe472f..6ce11eb88c28d0afd906a81dc1a7524e58deb2bd 100644
--- a/src/metabase/driver/mongo.clj
+++ b/src/metabase/driver/mongo.clj
@@ -1,7 +1,7 @@
 (ns metabase.driver.mongo
   "MongoDB Driver."
   (:require [cheshire.core :as json]
-            [clojure.string :as s]
+            [clojure.string :as str]
             [clojure.tools.logging :as log]
             [metabase
              [driver :as driver]
@@ -11,9 +11,8 @@
              [util :refer [*mongo-connection* with-mongo-connection]]]
             [metabase.models
              [database :refer [Database]]
-             [field :as field]
-             [table :as table]]
-            [metabase.sync-database.analyze :as analyze]
+             [field :as field]]
+            [metabase.sync.interface :as si]
             [metabase.util.ssh :as ssh]
             [monger
              [collection :as mc]
@@ -21,6 +20,7 @@
              [conversion :as conv]
              [db :as mdb]
              [query :as mq]]
+            [schema.core :as s]
             [toucan.db :as db])
   (:import com.mongodb.DB))
 
@@ -147,18 +147,7 @@
        :fields (set (for [field (keys parsed-rows)]
                       (describe-table-field field (field parsed-rows))))})))
 
-(defn- analyze-table [table new-field-ids]
-  ;; We only care about 1) table counts and 2) field values
-  {:row_count (analyze/table-row-count table)
-   :fields    (for [{:keys [id] :as field} (table/fields table)
-                    :when (analyze/test-for-cardinality? field (contains? new-field-ids (:id field)))]
-                (analyze/test:cardinality-and-extract-field-values field {:id id}))})
-
-(defn- field-values-lazy-seq [{:keys [qualified-name-components table], :as field}]
-  (assert (and (map? field)
-               (delay? qualified-name-components)
-               (delay? table))
-    (format "Field is missing required information:\n%s" (u/pprint-to-str 'red field)))
+(s/defn ^:private ^:always-validate field-values-lazy-seq [field :- si/FieldInstance]
   (lazy-seq
    (assert *mongo-connection*
      "You must have an open Mongo connection in order to get lazy results with field-values-lazy-seq.")
@@ -166,7 +155,7 @@
          name-components (rest (field/qualified-name-components field))]
      (assert (seq name-components))
      (for [row (mq/with-collection *mongo-connection* (:name table)
-                 (mq/fields [(s/join \. name-components)]))]
+                 (mq/fields [(str/join \. name-components)]))]
        (get-in row (map keyword name-components))))))
 
 
@@ -177,8 +166,7 @@
 (u/strict-extend MongoDriver
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table                     (u/drop-first-arg analyze-table)
-          :can-connect?                      (u/drop-first-arg can-connect?)
+         {:can-connect?                      (u/drop-first-arg can-connect?)
           :describe-database                 (u/drop-first-arg describe-database)
           :describe-table                    (u/drop-first-arg describe-table)
           :details-fields                    (constantly (ssh/with-tunnel-config
diff --git a/src/metabase/driver/mongo/query_processor.clj b/src/metabase/driver/mongo/query_processor.clj
index 4cc67a2dd9fc0f5b7ed3880e00c6de51799cac6b..7eed0a7e421586f2d2c36871de5af6ef24e2ee7d 100644
--- a/src/metabase/driver/mongo/query_processor.clj
+++ b/src/metabase/driver/mongo/query_processor.clj
@@ -197,11 +197,14 @@
 
 ;;; ### initial projection
 
-(defn- add-initial-projection [query pipeline]
+(defn- add-initial-projection [query pipeline-ctx]
   (let [all-fields (distinct (annotate/collect-fields query :keep-date-time-fields))]
-    (when (seq all-fields)
-      {$project (into (array-map) (for [field all-fields]
-                                    {(->lvalue field) (->initial-rvalue field)}))})))
+    (if-not (seq all-fields)
+      pipeline-ctx
+      (let [projections (map #(vector (->lvalue %) (->initial-rvalue %)) all-fields)]
+        (-> pipeline-ctx
+            (assoc  :projections (doall (map (comp keyword first) projections)))
+            (update :query conj {$project (into (hash-map) projections)}))))))
 
 
 ;;; ### filter
@@ -232,9 +235,10 @@
     :not (parse-filter-subclause subclause :negate)
     nil  (parse-filter-subclause clause)))
 
-(defn- handle-filter [{filter-clause :filter} pipeline]
-  (when filter-clause
-    {$match (parse-filter-clause filter-clause)}))
+(defn- handle-filter [{filter-clause :filter} pipeline-ctx]
+  (if-not filter-clause
+    pipeline-ctx
+    (update pipeline-ctx :query conj {$match (parse-filter-clause filter-clause)})))
 
 
 ;;; ### aggregation
@@ -254,69 +258,78 @@
       :min      {$min (->rvalue field)}
       :max      {$max (->rvalue field)})))
 
-(defn- handle-breakout+aggregation [{breakout-fields :breakout, aggregations :aggregation} pipeline]
+(defn- handle-breakout+aggregation [{breakout-fields :breakout, aggregations :aggregation} pipeline-ctx]
   (let [aggregations? (seq aggregations)
         breakout?     (seq breakout-fields)]
-    (when (or aggregations? breakout?)
-      (filter identity
-              [ ;; create a totally sweet made-up column called __group to store the fields we'd like to group by
-               (when breakout?
-                 {$project (merge {"_id"      "$_id"
-                                   "___group" (into {} (for [field breakout-fields]
-                                                         {(->lvalue field) (->rvalue field)}))}
-                                  (into {} (for [{ag-field :field} aggregations
-                                                 :when             ag-field]
-                                             {(->lvalue ag-field) (->rvalue ag-field)})))})
-               ;; Now project onto the __group and the aggregation rvalue
-               {$group (merge {"_id" (when breakout?
-                                       "$___group")}
-                              (into {} (for [{ag-type :aggregation-type, :as aggregation} aggregations]
-                                         {(ag-type->field-name ag-type) (aggregation->rvalue aggregation)})))}
-               ;; Sort by _id (___group)
-               {$sort {"_id" 1}}
-               ;; now project back to the fields we expect
-               {$project (merge {"_id" false}
-                                (into {} (for [{ag-type :aggregation-type} aggregations]
-                                           {(ag-type->field-name ag-type) (if (= ag-type :distinct)
-                                                                            {$size "$count"} ; HACK
-                                                                            true)}))
-                                (into {} (for [field breakout-fields]
-                                           {(->lvalue field) (format "$_id.%s" (->lvalue field))})))}]))))
+    (if-not (or aggregations? breakout?)
+      pipeline-ctx
+      (let [projected-fields (concat (for [{ag-type :aggregation-type} aggregations]
+                                       [(ag-type->field-name ag-type) (if (= ag-type :distinct)
+                                                                        {$size "$count"} ; HACK
+                                                                        true)])
+                                     (for [field breakout-fields]
+                                       [(->lvalue field) (format "$_id.%s" (->lvalue field))]))]
+        (-> pipeline-ctx
+            (assoc :projections (doall (map (comp keyword first) projected-fields)))
+            (update :query into (filter identity
+                                        [ ;; create a totally sweet made-up column called __group to store the fields we'd like to group by
+                                         (when breakout?
+                                           {$project (merge {"_id"      "$_id"
+                                                             "___group" (into {} (for [field breakout-fields]
+                                                                                   {(->lvalue field) (->rvalue field)}))}
+                                                            (into {} (for [{ag-field :field} aggregations
+                                                                           :when             ag-field]
+                                                                       {(->lvalue ag-field) (->rvalue ag-field)})))})
+                                         ;; Now project onto the __group and the aggregation rvalue
+                                         {$group (merge {"_id" (when breakout?
+                                                                 "$___group")}
+                                                        (into {} (for [{ag-type :aggregation-type, :as aggregation} aggregations]
+                                                                   {(ag-type->field-name ag-type) (aggregation->rvalue aggregation)})))}
+                                         ;; Sort by _id (___group)
+                                         {$sort {"_id" 1}}
+                                         ;; now project back to the fields we expect
+                                         {$project (merge {"_id" false}
+                                                          (into {} projected-fields))}])))))))
 
 
 ;;; ### order-by
 
-(defn- handle-order-by [{:keys [order-by]} pipeline]
-  (when (seq order-by)
-    {$sort (into (array-map) (for [{:keys [field direction]} order-by]
-                               {(->lvalue field) (case direction
-                                                   :ascending   1
-                                                   :descending -1)}))}))
-
+(defn- handle-order-by [{:keys [order-by]} pipeline-ctx]
+  (if-not (seq order-by)
+    pipeline-ctx
+    (update pipeline-ctx :query conj {$sort (into (hash-map)
+                                                  (for [{:keys [field direction]} order-by]
+                                                    [(->lvalue field) (case direction
+                                                                        :ascending   1
+                                                                        :descending -1)]))})))
 
 ;;; ### fields
 
-(defn- handle-fields [{:keys [fields]} pipeline]
-  (when (seq fields)
-    ;; add project _id = false to keep _id from getting automatically returned unless explicitly specified
-    {$project (into (array-map "_id" false)
-                    (for [field fields]
-                      {(->lvalue field) (->rvalue field)}))}))
-
+(defn- handle-fields [{:keys [fields]} pipeline-ctx]
+  (if-not (seq fields)
+    pipeline-ctx
+    (let [new-projections (doall (map #(vector (->lvalue %) (->rvalue %)) fields))]
+      (-> pipeline-ctx
+          (assoc :projections (map (comp keyword first) new-projections))
+          ;; add project _id = false to keep _id from getting automatically returned unless explicitly specified
+          (update :query conj {$project (merge {"_id" false}
+                                               (into (hash-map) new-projections))})))))
 
 ;;; ### limit
 
-(defn- handle-limit [{:keys [limit]} pipeline]
-  (when limit
-    {$limit limit}))
+(defn- handle-limit [{:keys [limit]} pipeline-ctx]
+  (if-not limit
+    pipeline-ctx
+    (update pipeline-ctx :query conj {$limit limit})))
 
 
 ;;; ### page
 
-(defn- handle-page [{{page-num :page items-per-page :items, :as page-clause} :page} pipeline]
-  (when page-clause
-    [{$skip (* items-per-page (dec page-num))}
-     {$limit items-per-page}]))
+(defn- handle-page [{{page-num :page items-per-page :items, :as page-clause} :page} pipeline-ctx]
+  (if-not page-clause
+    pipeline-ctx
+    (update pipeline-ctx :query into [{$skip (* items-per-page (dec page-num))}
+                                      {$limit items-per-page}])))
 
 
 ;;; # process + run
@@ -324,21 +337,25 @@
 (defn- generate-aggregation-pipeline
   "Generate the aggregation pipeline. Returns a sequence of maps representing each stage."
   [query]
-  (loop [pipeline [], [f & more] [add-initial-projection
-                                  handle-filter
-                                  handle-breakout+aggregation
-                                  handle-order-by
-                                  handle-fields
-                                  handle-limit
-                                  handle-page]]
-    (let [out      (f query pipeline)
-          pipeline (cond
-                     (nil? out)        pipeline
-                     (map? out)        (conj pipeline out)
-                     (sequential? out) (vec (concat pipeline out)))]
-      (if-not (seq more)
-        pipeline
-        (recur pipeline more)))))
+  (reduce (fn [pipeline-ctx f]
+            (f query pipeline-ctx))
+          {:projections [], :query []}
+          [add-initial-projection
+           handle-filter
+           handle-breakout+aggregation
+           handle-order-by
+           handle-fields
+           handle-limit
+           handle-page]))
+
+(defn- create-unescaping-rename-map [original-keys]
+  (into {} (for [k original-keys]
+             (let [k-str     (name k)
+                   unescaped (-> k-str
+                                 (s/replace #"___" ".")
+                                 (s/replace #"~~~(.+)$" ""))]
+               (when-not (= k-str unescaped)
+                 {k (keyword unescaped)})))))
 
 (defn- unescape-names
   "Restore the original, unescaped nested Field names in the keys of RESULTS.
@@ -346,13 +363,7 @@
   [results]
   ;; Build a map of escaped key -> unescaped key by looking at the keys in the first result
   ;; e.g. {:source___username :source.username}
-  (let [replacements (into {} (for [k (keys (first results))]
-                                (let [k-str     (name k)
-                                      unescaped (-> k-str
-                                                    (s/replace #"___" ".")
-                                                    (s/replace #"~~~(.+)$" ""))]
-                                  (when-not (= k-str unescaped)
-                                    {k (keyword unescaped)}))))]
+  (let [replacements (create-unescaping-rename-map (keys (first results)))]
     ;; If the map is non-empty then map set/rename-keys over the results with it
     (if-not (seq replacements)
       results
@@ -402,7 +413,7 @@
      (form->encoded-fn-name [:___ObjectId \"583327789137b2700a1621fb\"]) -> :ObjectId"
   [form]
   (when (vector? form)
-    (when (u/string-or-keyword? (first form))
+    (when ((some-fn keyword? string?) (first form))
       (when-let [[_ k] (re-matches #"^___(\w+$)" (name (first form)))]
         (let [k (keyword k)]
           (when (contains? fn-name->decoder k)
@@ -449,15 +460,16 @@
   {:pre [(map? database)
          (string? source-table-name)]}
   (binding [*query* query]
-    (let [generated-pipeline (generate-aggregation-pipeline (:query query))]
+    (let [{proj :projections, generated-pipeline :query} (generate-aggregation-pipeline (:query query))]
       (log-monger-form generated-pipeline)
-      {:query      generated-pipeline
+      {:projections proj
+       :query generated-pipeline
        :collection source-table-name
        :mbql?      true})))
 
 (defn execute-query
   "Process and run a native MongoDB query."
-  [{{:keys [collection query mbql?]} :native, database :database}]
+  [{{:keys [collection query mbql? projections]} :native, database :database}]
   {:pre [query
          (string? collection)
          (map? database)]}
@@ -470,11 +482,20 @@
                   results
                   [results])
         ;; if we formed the query using MBQL then we apply a couple post processing functions
-        results (if-not mbql? results
-                              (-> results
-                                  unescape-names
-                                  unstringify-dates))
-        columns (vec (keys (first results)))]
+        results (if-not mbql?
+                  results
+                  (-> results
+                      unescape-names
+                      unstringify-dates))
+        rename-map (create-unescaping-rename-map projections)
+        columns (if-not mbql?
+                  (vec (keys (first results)))
+                  (map (fn [proj]
+                         (if (contains? rename-map proj)
+                           (get rename-map proj)
+                           proj))
+                       projections))]
+
     {:columns   columns
      :rows      (for [row results]
                   (mapv row columns))
diff --git a/src/metabase/driver/oracle.clj b/src/metabase/driver/oracle.clj
index 2309c2fa228dd78ff3357f114d44d0e92ca55ff7..d768dc0433ceac708678dd6477b09f47dc21fc52 100644
--- a/src/metabase/driver/oracle.clj
+++ b/src/metabase/driver/oracle.clj
@@ -126,6 +126,51 @@
                                                       :milliseconds (hx// field-or-value (hsql/raw 1000))))))
 
 
+(defn- increment-identifier-suffix
+  "Add an appropriate suffix to a keyword IDENTIFIER to make it distinct from previous usages of the same identifier, e.g.
+
+     (increment-identifier-suffix :my_col)   ; -> :my_col_2
+     (increment-identifier-suffix :my_col_2) ; -> :my_col_3"
+  [identifier]
+  (keyword
+   (let [identifier (name identifier)]
+     (if-let [[_ existing-suffix] (re-find #"^.*_(\d+$)" identifier)]
+       ;; if identifier already has an alias like col_2 then increment it to col_3
+       (let [new-suffix (str (inc (Integer/parseInt existing-suffix)))]
+         (clojure.string/replace identifier (re-pattern (str existing-suffix \$)) new-suffix))
+       ;; otherwise just stick a _2 on the end so it's col_2
+       (str identifier "_2")))))
+
+(defn- alias-everything
+  "Make sure all the columns in SELECT-CLAUSE are alias forms, e.g. `[:table.col :col]` instead of `:table.col`.
+   (This faciliates our deduplication logic.)"
+  [select-clause]
+  (for [col select-clause]
+    (if (sequential? col)
+      ;; if something's already an alias form like [:table.col :col] it's g2g
+      col
+      ;; otherwise if it's something like :table.col replace with [:table.col :col]
+      [col (keyword (last (clojure.string/split (name col) #"\.")))])))
+
+(defn- deduplicate-identifiers
+  "Make sure every column in SELECT-CLAUSE has a unique alias.
+   This is done because Oracle can't figure out how to use a query
+  that produces duplicate columns in a subselect."
+  [select-clause]
+  (if (= select-clause [:*])
+    ;; if we're doing `SELECT *` there's no way we can deduplicate anything so we're SOL, return as-is
+    select-clause
+    ;; otherwise we can actually deduplicate things
+    (loop [already-seen #{}, acc [], [[col alias] & more] (alias-everything select-clause)]
+      (cond
+        ;; if not more cols are left to deduplicate, we're done
+        (not col)                      acc
+        ;; otherwise if we've already used this alias, replace it with one like `identifier_2` and try agan
+        (contains? already-seen alias) (recur already-seen acc (cons [col (increment-identifier-suffix alias)]
+                                                                     more))
+        ;; otherwise if we haven't seen it record it as seen and move on to the next column
+        :else                          (recur (conj already-seen alias) (conj acc [col alias]) more)))))
+
 ;; Oracle doesn't support `LIMIT n` syntax. Instead we have to use `WHERE ROWNUM <= n` (`NEXT n ROWS ONLY` isn't supported on Oracle versions older than 12).
 ;; This has to wrap the actual query, e.g.
 ;;
@@ -137,6 +182,9 @@
 ;; )
 ;; WHERE ROWNUM < 10;
 ;;
+;; This wrapping can cause problems if there is an ambiguous column reference in the nested query (i.e. two columns with the same alias name).
+;; To ensure that doesn't happen, those column references need to be disambiguated first
+;;
 ;; To do an offset we have to do something like:
 ;;
 ;; SELECT *
@@ -156,8 +204,9 @@
 (defn- apply-limit [honeysql-query {value :limit}]
   {:pre [(integer? value)]}
   {:select [:*]
-   :from   [(merge {:select [:*]}   ; if `honeysql-query` doesn't have a `SELECT` clause yet (which might be the case when using a source query)
-                   honeysql-query)] ; fall back to including a `SELECT *` just to make sure a valid query is produced
+   :from   [(-> (merge {:select [:*]} ; if `honeysql-query` doesn't have a `SELECT` clause yet (which might be the case when using a source query)
+                       honeysql-query); fall back to including a `SELECT *` just to make sure a valid query is produced
+                (update :select deduplicate-identifiers))]
    :where  [:<= (hsql/raw "rownum") value]})
 
 (defn- apply-page [honeysql-query {{:keys [items page]} :page}]
@@ -278,7 +327,6 @@
                                           ;; we just want to ignore all the test "session schemas" that don't match the current test
                                           (require 'metabase.test.data.oracle)
                                           ((resolve 'metabase.test.data.oracle/non-session-schemas)))))
-          :field-percent-urls        sql/slow-field-percent-urls
           :set-timezone-sql          (constantly "ALTER session SET time_zone = %s")
           :prepare-value             (u/drop-first-arg prepare-value)
           :string-length-fn          (u/drop-first-arg string-length-fn)
diff --git a/src/metabase/driver/presto.clj b/src/metabase/driver/presto.clj
index a5e1cc952c09e7f772f82d81e9583c22d578568f..a493cf8af5ad6fb7b6f9bf2013709e3ad7bae9af 100644
--- a/src/metabase/driver/presto.clj
+++ b/src/metabase/driver/presto.clj
@@ -16,7 +16,6 @@
              [field :as field]
              [table :as table]]
             [metabase.query-processor.util :as qputil]
-            [metabase.sync-database.analyze :as analyze]
             [metabase.util
              [honeysql-extensions :as hx]
              [ssh :as ssh]])
@@ -109,31 +108,6 @@
 
 ;;; IDriver implementation
 
-(defn- field-avg-length [{field-name :name, :as field}]
-  (let [table             (field/table field)
-        {:keys [details]} (table/database table)
-        sql               (format "SELECT cast(round(avg(length(%s))) AS integer) FROM %s WHERE %s IS NOT NULL"
-                            (quote-name field-name)
-                            (quote+combine-names (:schema table) (:name table))
-                            (quote-name field-name))
-        {[[v]] :rows}     (execute-presto-query! details sql)]
-    (or v 0)))
-
-(defn- field-percent-urls [{field-name :name, :as field}]
-  (let [table             (field/table field)
-        {:keys [details]} (table/database table)
-        sql               (format "SELECT cast(count_if(url_extract_host(%s) <> '') AS double) / cast(count(*) AS double) FROM %s WHERE %s IS NOT NULL"
-                            (quote-name field-name)
-                            (quote+combine-names (:schema table) (:name table))
-                            (quote-name field-name))
-        {[[v]] :rows}     (execute-presto-query! details sql)]
-    (if (= v "NaN") 0.0 v)))
-
-(defn- analyze-table [driver table new-table-ids]
-  ((analyze/make-analyze-table driver
-     :field-avg-length-fn   field-avg-length
-     :field-percent-urls-fn field-percent-urls) driver table new-table-ids))
-
 (defn- can-connect? [{:keys [catalog] :as details}]
   (let [{[[v]] :rows} (execute-presto-query! details (str "SHOW SCHEMAS FROM " (quote-name catalog) " LIKE 'information_schema'"))]
     (= v "information_schema")))
@@ -298,8 +272,7 @@
 (u/strict-extend PrestoDriver
   driver/IDriver
   (merge (sql/IDriverSQLDefaultsMixin)
-         {:analyze-table                     analyze-table
-          :can-connect?                      (u/drop-first-arg can-connect?)
+         {:can-connect?                      (u/drop-first-arg can-connect?)
           :date-interval                     (u/drop-first-arg date-interval)
           :describe-database                 (u/drop-first-arg describe-database)
           :describe-table                    (u/drop-first-arg describe-table)
@@ -334,7 +307,8 @@
                                                                       :standard-deviation-aggregations
                                                                       :expressions
                                                                       :native-parameters
-                                                                      :expression-aggregations}
+                                                                      :expression-aggregations
+                                                                      :binning}
                                                                     (when-not config/is-test?
                                                                       ;; during unit tests don't treat presto as having FK support
                                                                       #{:foreign-keys})))
@@ -350,7 +324,6 @@
           :current-datetime-fn       (constantly :%now)
           :date                      (u/drop-first-arg date)
           :excluded-schemas          (constantly #{"information_schema"})
-          :field-percent-urls        (u/drop-first-arg field-percent-urls)
           :prepare-value             (u/drop-first-arg prepare-value)
           :quote-style               (constantly :ansi)
           :stddev-fn                 (constantly :stddev_samp)
diff --git a/src/metabase/driver/sqlserver.clj b/src/metabase/driver/sqlserver.clj
index 0190f65567e841ae30b7fab21f371fa01538b09c..4a99ca24a3e76e67e97d0f05ebcd6c303448eee1 100644
--- a/src/metabase/driver/sqlserver.clj
+++ b/src/metabase/driver/sqlserver.clj
@@ -186,7 +186,6 @@
           :current-datetime-fn       (constantly :%getutcdate)
           :date                      (u/drop-first-arg date)
           :excluded-schemas          (constantly #{"sys" "INFORMATION_SCHEMA"})
-          :field-percent-urls        sql/slow-field-percent-urls
           :prepare-value             (u/drop-first-arg prepare-value)
           :stddev-fn                 (constantly :stdev)
           :string-length-fn          (u/drop-first-arg string-length-fn)
diff --git a/src/metabase/events/sync_database.clj b/src/metabase/events/sync_database.clj
index 7172e2fd98e8ff5f19adc2f4afdf86b72002b92e..a9490440b9474e83f6c29652e65c906b1af7eb33 100644
--- a/src/metabase/events/sync_database.clj
+++ b/src/metabase/events/sync_database.clj
@@ -3,7 +3,7 @@
             [clojure.tools.logging :as log]
             [metabase
              [events :as events]
-             [sync-database :as sync-database]]
+             [sync :as sync]]
             [metabase.models.database :refer [Database]]))
 
 (def ^:const sync-database-topics
@@ -28,7 +28,7 @@
       (when-let [database (Database (events/object->model-id topic object))]
         ;; just kick off a sync on another thread
         (future (try
-                  (sync-database/sync-database! database)
+                  (sync/sync-database! database)
                   (catch Throwable t
                     (log/error (format "Error syncing Database: %d" (:id database)) t))))))
     (catch Throwable e
diff --git a/src/metabase/fingerprinting/comparison.clj b/src/metabase/fingerprinting/comparison.clj
new file mode 100644
index 0000000000000000000000000000000000000000..9515e76524ecd6420b1e102faabbcf66fbf372de
--- /dev/null
+++ b/src/metabase/fingerprinting/comparison.clj
@@ -0,0 +1,117 @@
+(ns metabase.fingerprinting.comparison
+  "Fingerprint similarity comparison."
+  (:require [clojure.set :as set]
+            [kixi.stats.math :as math]
+            [metabase.fingerprinting
+             [fingerprinters :as fingerprinters]
+             [histogram :as h]]
+            [redux.core :as redux])
+  (:import com.bigml.histogram.Histogram))
+
+(def magnitude
+  "Transducer that claclulates magnitude (Euclidean norm) of given vector.
+   https://en.wikipedia.org/wiki/Euclidean_distance"
+  (redux/post-complete (redux/pre-step + math/sq) math/sqrt))
+
+(defn cosine-distance
+  "Cosine distance between vectors `a` and `b`.
+   https://en.wikipedia.org/wiki/Cosine_similarity"
+  [a b]
+  (transduce identity
+             (redux/post-complete
+              (redux/fuse {:magnitude-a (redux/pre-step magnitude first)
+                           :magnitude-b (redux/pre-step magnitude second)
+                           :product     (redux/pre-step + (partial apply *))})
+              (fn [{:keys [magnitude-a magnitude-b product]}]
+                (- 1 (/ product magnitude-a magnitude-b))))
+             (map vector a b)))
+
+(defmulti
+  ^{:doc "Difference between two features.
+          Confined to [0, 1] with 0 being same, and 1 orthogonal."
+    :arglists '([a v])}
+  difference #(mapv type %&))
+
+(defmethod difference [Number Number]
+  [a b]
+  (cond
+    (every? zero? [a b]) 0
+    (zero? (max a b))    1
+    :else                (/ (- (max a b) (min a b))
+                            (max a b))))
+
+(defmethod difference [Boolean Boolean]
+  [a b]
+  (if (= a b) 0 1))
+
+(defmethod difference [clojure.lang.Sequential clojure.lang.Sequential]
+  [a b]
+  (* 0.5 (cosine-distance a b)))
+
+(defn chi-squared-distance
+  "Chi-squared distane between empirical probability distributions `p` and `q`.
+   https://stats.stackexchange.com/questions/184101/comparing-two-histograms-using-chi-square-distance"
+  [p q]
+  (reduce + (map (fn [pi qi]
+                   (if (zero? (+ pi qi))
+                     0
+                     (/ (math/sq (- pi qi))
+                        (+ pi qi))))
+                 p q)))
+
+(defn- unify-categories
+  "Given two PMFs add missing categories and align them so they both cover the
+   same set of categories."
+  [pmf-a pmf-b]
+  (let [categories-a (into #{} (map first) pmf-a)
+        categories-b (into #{} (map first) pmf-b)]
+    [(->> (set/difference categories-a categories-b)
+          (map #(vector % 0))
+          (concat pmf-a)
+          (sort-by first))
+     (->> (set/difference categories-b categories-a)
+          (map #(vector % 0))
+          (concat pmf-b)
+          (sort-by first))]))
+
+(defmethod difference [Histogram Histogram]
+  [a b]
+  (let [[pdf-a pdf-b] (if (h/categorical? a)
+                        (unify-categories (h/pdf a) (h/pdf b))
+                        (map h/pdf [a b]))]
+    ;; We are only interested in the shape, hence scale-free comparison
+    (chi-squared-distance (map second pdf-a) (map second pdf-b))))
+
+(defn- flatten-map
+  ([m] (flatten-map nil m))
+  ([prefix m]
+   (into {}
+     (mapcat (fn [[k v]]
+               (let [k (keyword (some-> prefix str (subs 1)) (name k))]
+                 (if (map? v)
+                   (flatten-map k v)
+                   [[k v]]))))
+     m)))
+
+(defn pairwise-differences
+  "Pairwise differences of (feature) vectors `a` and `b`."
+  [a b]
+  (into {}
+    (map (fn [[k a] [_ b]]
+           [k (difference a b)])
+         (flatten-map (fingerprinters/comparison-vector a))
+         (flatten-map (fingerprinters/comparison-vector b)))))
+
+(def ^:private ^:const ^Double interestingness-thershold 0.2)
+
+(defn fingerprint-distance
+  "Distance metric between fingerprints `a` and `b`."
+  [a b]
+  (let [differences (pairwise-differences a b)]
+    {:distance   (transduce (map val)
+                            (redux/post-complete
+                             magnitude
+                             #(/ % (math/sqrt (count differences))))
+                            differences)
+     :components (sort-by val > differences)
+     :thereshold interestingness-thershold}))
diff --git a/src/metabase/fingerprinting/core.clj b/src/metabase/fingerprinting/core.clj
new file mode 100644
index 0000000000000000000000000000000000000000..c13ad250739a3ce7f2bd29e78550a305afd29419
--- /dev/null
+++ b/src/metabase/fingerprinting/core.clj
@@ -0,0 +1,135 @@
+(ns metabase.fingerprinting.core
+  "Fingerprinting (feature extraction) for various models."
+  (:require [clojure.walk :refer [postwalk]]
+            [metabase.db.metadata-queries :as metadata]
+            [metabase.fingerprinting
+             [comparison :as comparison]
+             [costs :as costs]
+             [fingerprinters :as f]
+             [feature-descriptions :refer [add-descriptions]]]
+            [medley.core :as m]
+            [metabase.models
+             [card :refer [Card]]
+             [field :refer [Field]]
+             [metric :refer [Metric]]
+             [segment :refer [Segment]]
+             [table :refer [Table]]]
+            [metabase.util :as u]
+            [redux.core :as redux]))
+
+(defn- fingerprint-field
+  "Transduce given column with corresponding fingerprinter."
+  [opts field data]
+  (transduce identity (f/fingerprinter opts field) data))
+
+(defn- fingerprint-query
+  "Transuce each column in given dataset with corresponding fingerprinter."
+  [opts {:keys [rows cols]}]
+  (transduce identity
+             (->> cols
+                  (remove :remapped_to)
+                  (map-indexed (fn [i field]
+                                 (redux/pre-step (f/fingerprinter opts field)
+                                                 #(nth % i))))
+                  (apply redux/juxt))
+             rows))
+
+(defmulti
+  ^{:doc "Given a model, fetch corresponding dataset and compute its fingerprint.
+
+          Takes a map of options as first argument. Recognized options:
+          * `:max-cost`   a map with keys `:computation` and `:query` which
+                          limits maximal resource expenditure when computing
+                          the fingerprint.
+                          See `metabase.fingerprinting.costs` for details."
+    :arglists '([opts field])}
+  fingerprint #(type %2))
+
+(def ^:private ^:const ^Long max-sample-size 10000)
+
+(defn- extract-query-opts
+  [{:keys [max-cost]}]
+  (cond-> {}
+    (costs/sample-only? max-cost) (assoc :limit max-sample-size)))
+
+(defmethod fingerprint (type Field)
+  [opts field]
+  {:fingerprint (->> (metadata/field-values field (extract-query-opts opts))
+                     (fingerprint-field opts field)
+                     (merge {:table (Table (:table_id field))}))})
+
+(defmethod fingerprint (type Table)
+  [opts table]
+  {:constituents (fingerprint-query opts (metadata/query-values
+                                          (:db_id table)
+                                          (merge (extract-query-opts opts)
+                                                 {:source-table (:id table)})))
+   :fingerprint  {:table table}})
+
+(defmethod fingerprint (type Card)
+  [opts card]
+  (let [resolution (let [[head _ resolution] (-> card
+                                                 :dataset_query
+                                                 :query
+                                                 :breakout
+                                                 first)]
+                     (when (= head :datetime-field)
+                       resolution))
+        query (-> card :dataset_query :query)
+        {:keys [rows cols]} (->> query
+                                 (merge (extract-query-opts opts))
+                                 (metadata/query-values (:database_id card)))
+        {:keys [breakout aggregation]} (group-by :source cols)
+        fields [(first breakout)
+                (or (first aggregation) (second breakout))]]
+    {:constituents [(fingerprint-field opts (first fields) (map first rows))
+                    (fingerprint-field opts (second fields) (map second rows))]
+     :fingerprint  (merge
+                    (fingerprint-field (assoc opts :resolution resolution)
+                                       fields rows)
+                    {:card  card
+                     :table (Table (:table_id card))})}))
+
+(defmethod fingerprint (type Segment)
+  [opts segment]
+  {:constituents (fingerprint-query opts (metadata/query-values
+                                          (metadata/db-id segment)
+                                          (merge (extract-query-opts opts)
+                                                 (:definition segment))))
+   :fingerprint  {:table   (Table (:table_id segment))
+                  :segment segment}})
+
+(defmethod fingerprint (type Metric)
+  [_ metric]
+  {:metric metric})
+
+(defn compare-fingerprints
+  "Compare fingerprints of two models."
+  [opts a b]
+  (let [[a b] (map (partial fingerprint opts) [a b])]
+    {:constituents [a b]
+     :comparison   (into {}
+                     (map (fn [[k a] [_ b]]
+                            [k (if (sequential? a)
+                                 (map comparison/fingerprint-distance a b)
+                                 (comparison/fingerprint-distance a b))])
+                          a b))}))
+
+(defn- trim-decimals
+  [decimal-places fingerprint]
+  (postwalk
+   (fn [x]
+     (if (float? x)
+       (u/round-to-decimals (+ (- (min (long (f/order-of-magnitude x)) 0))
+                               decimal-places)
+                            x)
+       x))
+   fingerprint))
+
+(defn x-ray
+  "Turn the fingerprint structure into an x-ray."
+  [fingerprint]
+  (let [x-ray (comp add-descriptions (partial trim-decimals 2) f/x-ray)]
+    (-> fingerprint
+        (update :fingerprint  x-ray)
+        (update :constituents (partial map x-ray)))))
diff --git a/src/metabase/fingerprinting/costs.clj b/src/metabase/fingerprinting/costs.clj
new file mode 100644
index 0000000000000000000000000000000000000000..28a2c15d150c7882ba1fb9a980f80d679127dcb9
--- /dev/null
+++ b/src/metabase/fingerprinting/costs.clj
@@ -0,0 +1,38 @@
+(ns metabase.fingerprinting.costs
+  "Predicates for limiting resource expenditure during fingerprinting."
+  (:require [schema.core :as s]))
+
+(def MaxCost
+  "Schema for max-cost parameter."
+  {:computation (s/enum :linear :unbounded :yolo)
+   :query       (s/enum :cache :sample :full-scan :joins)})
+
+(def ^{:arglists '([max-cost])} linear-computation?
+  "Limit computation to O(n) or better."
+  (comp #{:linear} :computation))
+
+(def ^{:arglists '([max-cost])} unbounded-computation?
+  "Allow unbounded but always convergent computation.
+   Default if no cost limit is specified."
+  (comp (partial contains? #{:unbounded :yolo nil}) :computation))
+
+(def ^{:arglists '([max-cost])} yolo-computation?
+  "Allow any computation including full-blown machine learning."
+  (comp #{:yolo} :computation))
+
+(def ^{:arglists '([max-cost])} cache-only?
+  "Use cached data only."
+  (comp #{:cache} :query))
+
+(def ^{:arglists '([max-cost])} sample-only?
+  "Only sample data."
+  (comp #{:sample} :query))
+
+(def ^{:arglists '([max-cost])} full-scan?
+  "Allow full table scans.
+   Default if no cost limit is specified."
+  (comp (partial contains? #{:full-scan :joins nil}) :query))
+
+(def ^{:arglists '([max-cost])} alow-joins?
+  "Allow bringing in data from other tables if needed."
+  (comp #{:joins} :query))
diff --git a/src/metabase/fingerprinting/feature_descriptions.clj b/src/metabase/fingerprinting/feature_descriptions.clj
new file mode 100644
index 0000000000000000000000000000000000000000..fd6552fb44e72b92ccb83f5c72cd57d8b6e35cd4
--- /dev/null
+++ b/src/metabase/fingerprinting/feature_descriptions.clj
@@ -0,0 +1,76 @@
+(ns metabase.fingerprinting.feature-descriptions
+  "Descriptions of all the fingerprint features exposed as x-rays."
+  (:require [medley.core :as m]))
+
+(def ^:private descriptions
+  {:histogram              {:label       "Distribution"
+                            :description "Distribution of values."
+                            :link        "https://en.wikipedia.org/wiki/Probability_mass_function"}
+   :percentiles            {:label "Percentiles"
+                            :link  "https://en.wikipedia.org/wiki/Percentile"}
+   :sum                    {:label       "Sum"
+                            :description "Sum of all values."}
+   :sum-of-squares         {:label       "Sum of squares"
+                            :description "Sum of squares of all values."}
+   :%>mean                 {:label "Share of values greater than mean."}
+   :cv                     {:label       "Coefficient of variation"
+                            :description "Ratio between mean and standard deviation. Used as a dispersion measure."
+                            :link        "https://en.wikipedia.org/wiki/Coefficient_of_variation"}
+   :range-vs-sd            {:label "Ratio between standard deviation and range of values."}
+   :mean-median-spread     {:label       "Relative mean-median spread"
+                            :description "The lower the ratio, the more symmetric the distribution."}
+   :range                  {:label       "Range"
+                            :description "Range between the smallest and the largest value."}
+   :cardinality            {:label       "Cardinality"
+                            :description "Number of different values."}
+   :min                    {:label "Minimal value"}
+   :max                    {:label "Maximal value"}
+   :mean                   {:label       "Mean"
+                            :description "Mean (expected) value."
+                            :link        "https://en.wikipedia.org/wiki/Mean"}
+   :median                 {:label       "Median"
+                            :description "Value separating the data set into two equal halves -- the \"middle\" value."
+                            :link        "https://en.wikipedia.org/wiki/Median"}
+   :var                    {:label       "Variance"
+                            :description "Measure of how far the values are spread from the mean."
+                            :link        "https://en.wikipedia.org/wiki/Variance"}
+   :sd                     {:label       "Standard deviation"
+                            :description "Measure of how far the values are spread from the mean."
+                            :link        "https://en.wikipedia.org/wiki/Standard_deviation"}
+   :count                  {:label       "Count"
+                            :description "Number of rows in the dataset."
+                            }
+   :kurtosis               {:label       "Kurtosis"
+                            :description "Descriptor of the shape of the distribution. Measures tail extremity (outliers)"
+                            :link        "https://en.wikipedia.org/wiki/Kurtosis"}
+   :skewness               {:label       "Skewness"
+                            :description "Measure of asymmetry of the distribution."
+                            :link        "https://en.wikipedia.org/wiki/Skewness"}
+   :entropy                {:label       "Entropy"
+                            :description "Measure of unpredictability of the state (ie. of its average information content)."
+                            :link        "https://en.wikipedia.org/wiki/Entropy_(information_theory)"}
+   :linear-regression      {:label       "Linear regression"
+                            :description "Slope and intercept of a linear function fit to data."
+                            :link        "https://en.wikipedia.org/wiki/Linear_regression"}
+   :correlation            {:label       "Correlation"
+                            :description "The quality of a least squares fitting -- the extent to which two variables have a linear relationship with each other."
+                            :link        "http://mathworld.wolfram.com/CorrelationCoefficient.html"}
+   :covariance             {:label       "Covariance"
+                            :description "A measure of the joint variability."
+                            :link        "https://en.wikipedia.org/wiki/Covariance"}
+   :seasonal-decomposition {:label       "Seasonal decomposition"
+                            :description "Decomposes time series into seasonal, trend, and residual components."
+                            :link        "http://www.stat.washington.edu/courses/stat527/s13/readings/Cleveland_JASA_1979.pdf"}
+   :earliest               {:label "The earliest value"}
+   :latest                 {:label "The latest value"}
+   :histogram-hour         {:label "Distribution of hours in a day"}
+   :histogram-day          {:label "Distribution of days of week"}
+   :histogram-month        {:label "Distribution of months"}
+   :histogram-quarter      {:label "Distribution of quarters"}})
+
+(def ^{:arglists '([fingerprint])} add-descriptions
+  "Add descriptions of features to naked values where applicable."
+  (partial m/map-kv (fn [k v]
+                      (if-let [description (descriptions k)]
+                        [k (assoc description :value v)]
+                        [k v]))))
diff --git a/src/metabase/fingerprinting/fingerprinters.clj b/src/metabase/fingerprinting/fingerprinters.clj
new file mode 100644
index 0000000000000000000000000000000000000000..c3990bcd4b6174e347a9c74c983b32997695d6ff
--- /dev/null
+++ b/src/metabase/fingerprinting/fingerprinters.clj
@@ -0,0 +1,554 @@
+(ns metabase.fingerprinting.fingerprinters
+  "Fingerprinting (feature extraction) for various models."
+  (:require [bigml.histogram.core :as h.impl]
+            [clojure.math.numeric-tower :refer [ceil expt floor round]] ;;;;;; temp!
+            [clj-time
+             [coerce :as t.coerce]
+             [core :as t]
+             [format :as t.format]
+             [periodic :as t.periodic]]
+            [kixi.stats
+             [core :as stats]
+             [math :as math]]
+            [medley.core :as m]
+            [metabase.fingerprinting
+             [histogram :as h]
+             [costs :as costs]]
+            [metabase.util :as u]            ;;;; temp!
+            [redux.core :as redux]
+            [tide.core :as tide])
+  (:import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus))
+
+(def ^:private ^:const percentiles (range 0 1 0.1))
+
+(defn rollup
+  "Transducer that groups by `groupfn` and reduces each group with `f`.
+   Note the constructor arity of `f` needs to be free of side effects."
+  [f groupfn]
+  (let [init (f)]
+    (fn
+      ([] (transient {}))
+      ([acc]
+       (into {}
+         (map (fn [[k v]]
+                [k (f v)]))
+         (persistent! acc)))
+      ([acc x]
+       (let [k (groupfn x)]
+         (assoc! acc k (f (get acc k init) x)))))))
+
+(defn safe-divide
+  "Like `clojure.core//`, but returns nil if denominator is 0."
+  [x & denominators]
+  (when (or (and (not-empty denominators) (not-any? zero? denominators))
+            (and (not (zero? x)) (empty? denominators)))
+    (apply / x denominators)))
+
+(defn growth
+  "Relative difference between `x1` and `x2`."
+  [x2 x1]
+  (when (every? some? [x2 x1])
+    (safe-divide (* (if (neg? x1) -1 1) (- x2 x1)) x1)))
+
+(def ^:private ^:const ^Double cardinality-error 0.01)
+
+(defn cardinality
+  "Transducer that sketches cardinality using HyperLogLog++.
+   https://research.google.com/pubs/pub40671.html"
+  ([] (HyperLogLogPlus. 14 25))
+  ([^HyperLogLogPlus acc] (.cardinality acc))
+  ([^HyperLogLogPlus acc x]
+   (.offer acc x)
+   acc))
+
+(def ^:private Num      [:type/Number :type/*])
+(def ^:private DateTime [:type/DateTime :type/*])
+(def ^:private Category [:type/* :type/Category])
+; (def ^:private Any      [:type/* :type/*])
+(def ^:private Text     [:type/Text :type/*])
+
+;;;;;;;;;;;;;;;;;; temporary cp until we merge the binning branch ;;;;;;;;;;
+
+
+(defn- calculate-bin-width [min-value max-value num-bins]
+  (u/round-to-decimals 5 (/ (- max-value min-value)
+                            num-bins)))
+
+(defn- calculate-num-bins [min-value max-value bin-width]
+  (long (ceil (/ (- max-value min-value)
+                         bin-width))))
+
+(defn- ceil-to
+  [precision x]
+  (let [scale (/ precision)]
+    (/ (ceil (* x scale)) scale)))
+
+(defn- floor-to
+  [precision x]
+  (let [scale (/ precision)]
+    (/ (floor (* x scale)) scale)))
+
+;;;;;;;; cast to long
+(defn order-of-magnitude
+  "Return order of magnitude."
+  [x]
+  (if (zero? x)
+    0
+    (long (floor (/ (math/log (math/abs x)) (math/log 10))))))
+
+(def ^:private ^:const pleasing-numbers [1 1.25 2 2.5 3 5 7.5 10])
+
+(defn- nicer-bin-width
+  [min-value max-value num-bins]
+  (let [min-bin-width (calculate-bin-width min-value max-value num-bins)
+        scale         (expt 10 (order-of-magnitude min-bin-width))]
+    (->> pleasing-numbers
+         (map (partial * scale))
+         (drop-while (partial > min-bin-width))
+         first)))
+
+(defn- nicer-bounds
+  [min-value max-value bin-width]
+  [(floor-to bin-width min-value) (ceil-to bin-width max-value)])
+
+(def ^:private ^:const max-steps 10)
+
+(defn- fixed-point
+  [f]
+  (fn [x]
+    (->> (iterate f x)
+         (partition 2 1)
+         (take max-steps)
+         (drop-while (partial apply not=))
+         ffirst)))
+
+(def ^:private ^{:arglists '([binned-field])} nicer-breakout
+  (fixed-point
+   (fn
+     [{:keys [min-value max-value bin-width num-bins strategy] :as binned-field}]
+     (let [bin-width (if (= strategy :num-bins)
+                       (nicer-bin-width min-value max-value num-bins)
+                       bin-width)
+           [min-value max-value] (nicer-bounds min-value max-value bin-width)]
+       (-> binned-field
+           (assoc :min-value min-value
+                  :max-value max-value
+                  :num-bins  (if (= strategy :num-bins)
+                               num-bins
+                               (calculate-num-bins min-value max-value bin-width))
+                  :bin-width bin-width))))))
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(defn- equidistant-bins
+  [histogram]
+  (if (h/categorical? histogram)
+    (-> histogram h.impl/bins first :target :counts)
+    (let [{:keys [min max]} (h.impl/bounds histogram)]
+      (cond
+        (nil? min)  []
+        (= min max) [[min 1.0]]
+        :else       (let [{:keys [min-value num-bins bin-width]}
+                          (nicer-breakout
+                           {:min-value min
+                            :max-value max
+                            :num-bins  (->> histogram
+                                            h/optimal-bin-width
+                                            (calculate-num-bins min max))
+                            :strategy  :num-bins})]
+                      (->> min-value
+                           (iterate (partial + bin-width))
+                           (take (inc num-bins))
+                           (map (fn [x]
+                                  [x (h.impl/sum histogram x)]))
+                           (partition 2 1)
+                           (map (fn [[[x s1] [_ s2]]]
+                                  [x (- s2 s1)]))))))))
+
+(defn- histogram->dataset
+  ([field histogram] (histogram->dataset identity field histogram))
+  ([keyfn field histogram]
+   {:rows    (let [norm (safe-divide (h.impl/total-count histogram))]
+               (for [[k v] (equidistant-bins histogram)]
+                 [(keyfn k) (* v norm)]))
+    :columns [(:name field) "SHARE"]
+    :cols [(dissoc field :remapped_from)
+           {:name         "SHARE"
+            :display_name "Share"
+            :description  "Share of corresponding bin in the overall population."
+            :base_type    :type/Float}]}))
+
+(defn- field-type
+  [field]
+  (if (sequential? field)
+    (mapv field-type field)
+    [(:base_type field) (or (:special_type field) :type/*)]))
+
+(defmulti
+  ^{:doc "Transducer that summarizes (_fingerprints_) given coll. What features
+          are extracted depends on the type of corresponding `Field`(s), amount
+          of data points available (some algorithms have a minimum data points
+          requirement) and `max-cost.computation` setting.
+          Note we are heavily using data sketches so some summary values may be
+          approximate."
+    :arglists '([opts field])}
+  fingerprinter #(field-type %2))
+
+(defmulti
+  ^{:doc "Make fingerprint human readable."
+    :arglists '([fingerprint])}
+  x-ray :type)
+
+(defmethod x-ray :default
+  [fingerprint]
+  fingerprint)
+
+(defmulti
+  ^{:doc "Fingerprint feature vector for comparison/difference purposes."
+    :arglists '([fingerprint])}
+  comparison-vector :type)
+
+(defmethod comparison-vector :default
+  [fingerprint]
+  (dissoc fingerprint :type :field :has-nils?))
+
+(defmethod fingerprinter Num
+  [{:keys [max-cost]} field]
+  (redux/post-complete
+   (redux/fuse {:histogram      h/histogram
+                :cardinality    cardinality
+                :kurtosis       stats/kurtosis
+                :skewness       stats/skewness
+                :sum            (redux/with-xform + (remove nil?))
+                :sum-of-squares (redux/with-xform + (comp (remove nil?)
+                                                          (map math/sq)))})
+   (fn [{:keys [histogram cardinality kurtosis skewness sum sum-of-squares]}]
+     (if (pos? (h/total-count histogram))
+       (let [nil-count   (h/nil-count histogram)
+             total-count (h/total-count histogram)
+             uniqueness  (/ cardinality (max total-count 1))
+             var         (or (h.impl/variance histogram) 0)
+             sd          (math/sqrt var)
+             min         (h.impl/minimum histogram)
+             max         (h.impl/maximum histogram)
+             mean        (h.impl/mean histogram)
+             median      (h.impl/median histogram)
+             range       (- max min)]
+         (merge
+          {:histogram          histogram
+           :percentiles        (apply h.impl/percentiles histogram percentiles)
+           :positive-definite? (>= min 0)
+           :%>mean             (- 1 ((h.impl/cdf histogram) mean))
+           :uniqueness         uniqueness
+           :var>sd?            (> var sd)
+           :nil%               (/ nil-count (clojure.core/max total-count 1))
+           :has-nils?          (pos? nil-count)
+           :0<=x<=1?           (<= 0 min max 1)
+           :-1<=x<=1?          (<= -1 min max 1)
+           :cv                 (safe-divide sd mean)
+           :range-vs-sd        (safe-divide sd range)
+           :mean-median-spread (safe-divide (- mean median) range)
+           :min-vs-max         (safe-divide min max)
+           :range              range
+           :cardinality        cardinality
+           :min                min
+           :max                max
+           :mean               mean
+           :median             median
+           :var                var
+           :sd                 sd
+           :count              total-count
+           :kurtosis           kurtosis
+           :skewness           skewness
+           :all-distinct?      (>= uniqueness (- 1 cardinality-error))
+           :entropy            (h/entropy histogram)
+           :type               Num
+           :field              field}
+          (when (costs/full-scan? max-cost)
+            {:sum            sum
+             :sum-of-squares sum-of-squares})))
+       {:count 0
+        :type  Num
+        :field field}))))
+
+(defmethod comparison-vector Num
+  [fingerprint]
+  (select-keys fingerprint
+               [:histogram :mean :median :min :max :sd :count :kurtosis
+                :skewness :entropy :nil% :uniqueness :range :min-vs-max]))
+
+(defmethod x-ray Num
+  [{:keys [field count] :as fingerprint}]
+  (if (pos? count)
+    (-> fingerprint
+        (update :histogram (partial histogram->dataset field))
+        (dissoc :has-nils? :var>sd? :0<=x<=1? :-1<=x<=1? :all-distinct?
+                :positive-definite? :var>sd? :uniqueness :min-vs-max))
+    fingerprint))
+
+(defmethod fingerprinter [Num Num]
+  [_ field]
+  (redux/post-complete
+   (redux/fuse {:linear-regression (stats/simple-linear-regression first second)
+                :correlation       (stats/correlation first second)
+                :covariance        (stats/covariance first second)})
+   #(assoc % :type [Num Num]
+           :field field)))
+
+(def ^:private ^{:arglists '([t])} to-double
+  "Coerce `DateTime` to `Double`."
+  (comp double t.coerce/to-long))
+
+(def ^:private ^{:arglists '([t])} from-double
+  "Coerce `Double` into a `DateTime`."
+  (comp t.coerce/from-long long))
+
+(defn- fill-timeseries
+  "Given a coll of `[DateTime, Any]` pairs with periodicity `step` fill missing
+   periods with 0."
+  [step ts]
+  (let [ts-index (into {} ts)]
+    (into []
+      (comp (map to-double)
+            (take-while (partial >= (-> ts last first)))
+            (map (fn [t]
+                   [t (ts-index t 0)])))
+      (some-> ts
+              ffirst
+              from-double
+              (t.periodic/periodic-seq step)))))
+
+(defn- decompose-timeseries
+  "Decompose given timeseries with expected periodicity `period` into trend,
+   seasonal component, and reminder.
+   `period` can be one of `:day`, `:week`, or `:month`."
+  [period ts]
+  (let [period (case period
+                 :month 12
+                 :week  52
+                 :day   365)]
+    (when (>= (count ts) (* 2 period))
+      (select-keys (tide/decompose period ts) [:trend :seasonal :reminder]))))
+
+(defmethod fingerprinter [DateTime Num]
+  [{:keys [max-cost resolution query]} field]
+  (redux/post-complete
+   (redux/pre-step
+    (redux/fuse {:linear-regression (stats/simple-linear-regression first second)
+                 :series            (if (nil? resolution)
+                                      conj
+                                      (redux/post-complete
+                                       conj
+                                       (partial fill-timeseries
+                                                (case resolution
+                                                  :month (t/months 1)
+                                                  :week  (t/weeks 1)
+                                                  :day   (t/days 1)))))})
+    (fn [[x y]]
+      [(-> x t.format/parse to-double) y]))
+   (fn [{:keys [series linear-regression]}]
+     (let [ys-r (->> series (map second) reverse not-empty)]
+       (merge {:resolution             resolution
+               :type                   [DateTime Num]
+               :field                  field
+               :series                 series
+               :linear-regression      linear-regression
+               :seasonal-decomposition
+               (when (and resolution
+                          (costs/unbounded-computation? max-cost))
+                 (decompose-timeseries resolution series))}
+              (when (costs/alow-joins? series)
+                {:YoY 0
+                 :MoM 0
+                 :WoW 0
+                 :DoD 0}))))))
+
+(defmethod comparison-vector [DateTime Num]
+  [fingerprint]
+  (dissoc fingerprint :type :resolution :field))
+
+(defmethod x-ray [DateTime Num]
+  [fingerprint]
+  (dissoc fingerprint :series))
+
+;; This one needs way more thinking
+;;
+;; (defmethod fingerprinter [Category Any]
+;;   [opts [x y]]
+;;   (rollup (redux/pre-step (fingerprinter opts y) second) first))
+
+(defmethod fingerprinter Text
+  [_ field]
+  (redux/post-complete
+   (redux/fuse {:histogram (redux/pre-step
+                            h/histogram
+                            (stats/somef (comp count u/jdbc-clob->str)))})
+   (fn [{:keys [histogram]}]
+     (let [nil-count   (h/nil-count histogram)
+           total-count (h/total-count histogram)]
+       {:min        (h.impl/minimum histogram)
+        :max        (h.impl/maximum histogram)
+        :histogram  histogram
+        :count      total-count
+        :nil%       (/ nil-count (max total-count 1))
+        :has-nils?  (pos? nil-count)
+        :type       Text
+        :field      field}))))
+
+(defmethod x-ray Text
+  [{:keys [field] :as fingerprint}]
+  (update fingerprint :histogram (partial histogram->dataset field)))
+
+(defn- quarter
+  [dt]
+  (-> dt t/month (/ 3) Math/ceil long))
+
+(defmethod fingerprinter DateTime
+  [_ field]
+  (redux/post-complete
+   (redux/pre-step
+    (redux/fuse {:histogram         (redux/pre-step h/histogram t.coerce/to-long)
+                 :histogram-hour    (redux/pre-step h/histogram-categorical
+                                                    (stats/somef t/hour))
+                 :histogram-day     (redux/pre-step h/histogram-categorical
+                                                    (stats/somef t/day-of-week))
+                 :histogram-month   (redux/pre-step h/histogram-categorical
+                                                    (stats/somef t/month))
+                 :histogram-quarter (redux/pre-step h/histogram-categorical
+                                                    (stats/somef quarter))})
+    t.format/parse)
+   (fn [{:keys [histogram histogram-hour histogram-day histogram-month
+                histogram-quarter]}]
+     (let [nil-count   (h/nil-count histogram)
+           total-count (h/total-count histogram)]
+       {:earliest          (h.impl/minimum histogram)
+        :latest            (h.impl/maximum histogram)
+        :histogram         histogram
+        :percentiles       (apply h.impl/percentiles histogram percentiles)
+        :histogram-hour    histogram-hour
+        :histogram-day     histogram-day
+        :histogram-month   histogram-month
+        :histogram-quarter histogram-quarter
+        :count             total-count
+        :nil%              (/ nil-count (max total-count 1))
+        :has-nils?         (pos? nil-count)
+        :entropy           (h/entropy histogram)
+        :type              DateTime
+        :field             field}))))
+
+(defmethod comparison-vector DateTime
+  [fingerprint]
+  (dissoc fingerprint :type :percentiles :field :has-nils?))
+
+(defn- round-to-month
+  [dt]
+  (if (<= (t/day dt) 15)
+    (t/floor dt t/month)
+    (t/date-time (t/year dt) (inc (t/month dt)))))
+
+(defn- month-frequencies
+  [earliest latest]
+  (let [earilest    (round-to-month latest)
+        latest      (round-to-month latest)
+        start-month (t/month earliest)
+        duration    (t/in-months (t/interval earliest latest))]
+    (->> (range (dec start-month) (+ start-month duration))
+         (map #(inc (mod % 12)))
+         frequencies)))
+
+(defn- quarter-frequencies
+  [earliest latest]
+  (let [earilest      (round-to-month latest)
+        latest        (round-to-month latest)
+        start-quarter (quarter earliest)
+        duration      (round (/ (t/in-months (t/interval earliest latest)) 3))]
+    (->> (range (dec start-quarter) (+ start-quarter duration))
+         (map #(inc (mod % 4)))
+         frequencies)))
+
+(defn- weigh-periodicity
+  [weights card]
+  (let [baseline (apply min (vals weights))]
+    (update card :rows (partial map (fn [[k v]]
+                                      [k (* v (/ baseline (weights k)))])))))
+
+(defmethod x-ray DateTime
+  [{:keys [field earliest latest count] :as fingerprint}]
+  (if (pos? count)
+    (let [earliest (from-double earliest)
+          latest   (from-double latest)]
+      (-> fingerprint
+          (assoc  :earliest          earliest)
+          (assoc  :latest            latest)
+          (update :histogram         (partial histogram->dataset from-double field))
+          (update :percentiles       (partial m/map-vals from-double))
+          (update :histogram-hour    (partial histogram->dataset
+                                              {:name         "HOUR"
+                                               :display_name "Hour of day"
+                                               :base_type    :type/Integer
+                                               :special_type :type/Category}))
+          (update :histogram-day     (partial histogram->dataset
+                                              {:name         "DAY"
+                                               :display_name "Day of week"
+                                               :base_type    :type/Integer
+                                               :special_type :type/Category}))
+          (update :histogram-month   (comp
+                                      (partial weigh-periodicity
+                                               (month-frequencies earliest latest))
+                                      (partial histogram->dataset
+                                               {:name         "MONTH"
+                                                :display_name "Month of year"
+                                                :base_type    :type/Integer
+                                                :special_type :type/Category})))
+          (update :histogram-quarter (comp
+                                      (partial weigh-periodicity
+                                               (quarter-frequencies earliest latest))
+                                      (partial histogram->dataset
+                                               {:name         "QUARTER"
+                                                :display_name "Quarter of year"
+                                                :base_type    :type/Integer
+                                                :special_type :type/Category})))))
+    (select-keys fingerprint [:count :type :field])))
+
+(defmethod fingerprinter Category
+  [_ field]
+  (redux/post-complete
+   (redux/fuse {:histogram   h/histogram-categorical
+                :cardinality cardinality})
+   (fn [{:keys [histogram cardinality]}]
+     (let [nil-count   (h/nil-count histogram)
+           total-count (h/total-count histogram)
+           uniqueness  (/ cardinality (max total-count 1))]
+       {:histogram   histogram
+        :uniqueness  uniqueness
+        :nil%        (/ nil-count (max total-count 1))
+        :has-nils?   (pos? nil-count)
+        :cardinality cardinality
+        :count       total-count
+        :entropy     (h/entropy histogram)
+        :type        Category
+        :field       field}))))
+
+(defmethod comparison-vector Category
+  [fingerprint]
+  (dissoc fingerprint :type :cardinality :field :has-nils?))
+
+(defmethod x-ray Category
+  [{:keys [field] :as fingerprint}]
+  (update fingerprint :histogram (partial histogram->dataset field)))
+
+(defmethod fingerprinter :default
+  [_ field]
+  (redux/post-complete
+   (redux/fuse {:total-count stats/count
+                :nil-count   (redux/with-xform stats/count (filter nil?))})
+   (fn [{:keys [total-count nil-count]}]
+     {:count     total-count
+      :nil%      (/ nil-count (max total-count 1))
+      :has-nils? (pos? nil-count)
+      :type      [nil (field-type field)]
+      :field     field})))
+
+(prefer-method fingerprinter Category Text)
+(prefer-method fingerprinter Num Category)
diff --git a/src/metabase/fingerprinting/histogram.clj b/src/metabase/fingerprinting/histogram.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5e4a491ede3dc13b80f2b70ab90184c54b3f96d6
--- /dev/null
+++ b/src/metabase/fingerprinting/histogram.clj
@@ -0,0 +1,82 @@
+(ns metabase.fingerprinting.histogram
+  "Wrappers and additional functionality for `bigml.histogram`."
+  (:require [bigml.histogram.core :as impl]
+            [kixi.stats.math :as math]
+            [redux.core :as redux])
+  (:import com.bigml.histogram.Histogram))
+
+(defn histogram
+  "Transducer that summarizes numerical data with a histogram."
+  ([] (impl/create))
+  ([^Histogram histogram] histogram)
+  ([^Histogram histogram x] (impl/insert-simple! histogram x)))
+
+(defn histogram-categorical
+  "Transducer that summarizes categorical data with a histogram."
+  ([] (impl/create))
+  ([^Histogram histogram] histogram)
+  ([^Histogram histogram x] (impl/insert-categorical! histogram (when x 1) x)))
+
+(def ^{:arglists '([^Histogram histogram])} categorical?
+  "Returns true if given histogram holds categorical values."
+  (comp (complement #{:none :unset}) impl/target-type))
+
+(def ^:private ^:const ^Long pdf-sample-points 100)
+
+(defn pdf
+  "Probability density function of given histogram.
+   Obtained by sampling density at `pdf-sample-points` points from the histogram
+   or at each target if histogram holds categorical data.
+   https://en.wikipedia.org/wiki/Probability_density_function"
+  [^Histogram histogram]
+  (if (categorical? histogram)
+    (let [norm (/ (impl/total-count histogram))]
+      (for [[target count] (-> histogram impl/bins first :target :counts)]
+        [target (* count norm)]))
+    (let [{:keys [min max]} (impl/bounds histogram)]
+      (cond
+        (nil? min)  []
+        (= min max) [[min 1.0]]
+        :else       (let [step (/ (- max min) pdf-sample-points)]
+                      (transduce (take pdf-sample-points)
+                                 (fn
+                                   ([] {:total-density 0
+                                        :densities     (transient [])})
+                                   ([{:keys [total-density densities]}]
+                                    (for [[x density] (persistent! densities)]
+                                      [x (/ density total-density)]))
+                                   ([acc x]
+                                    (let [d (impl/density histogram x)]
+                                      (-> acc
+                                          (update :densities conj! [x d])
+                                          (update :total-density + d)))))
+                                 (iterate (partial + step) min)))))))
+
+(def ^{:arglists '([^Histogram histogram])} nil-count
+  "Return number of nil values histogram holds."
+  (comp :count impl/missing-bin))
+
+(defn total-count
+  "Return total number (including nils) of values histogram holds."
+  [^Histogram histogram]
+  (+ (impl/total-count histogram)
+     (nil-count histogram)))
+
+(defn entropy
+  "Calculate (Shannon) entropy of given histogram.
+   https://en.wikipedia.org/wiki/Entropy_(information_theory)"
+  [^Histogram histogram]
+  (transduce (comp (map second)
+                   (remove zero?)
+                   (map #(* % (math/log %))))
+             (redux/post-complete + -)
+             (pdf histogram)))
+
+(defn optimal-bin-width
+  "Determine optimal bin width (and consequently number of bins) for a given
+   histogram using Freedman-Diaconis rule.
+   https://en.wikipedia.org/wiki/Freedman%E2%80%93Diaconis_rule"
+  [^Histogram histogram]
+  (let [{first-q 0.25 third-q 0.75} (impl/percentiles histogram 0.25 0.75)]
+    (when first-q
+      (* 2 (- third-q first-q) (math/pow (impl/total-count histogram) (/ -3))))))
diff --git a/src/metabase/models/database.clj b/src/metabase/models/database.clj
index 18c311e28db5b19c9b08992ba5b0661746ef4ef1..aa448ae66ede2aa96ebace5883564d7cb3b0506e 100644
--- a/src/metabase/models/database.clj
+++ b/src/metabase/models/database.clj
@@ -49,8 +49,7 @@
 (defn- pre-delete [{:keys [id]}]
   (db/delete! 'Card        :database_id id)
   (db/delete! 'Permissions :object      [:like (str (perms/object-path id) "%")])
-  (db/delete! 'Table       :db_id       id)
-  (db/delete! 'RawTable    :database_id id))
+  (db/delete! 'Table       :db_id       id))
 
 
 (defn- perms-objects-set [database _]
diff --git a/src/metabase/models/dimension.clj b/src/metabase/models/dimension.clj
new file mode 100644
index 0000000000000000000000000000000000000000..39f86363042a21e678458235064c33f24b5920ce
--- /dev/null
+++ b/src/metabase/models/dimension.clj
@@ -0,0 +1,16 @@
+(ns metabase.models.dimension
+  (:require [toucan.models :as models]
+            [metabase.util :as u]))
+
+(def dimension-types
+  "Possible values for `Dimension.type`"
+  #{:internal
+    :external})
+
+(models/defmodel Dimension :dimension)
+
+(u/strict-extend (class Dimension)
+  models/IModel
+  (merge models/IModelDefaults
+         {:types      (constantly {:type :keyword})
+          :properties (constantly {:timestamped? true})}))
diff --git a/src/metabase/models/field.clj b/src/metabase/models/field.clj
index 1ff8fee88f1049ed8351cc402ec8734e2a0dec79..32bb1f84e9b618ad86ab2ea54738c12c80e9aae8 100644
--- a/src/metabase/models/field.clj
+++ b/src/metabase/models/field.clj
@@ -6,7 +6,8 @@
              [config :as config]
              [util :as u]]
             [metabase.models
-             [field-values :refer [FieldValues]]
+             [dimension :refer [Dimension]]
+             [field-values :as fv :refer [FieldValues]]
              [humanization :as humanization]
              [interface :as i]
              [permissions :as perms]]
@@ -66,16 +67,18 @@
           :types          (constantly {:base_type       :keyword
                                        :special_type    :keyword
                                        :visibility_type :keyword
-                                       :description     :clob})
+                                       :description     :clob
+                                       :fingerprint     :json})
           :properties     (constantly {:timestamped? true})
           :pre-insert     pre-insert
           :pre-update     pre-update
           :pre-delete     pre-delete})
+
   i/IObjectPermissions
   (merge i/IObjectPermissionsDefaults
-         {:perms-objects-set  perms-objects-set
-          :can-read?          (partial i/current-user-has-full-permissions? :read)
-          :can-write?         i/superuser?}))
+         {:perms-objects-set perms-objects-set
+          :can-read?         (partial i/current-user-has-full-permissions? :read)
+          :can-write?        i/superuser?}))
 
 
 ;;; ------------------------------------------------------------ Hydration / Util Fns ------------------------------------------------------------
@@ -93,16 +96,39 @@
   [{:keys [id]}]
   (db/select [FieldValues :field_id :values], :field_id id))
 
+(defn- keyed-by-field-ids
+  "Queries for `MODEL` instances related to `FIELDS`; returns a map
+  keyed by :field_id"
+  [fields model]
+  (let [field-ids (set (map :id fields))]
+    (u/key-by :field_id (when (seq field-ids)
+                          (db/select model :field_id [:in field-ids])))))
+
 (defn with-values
   "Efficiently hydrate the `FieldValues` for a collection of FIELDS."
   {:batched-hydrate :values}
   [fields]
-  (let [field-ids        (set (map :id fields))
-        id->field-values (u/key-by :field_id (when (seq field-ids)
-                                               (db/select FieldValues :field_id [:in field-ids])))]
+  (let [id->field-values (keyed-by-field-ids fields FieldValues)]
+    (for [field fields]
+      (assoc field :values (get id->field-values (:id field) [])))))
+
+(defn with-normal-values
+  "Efficiently hydrate the `FieldValues` for visibility_type normal FIELDS."
+  {:batched-hydrate :normal_values}
+  [fields]
+  (let [id->field-values (keyed-by-field-ids (filter fv/field-should-have-field-values? fields)
+                                             [FieldValues :id :human_readable_values :values :field_id])]
     (for [field fields]
       (assoc field :values (get id->field-values (:id field) [])))))
 
+(defn with-dimensions
+  "Efficiently hydrate the `Dimension` for a collection of FIELDS."
+  {:batched-hydrate :dimensions}
+  [fields]
+  (let [id->dimensions (keyed-by-field-ids fields Dimension)]
+    (for [field fields]
+      (assoc field :dimensions (get id->dimensions (:id field) [])))))
+
 (defn with-targets
   "Efficiently hydrate the FK target fields for a collection of FIELDS."
   {:batched-hydrate :target}
@@ -139,115 +165,3 @@
   {:arglists '([field])}
   [{:keys [table_id]}]
   (db/select-one 'Table, :id table_id))
-
-
-;;; ------------------------------------------------------------ Sync Util Type Inference Fns ------------------------------------------------------------
-
-(def ^:private ^:const pattern+base-types+special-type
-  "Tuples of `[name-pattern set-of-valid-base-types special-type]`.
-   Fields whose name matches the pattern and one of the base types should be given the special type.
-
-   *  Convert field name to lowercase before matching against a pattern
-   *  Consider a nil set-of-valid-base-types to mean \"match any base type\""
-  (let [bool-or-int #{:type/Boolean :type/Integer}
-        float       #{:type/Float}
-        int-or-text #{:type/Integer :type/Text}
-        text        #{:type/Text}]
-    [[#"^.*_lat$"       float       :type/Latitude]
-     [#"^.*_lon$"       float       :type/Longitude]
-     [#"^.*_lng$"       float       :type/Longitude]
-     [#"^.*_long$"      float       :type/Longitude]
-     [#"^.*_longitude$" float       :type/Longitude]
-     [#"^.*_rating$"    int-or-text :type/Category]
-     [#"^.*_type$"      int-or-text :type/Category]
-     [#"^.*_url$"       text        :type/URL]
-     [#"^_latitude$"    float       :type/Latitude]
-     [#"^active$"       bool-or-int :type/Category]
-     [#"^city$"         text        :type/City]
-     [#"^country$"      text        :type/Country]
-     [#"^countryCode$"  text        :type/Country]
-     [#"^currency$"     int-or-text :type/Category]
-     [#"^first_name$"   text        :type/Name]
-     [#"^full_name$"    text        :type/Name]
-     [#"^gender$"       int-or-text :type/Category]
-     [#"^last_name$"    text        :type/Name]
-     [#"^lat$"          float       :type/Latitude]
-     [#"^latitude$"     float       :type/Latitude]
-     [#"^lon$"          float       :type/Longitude]
-     [#"^lng$"          float       :type/Longitude]
-     [#"^long$"         float       :type/Longitude]
-     [#"^longitude$"    float       :type/Longitude]
-     [#"^name$"         text        :type/Name]
-     [#"^postalCode$"   int-or-text :type/ZipCode]
-     [#"^postal_code$"  int-or-text :type/ZipCode]
-     [#"^rating$"       int-or-text :type/Category]
-     [#"^role$"         int-or-text :type/Category]
-     [#"^sex$"          int-or-text :type/Category]
-     [#"^state$"        text        :type/State]
-     [#"^status$"       int-or-text :type/Category]
-     [#"^type$"         int-or-text :type/Category]
-     [#"^url$"          text        :type/URL]
-     [#"^zip_code$"     int-or-text :type/ZipCode]
-     [#"^zipcode$"      int-or-text :type/ZipCode]]))
-
-;; Check that all the pattern tuples are valid
-(when-not config/is-prod?
-  (doseq [[name-pattern base-types special-type] pattern+base-types+special-type]
-    (assert (instance? java.util.regex.Pattern name-pattern))
-    (assert (every? (u/rpartial isa? :type/*) base-types))
-    (assert (isa? special-type :type/*))))
-
-(defn- infer-field-special-type
-  "If `name` and `base-type` matches a known pattern, return the `special_type` we should assign to it."
-  [field-name base-type]
-  (when (and (string? field-name)
-             (keyword? base-type))
-    (or (when (= "id" (s/lower-case field-name)) :type/PK)
-        (some (fn [[name-pattern valid-base-types special-type]]
-                (when (and (some (partial isa? base-type) valid-base-types)
-                           (re-matches name-pattern (s/lower-case field-name)))
-                  special-type))
-              pattern+base-types+special-type))))
-
-
-;;; ------------------------------------------------------------ Sync Util CRUD Fns ------------------------------------------------------------
-
-(defn update-field-from-field-def!
-  "Update an EXISTING-FIELD from the given FIELD-DEF."
-  {:arglists '([existing-field field-def])}
-  [{:keys [id], :as existing-field} {field-name :name, :keys [base-type special-type pk? parent-id]}]
-  (u/prog1 (assoc existing-field
-             :base_type    base-type
-             :display_name (or (:display_name existing-field)
-                               (humanization/name->human-readable-name field-name))
-             :special_type (or (:special_type existing-field)
-                               special-type
-                               (when pk?
-                                 :type/PK)
-                               (infer-field-special-type field-name base-type))
-
-             :parent_id    parent-id)
-    ;; if we have a different base-type or special-type, then update
-    (when (first (d/diff <> existing-field))
-      (db/update! Field id
-        :display_name (:display_name <>)
-        :base_type    base-type
-        :special_type (:special_type <>)
-        :parent_id    parent-id))))
-
-(defn create-field-from-field-def!
-  "Create a new `Field` from the given FIELD-DEF."
-  {:arglists '([table-id field-def])}
-  [table-id {field-name :name, :keys [base-type special-type pk? parent-id raw-column-id]}]
-  {:pre [(integer? table-id) (string? field-name) (isa? base-type :type/*)]}
-  (let [special-type (or special-type
-                       (when pk? :type/PK)
-                       (infer-field-special-type field-name base-type))]
-    (db/insert! Field
-      :table_id      table-id
-      :raw_column_id raw-column-id
-      :name          field-name
-      :display_name  (humanization/name->human-readable-name field-name)
-      :base_type     base-type
-      :special_type  special-type
-      :parent_id     parent-id)))
diff --git a/src/metabase/models/field_values.clj b/src/metabase/models/field_values.clj
index 2c64fcd33d67d5316160c26decc109dcb8ce15c5..64ec86bb94ff6e9325cf015495624c644d3b4b4c 100644
--- a/src/metabase/models/field_values.clj
+++ b/src/metabase/models/field_values.clj
@@ -5,6 +5,19 @@
              [db :as db]
              [models :as models]]))
 
+(def ^:const ^Integer low-cardinality-threshold
+  "Fields with less than this many distinct values should automatically be given a special type of `:type/Category`."
+  300)
+
+(def ^:private ^:const ^Integer entry-max-length
+  "The maximum character length for a stored `FieldValues` entry."
+  100)
+
+(def ^:private ^:const ^Integer total-max-length
+  "Maximum total length for a `FieldValues` entry (combined length of all values for the field)."
+  (* low-cardinality-threshold entry-max-length))
+
+
 ;; ## Entity + DB Multimethods
 
 (models/defmodel FieldValues :metabase_fieldvalues)
@@ -16,13 +29,6 @@
           :types       (constantly {:human_readable_values :json, :values :json})
           :post-select (u/rpartial update :human_readable_values #(or % {}))}))
 
-;; columns:
-;; *  :id
-;; *  :field_id
-;; *  :updated_at             WHY! I *DESPISE* THESE USELESS FIELDS
-;; *  :created_at
-;; *  :values                 (JSON-encoded array like ["table" "scalar" "pie"])
-;; *  :human_readable_values  (JSON-encoded map like {:table "Table" :scalar "Scalar"}
 
 ;; ## `FieldValues` Helper Functions
 
@@ -36,28 +42,77 @@
   (and (not (contains? #{:retired :sensitive :hidden :details-only} (keyword visibility_type)))
        (not (isa? (keyword base_type) :type/DateTime))
        (or (isa? (keyword base_type) :type/Boolean)
-           (isa? (keyword special_type) :type/Category))))
+           (isa? (keyword special_type) :type/Category)
+           (isa? (keyword special_type) :type/Enum))))
 
-(defn- create-field-values!
-  "Create `FieldValues` for a `Field`."
-  {:arglists '([field] [field human-readable-values])}
-  [{field-id :id, field-name :name, :as field} & [human-readable-values]]
-  {:pre [(integer? field-id)]}
-  (log/debug (format "Creating FieldValues for Field %s..." (or field-name field-id))) ; use field name if available
-  (db/insert! FieldValues
-    :field_id              field-id
-    :values                ((resolve 'metabase.db.metadata-queries/field-distinct-values) field)
-    :human_readable_values human-readable-values))
-
-(defn update-field-values!
-  "Update the `FieldValues` for FIELD, creating them if needed"
-  [{field-id :id, :as field}]
-  {:pre [(integer? field-id)
-         (field-should-have-field-values? field)]}
-  (if-let [field-values (FieldValues :field_id field-id)]
-    (db/update! FieldValues (u/get-id field-values)
-      :values ((resolve 'metabase.db.metadata-queries/field-distinct-values) field))
-    (create-field-values! field)))
+
+(defn- values-less-than-total-max-length?
+  "`true` if the combined length of all the values in DISTINCT-VALUES is below the
+   threshold for what we'll allow in a FieldValues entry. Does some logging as well."
+  [distinct-values]
+  (let [total-length (reduce + (map (comp count str)
+                                    distinct-values))]
+    (u/prog1 (<= total-length total-max-length)
+      (log/debug (format "Field values total length is %d (max %d)." total-length total-max-length)
+                 (if <>
+                   "FieldValues are allowed for this Field."
+                   "FieldValues are NOT allowed for this Field.")))))
+
+(defn- cardinality-less-than-threshold?
+  "`true` if the number of DISTINCT-VALUES is less than `low-cardinality-threshold`.
+   Does some logging as well."
+  [distinct-values]
+  (let [num-values (count distinct-values)]
+    (u/prog1 (<= num-values low-cardinality-threshold)
+      (log/debug (if <>
+                   (format "Field has %d distinct values (max %d). FieldValues are allowed for this Field." num-values low-cardinality-threshold)
+                   (format "Field has over %d values. FieldValues are NOT allowed for this Field." low-cardinality-threshold))))))
+
+
+(defn- distinct-values
+  "Fetch a sequence of distinct values for FIELD that are below the `total-max-length` threshold.
+   If the values are past the threshold, this returns `nil`."
+  [field]
+  (require 'metabase.db.metadata-queries)
+  (let [values ((resolve 'metabase.db.metadata-queries/field-distinct-values) field)]
+    (when (cardinality-less-than-threshold? values)
+      (when (values-less-than-total-max-length? values)
+        values))))
+
+
+(defn create-or-update-field-values!
+  "Create or update the FieldValues object for FIELD."
+  [field & [human-readable-values]]
+  (let [field-values (FieldValues :field_id (u/get-id field))
+        values       (distinct-values field)
+        field-name   (or (:name field) (:id field))]
+    (cond
+      ;; if the FieldValues object already exists then update values in it
+      (and field-values values)
+      (do
+        (log/debug (format "Storing updated FieldValues for Field %s..." field-name))
+        (db/update! FieldValues (u/get-id field-values)
+          :values values))
+      ;; if FieldValues object doesn't exist create one
+      values
+      (do
+        (log/debug (format "Storing FieldValues for Field %s..." field-name))
+        (db/insert! FieldValues
+          :field_id              (u/get-id field)
+          :values                values
+          :human_readable_values human-readable-values))
+      ;; otherwise this Field isn't eligible, so delete any FieldValues that might exist
+      :else
+      (db/delete! FieldValues :field_id (u/get-id field)))))
+
+
+(defn field-values->pairs
+  "Returns a list of pairs (or single element vectors if there are no
+  human_readable_values) for the given `FIELD-VALUES` instance"
+  [{:keys [values human_readable_values] :as field-values}]
+  (if (seq human_readable_values)
+    (map vector values human_readable_values)
+    (map vector values)))
 
 (defn create-field-values-if-needed!
   "Create `FieldValues` for a `Field` if they *should* exist but don't already exist.
@@ -67,7 +122,7 @@
   {:pre [(integer? field-id)]}
   (when (field-should-have-field-values? field)
     (or (FieldValues :field_id field-id)
-        (create-field-values! field human-readable-values))))
+        (create-or-update-field-values! field human-readable-values))))
 
 (defn save-field-values!
   "Save the `FieldValues` for FIELD-ID, creating them if needed, otherwise updating them."
diff --git a/src/metabase/models/permissions_group.clj b/src/metabase/models/permissions_group.clj
index a146b5a95596b370c8f42447ca4d4cc6a531e2b2..34a9f0f635bb517e4325c6a5483e6c93f1002d7c 100644
--- a/src/metabase/models/permissions_group.clj
+++ b/src/metabase/models/permissions_group.clj
@@ -41,7 +41,7 @@
 (defn exists-with-name?
   "Does a `PermissionsGroup` with GROUP-NAME exist in the DB? (case-insensitive)"
   ^Boolean [group-name]
-  {:pre [(u/string-or-keyword? group-name)]}
+  {:pre [((some-fn keyword? string?) group-name)]}
   (db/exists? PermissionsGroup
     :%lower.name (s/lower-case (name group-name))))
 
diff --git a/src/metabase/models/raw_column.clj b/src/metabase/models/raw_column.clj
deleted file mode 100644
index ae91c006a15196df7762418de3fa4372cd6ada73..0000000000000000000000000000000000000000
--- a/src/metabase/models/raw_column.clj
+++ /dev/null
@@ -1,24 +0,0 @@
-(ns ^:deprecated metabase.models.raw-column
-  (:require [metabase.util :as u]
-            [toucan
-             [db :as db]
-             [models :as models]]))
-
-(models/defmodel ^:deprecated RawColumn :raw_column)
-
-(defn- pre-insert [table]
-  (let [defaults {:active  true
-                  :is_pk   false
-                  :details {}}]
-    (merge defaults table)))
-
-(defn- pre-delete [{:keys [id]}]
-  (db/delete! RawColumn :fk_target_column_id id))
-
-(u/strict-extend (class RawColumn)
-  models/IModel (merge models/IModelDefaults
-                   {:hydration-keys (constantly [:columns])
-                    :types          (constantly {:base_type :keyword, :details :json})
-                    :properties     (constantly {:timestamped? true})
-                    :pre-insert     pre-insert
-                    :pre-delete     pre-delete}))
diff --git a/src/metabase/models/raw_table.clj b/src/metabase/models/raw_table.clj
deleted file mode 100644
index 880d5a90cfdff999e2127540846837127653fdea..0000000000000000000000000000000000000000
--- a/src/metabase/models/raw_table.clj
+++ /dev/null
@@ -1,43 +0,0 @@
-(ns ^:deprecated metabase.models.raw-table
-  (:require [metabase.models.raw-column :refer [RawColumn]]
-            [metabase.util :as u]
-            [toucan
-             [db :as db]
-             [models :as models]]))
-
-(models/defmodel ^:deprecated RawTable :raw_table)
-
-(defn- pre-insert [table]
-  (let [defaults {:details {}}]
-    (merge defaults table)))
-
-(defn- pre-delete [{:keys [id]}]
-  (db/delete! 'Table :raw_table_id id)
-  (db/delete! RawColumn :raw_table_id id))
-
-(u/strict-extend (class RawTable)
-  models/IModel (merge models/IModelDefaults
-                   {:types      (constantly {:details :json})
-                    :properties (constantly {:timestamped? true})
-                    :pre-insert pre-insert
-                    :pre-delete pre-delete}))
-
-
-;;; ## ---------------------------------------- PERSISTENCE FUNCTIONS ----------------------------------------
-
-
-(defn ^:hydrate columns
-  "Return the `RawColumns` belonging to RAW-TABLE."
-  [{:keys [id]}]
-  (db/select RawColumn, :raw_table_id id, {:order-by [[:name :asc]]}))
-
-(defn active-tables
-  "Return the active `RawColumns` belonging to RAW-TABLE."
-  [database-id]
-  (db/select RawTable, :database_id database-id, :active true, {:order-by [[:schema :asc]
-                                                                           [:name :asc]]}))
-
-(defn active-columns
-  "Return the active `RawColumns` belonging to RAW-TABLE."
-  [{:keys [id]}]
-  (db/select RawColumn, :raw_table_id id, :active true, {:order-by [[:name :asc]]}))
diff --git a/src/metabase/models/setting.clj b/src/metabase/models/setting.clj
index 27a05177548c6e3f93faceaef89e99a7dd175c09..3c9439d78a9a6a8b76f0cae103369505046d5a66 100644
--- a/src/metabase/models/setting.clj
+++ b/src/metabase/models/setting.clj
@@ -53,7 +53,7 @@
 
 
 (def ^:private Type
-  (s/enum :string :boolean :json :integer))
+  (s/enum :string :boolean :json :integer :double))
 
 (def ^:private SettingDefinition
   {:name        s/Keyword
@@ -158,6 +158,12 @@
   (when-let [s (get-string setting-or-name)]
     (Integer/parseInt s)))
 
+(defn get-double
+  "Get double value of (presumably `:double`) SETTING-OR-NAME. This is the default getter for `:double` settings."
+  ^Double [setting-or-name]
+  (when-let [s (get-string setting-or-name)]
+    (Double/parseDouble s)))
+
 (defn get-json
   "Get the string value of SETTING-OR-NAME and parse it as JSON."
   [setting-or-name]
@@ -167,7 +173,8 @@
   {:string  get-string
    :boolean get-boolean
    :integer get-integer
-   :json    get-json})
+   :json    get-json
+   :double  get-double})
 
 (defn get
   "Fetch the value of SETTING-OR-NAME. What this means depends on the Setting's `:getter`; by default, this looks for first for a corresponding env var,
@@ -236,6 +243,15 @@
                                                   (re-matches #"^\d+$" new-value))))
                                  (str new-value))))
 
+(defn set-double!
+  "Set the value of double SETTING-OR-NAME."
+  [setting-or-name new-value]
+  (set-string! setting-or-name (when new-value
+                                 (assert (or (float? new-value)
+                                             (and (string? new-value)
+                                                  (re-matches #"[+-]?([0-9]*[.])?[0-9]+" new-value))))
+                                 (str new-value))))
+
 (defn set-json!
   "Serialize NEW-VALUE for SETTING-OR-NAME as a JSON string and save it."
   [setting-or-name new-value]
@@ -246,7 +262,8 @@
   {:string  set-string!
    :boolean set-boolean!
    :integer set-integer!
-   :json    set-json!})
+   :json    set-json!
+   :double  set-double!})
 
 (defn set!
   "Set the value of SETTING-OR-NAME. What this means depends on the Setting's `:setter`; by default, this just updates the Settings cache and writes its value to the DB.
@@ -369,16 +386,19 @@
 
 
 (defn- user-facing-info [setting]
-  (let [k (:name setting)
-        v (get k)]
-    {:key         k
-     :value       (when (and (not= v (env-var-value setting))
-                             (not= v (:default setting)))
-                    v)
-     :description (:description setting)
-     :default     (or (when (env-var-value setting)
-                        (format "Using $%s" (env-var-name setting)))
-                      (:default setting))}))
+  (let [k         (:name setting)
+        v         (get k)
+        env-value (env-var-value setting)]
+    {:key            k
+     :value          (when (and (not= v env-value)
+                                (not= v (:default setting)))
+                       v)
+     :is_env_setting (boolean env-value)
+     :env_name       (env-var-name setting)
+     :description    (:description setting)
+     :default        (or (when env-value
+                           (format "Using $%s" (env-var-name setting)))
+                         (:default setting))}))
 
 (defn all
   "Return a sequence of Settings maps in a format suitable for consumption by the frontend.
diff --git a/src/metabase/models/table.clj b/src/metabase/models/table.clj
index c72a0eb0f45245c94336677f1c8b18771972a8a3..8b4dc8924643d4f2f4200e0f0556bdda94a97fc5 100644
--- a/src/metabase/models/table.clj
+++ b/src/metabase/models/table.clj
@@ -151,48 +151,3 @@
   [table-id]
   {:pre [(integer? table-id)]}
   (db/select-one-field :db_id Table, :id table-id))
-
-
-;;; ------------------------------------------------------------ Persistence Functions ------------------------------------------------------------
-
-(defn retire-tables!
-  "Retire all `Tables` in the list of TABLE-IDs along with all of each tables `Fields`."
-  [table-ids]
-  {:pre [(u/maybe? set? table-ids) (every? integer? table-ids)]}
-  (when (seq table-ids)
-    ;; retire the tables
-    (db/update-where! Table {:id [:in table-ids]}
-      :active false)
-    ;; retire the fields of retired tables
-    (db/update-where! Field {:table_id [:in table-ids]}
-      :visibility_type "retired")))
-
-(defn update-table-from-tabledef!
-  "Update `Table` with the data from TABLE-DEF."
-  [{:keys [id display_name], :as existing-table} {table-name :name}]
-  {:pre [(integer? id)]}
-  (let [updated-table (assoc existing-table
-                        :display_name (or display_name (humanization/name->human-readable-name table-name)))]
-    ;; the only thing we need to update on a table is the :display_name, if it never got set
-    (when (nil? display_name)
-      (db/update! Table id
-        :display_name (:display_name updated-table)))
-    ;; always return the table when we are done
-    updated-table))
-
-(defn create-table-from-tabledef!
-  "Create `Table` with the data from TABLE-DEF."
-  [database-id {schema-name :schema, table-name :name, raw-table-id :raw-table-id, visibility-type :visibility-type}]
-  (if-let [existing-id (db/select-one-id Table :db_id database-id, :raw_table_id raw-table-id, :schema schema-name, :name table-name, :active false)]
-    ;; if the table already exists but is marked *inactive*, mark it as *active*
-    (db/update! Table existing-id
-      :active true)
-    ;; otherwise create a new Table
-    (db/insert! Table
-      :db_id           database-id
-      :raw_table_id    raw-table-id
-      :schema          schema-name
-      :name            table-name
-      :visibility_type visibility-type
-      :display_name    (humanization/name->human-readable-name table-name)
-      :active          true)))
diff --git a/src/metabase/public_settings.clj b/src/metabase/public_settings.clj
index cbc1f9238d7e52528dd6a8f33cead577b0d0837d..497e494f14c19b72c04110870a2b4cdc5721c7fd 100644
--- a/src/metabase/public_settings.clj
+++ b/src/metabase/public_settings.clj
@@ -55,6 +55,11 @@
   :type    :boolean
   :default false)
 
+(defsetting enable-nested-queries
+  "Allow using a saved question as the source for other queries?"
+  :type    :boolean
+  :default true)
+
 
 (defsetting enable-query-caching
   "Enabling caching will save the results of queries that take a long time to run."
@@ -88,6 +93,18 @@
   :type    :integer
   :default 10)
 
+(defsetting breakout-bins-num
+  "When using the default binning strategy and a number of bins is not
+  provided, this number will be used as the default."
+  :type :integer
+  :default 8)
+
+(defsetting breakout-bin-width
+  "When using the default binning strategy for a field of type
+  Coordinate (such as Latitude and Longitude), this number will be used
+  as the default bin width (in degrees)."
+  :type :double
+  :default 10.0)
 
 (defn remove-public-uuid-if-public-sharing-is-disabled
   "If public sharing is *disabled* and OBJECT has a `:public_uuid`, remove it so people don't try to use it (since it won't work).
@@ -117,16 +134,17 @@
    :anon_tracking_enabled (anon-tracking-enabled)
    :custom_geojson        (setting/get :custom-geojson)
    :email_configured      ((resolve 'metabase.email/email-configured?))
+   :embedding             (enable-embedding)
    :enable_query_caching  (enable-query-caching)
+   :enable_nested_queries (enable-nested-queries)
    :engines               ((resolve 'metabase.driver/available-drivers))
    :ga_code               "UA-60817802-1"
    :google_auth_client_id (setting/get :google-auth-client-id)
-   :ldap_configured       ((resolve 'metabase.integrations.ldap/ldap-configured?))
    :has_sample_dataset    (db/exists? 'Database, :is_sample true)
+   :ldap_configured       ((resolve 'metabase.integrations.ldap/ldap-configured?))
    :map_tile_server_url   (map-tile-server-url)
    :password_complexity   password/active-password-complexity
    :public_sharing        (enable-public-sharing)
-   :embedding             (enable-embedding)
    :report_timezone       (setting/get :report-timezone)
    :setup_token           ((resolve 'metabase.setup/token-value))
    :site_name             (site-name)
diff --git a/src/metabase/pulse/render.clj b/src/metabase/pulse/render.clj
index aeda60eff850892edae154ced2919ea14e104df0..0bd159695c8098d19dbd85db49f28d4e225d8f6d 100644
--- a/src/metabase/pulse/render.clj
+++ b/src/metabase/pulse/render.clj
@@ -224,13 +224,26 @@
     (render-to-png html os width)
     (.toByteArray os)))
 
+(defn- create-remapping-lookup [cols col-indexes]
+  (into {}
+        (for [col-index col-indexes
+              :let [{:keys [remapped_from]} (nth cols col-index)]
+              :when remapped_from]
+          [remapped_from col-index])))
+
 (defn- render-table
   [card rows cols col-indexes bar-column]
-  (let [max-value (if bar-column (apply max (map bar-column rows)))]
+  (let [max-value (if bar-column (apply max (map bar-column rows)))
+        remapping-lookup (create-remapping-lookup cols col-indexes)]
     [:table {:style (style {:padding-bottom :8px, :border-bottom (str "4px solid " color-gray-1)})}
      [:thead
       [:tr
-       (for [col-idx col-indexes :let [col (nth cols col-idx)]]
+       (for [col-idx col-indexes
+             :let [col-at-index (nth cols col-idx)
+                   col (if (:remapped_to col-at-index)
+                         (nth cols (get remapping-lookup (:name col-at-index)))
+                         col-at-index)]
+             :when (not (:remapped_from col-at-index))]
          [:th {:style (style bar-td-style bar-th-style {:min-width :60px})}
           (h (s/upper-case (name (or (:display_name col) (:name col)))))])
        (when bar-column
@@ -238,9 +251,14 @@
      [:tbody
       (map-indexed (fn [row-idx row]
                      [:tr {:style (style {:color (if (odd? row-idx) color-gray-2 color-gray-3)})}
-                      (for [col-idx col-indexes :let [col (nth cols col-idx)]]
+                      (for [col-idx col-indexes
+                            :let [col (nth cols col-idx)]
+                            :when (not (:remapped_from col))]
                         [:td {:style (style bar-td-style (when (and bar-column (= col-idx 1)) {:font-weight 700}))}
-                         (-> row (nth col-idx) (format-cell col) h)])
+                         (if-let [remapped-index (and (:remapped_to col)
+                                                      (get remapping-lookup (:name col)))]
+                           (-> row (nth remapped-index) (format-cell (nth cols remapped-index)) h)
+                           (-> row (nth col-idx) (format-cell col) h))])
                       (when bar-column
                         [:td {:style (style bar-td-style {:width :99%})}
                          [:div {:style (style {:background-color color-purple
diff --git a/src/metabase/query_processor.clj b/src/metabase/query_processor.clj
index 708986eb302154d7ac56f3dfe0ddcb67ae477e2c..b7253cedd21e125c5fede30201867c4b9be2ff7e 100644
--- a/src/metabase/query_processor.clj
+++ b/src/metabase/query_processor.clj
@@ -8,17 +8,19 @@
              [query :as query]
              [query-execution :as query-execution :refer [QueryExecution]]]
             [metabase.query-processor.middleware
+             [add-dimension-projections :as add-dim]
              [add-implicit-clauses :as implicit-clauses]
              [add-row-count-and-status :as row-count-and-status]
              [add-settings :as add-settings]
              [annotate-and-sort :as annotate-and-sort]
+             [binning :as binning]
              [cache :as cache]
              [catch-exceptions :as catch-exceptions]
              [cumulative-aggregations :as cumulative-ags]
              [dev :as dev]
              [driver-specific :as driver-specific]
+             [expand :as expand]
              [expand-macros :as expand-macros]
-             [expand-resolve :as expand-resolve]
              [fetch-source-query :as fetch-source-query]
              [format-rows :as format-rows]
              [limit :as limit]
@@ -27,7 +29,9 @@
              [parameters :as parameters]
              [permissions :as perms]
              [results-metadata :as results-metadata]
-             [resolve-driver :as resolve-driver]]
+             [resolve-driver :as resolve-driver]
+             [resolve :as resolve]
+             [source-table :as source-table]]
             [metabase.query-processor.util :as qputil]
             [metabase.util.schema :as su]
             [schema.core :as s]
@@ -85,10 +89,14 @@
       dev/check-results-format
       limit/limit
       cumulative-ags/handle-cumulative-aggregations
-      implicit-clauses/add-implicit-clauses
       format-rows/format-rows
+      binning/update-binning-strategy
       results-metadata/record-and-return-metadata!
-      expand-resolve/expand-resolve                    ; ▲▲▲ QUERY EXPANSION POINT  ▲▲▲ All functions *above* will see EXPANDED query during PRE-PROCESSING
+      resolve/resolve-middleware
+      add-dim/add-remapping
+      implicit-clauses/add-implicit-clauses
+      source-table/resolve-source-table-middleware
+      expand/expand-middleware                         ; ▲▲▲ QUERY EXPANSION POINT  ▲▲▲ All functions *above* will see EXPANDED query during PRE-PROCESSING
       row-count-and-status/add-row-count-and-status    ; ▼▼▼ RESULTS WRAPPING POINT ▼▼▼ All functions *below* will see results WRAPPED in `:data` during POST-PROCESSING
       parameters/substitute-parameters
       expand-macros/expand-macros
@@ -121,7 +129,9 @@
   "Expand a QUERY the same way it would normally be done as part of query processing.
    This is useful for things that need to look at an expanded query, such as permissions checking for Cards."
   (->> identity
-       expand-resolve/expand-resolve
+       resolve/resolve-middleware
+       source-table/resolve-source-table-middleware
+       expand/expand-middleware
        parameters/substitute-parameters
        expand-macros/expand-macros
        fetch-source-query/fetch-source-query))
diff --git a/src/metabase/query_processor/annotate.clj b/src/metabase/query_processor/annotate.clj
index 4b5436ed249fa63820fd9e9662ff1575e5f99fb4..450a533ea27b1bdcdf251f46f4fc74337a8aaa84 100644
--- a/src/metabase/query_processor/annotate.clj
+++ b/src/metabase/query_processor/annotate.clj
@@ -15,7 +15,8 @@
              [humanization :as humanization]]
             [metabase.query-processor
              [interface :as i]
-             [sort :as sort]]
+             [sort :as sort]
+             [util :as qputil]]
             [toucan.db :as db])
   (:import [metabase.query_processor.interface Expression ExpressionRef]))
 
@@ -49,6 +50,14 @@
           (i/map->DateTimeField {:field field, :unit unit}))
         fields))
 
+    metabase.query_processor.interface.BinnedField
+    (let [{:keys [strategy min-value max-value], nested-field :field} this]
+      [(assoc nested-field :binning_info {:binning_strategy strategy
+                                          :bin_width (:bin-width this)
+                                          :num_bins (:num-bins this)
+                                          :min_value min-value
+                                          :max_value max-value})])
+
     metabase.query_processor.interface.Field
     (if-let [parent (:parent this)]
       [this parent]
@@ -224,6 +233,28 @@
                          :when (contains? missing-keys k)]
                      (info-for-missing-key inner-query fields k (map k initial-rows))))))
 
+(defn- fixup-renamed-fields
+  "After executing the query, it's possible that java.jdbc changed the
+  name of the column that was originally in the query. This can happen
+  when java.jdbc finds two columns with the same name, it will append
+  an integer (like _2) on the end. When this is done on an existing
+  column in the query, this function fixes that up, updating the
+  column information we have with the new name that java.jdbc assigned
+  the column. The `add-unknown-fields-if-needed` function above is
+  similar, but is used when we don't have existing information on that
+  column and need to infer it."
+  [query actual-keys]
+  (let [expected-field-names (set (map (comp keyword name) (:fields query)))]
+    (if (= expected-field-names (set actual-keys))
+      query
+      (update query :fields
+              (fn [fields]
+                (mapv (fn [expected-field actual-key]
+                        (if (not= (name expected-field) (name actual-key))
+                          (assoc expected-field :field-name (name actual-key))
+                          expected-field))
+                      fields actual-keys))))))
+
 (defn- convert-field-to-expected-format
   "Rename keys, provide default values, etc. for FIELD so it is in the format expected by the frontend."
   [field]
@@ -243,7 +274,9 @@
                           :schema-name        :schema_name
                           :special-type       :special_type
                           :table-id           :table_id
-                          :visibility-type    :visibility_type})
+                          :visibility-type    :visibility_type
+                          :remapped-to        :remapped_to
+                          :remapped-from      :remapped_from})
         (dissoc :position :clause-position :parent :parent-id :table-name))))
 
 (defn- fk-field->dest-fn
@@ -289,25 +322,28 @@
   [inner-query result-keys initial-rows]
   {:pre [(sequential? result-keys)]}
   (when (seq result-keys)
-    (->> (collect-fields (dissoc inner-query :expressions))
-         ;; qualify the field name to make sure it matches what will come back. (For Mongo nested queries only)
-         (map qualify-field-name)
-         ;; add entries for aggregate fields
-         (add-aggregate-fields-if-needed inner-query)
-         ;; make field-name a keyword
-         (map (u/rpartial update :field-name keyword))
-         ;; add entries for fields we weren't expecting
-         (add-unknown-fields-if-needed inner-query result-keys initial-rows)
-         ;; remove expected fields not present in the results, and make sure they're unique
-         (filter (comp (partial contains? (set result-keys)) :field-name))
-         ;; now sort the fields
-         (sort/sort-fields inner-query)
-         ;; remove any duplicate entires
-         (m/distinct-by :field-name)
-         ;; convert them to the format expected by the frontend
-         (map convert-field-to-expected-format)
-         ;; add FK info
-         add-extra-info-to-fk-fields)))
+    (let [result-keys-set (set result-keys)
+          query-with-renamed-columns (fixup-renamed-fields inner-query result-keys)]
+      (->> (dissoc query-with-renamed-columns :expressions)
+           collect-fields
+           ;; qualify the field name to make sure it matches what will come back. (For Mongo nested queries only)
+           (map qualify-field-name)
+           ;; add entries for aggregate fields
+           (add-aggregate-fields-if-needed inner-query)
+           ;; make field-name a keyword
+           (map (u/rpartial update :field-name keyword))
+           ;; add entries for fields we weren't expecting
+           (add-unknown-fields-if-needed inner-query result-keys initial-rows)
+           ;; remove expected fields not present in the results, and make sure they're unique
+           (filter (comp (partial contains? (set result-keys)) :field-name))
+           ;; now sort the fields
+           (sort/sort-fields inner-query)
+           ;; remove any duplicate entries
+           (m/distinct-by :field-name)
+           ;; convert them to the format expected by the frontend
+           (map convert-field-to-expected-format)
+           ;; add FK info
+           add-extra-info-to-fk-fields))))
 
 (defn annotate-and-sort
   "Post-process a structured query to add metadata to the results. This stage:
diff --git a/src/metabase/query_processor/interface.clj b/src/metabase/query_processor/interface.clj
index 46f7a7e1dc63ef3e625f041034f65ac6c4b24d40..524b0ff2942dd90a0d51dfe06076ef5e429c596e 100644
--- a/src/metabase/query_processor/interface.clj
+++ b/src/metabase/query_processor/interface.clj
@@ -1,11 +1,14 @@
 (ns metabase.query-processor.interface
   "Definitions of `Field`, `Value`, and other record types present in an expanded query.
    This namespace should just contain definitions of various protocols and record types; associated logic
-   should go in `metabase.query-processor.expand`."
-  (:require [metabase.models.field :as field]
+   should go in `metabase.query-processor.middleware.expand`."
+  (:require [metabase.models
+             [dimension :as dim]
+             [field :as field]]
             [metabase.util :as u]
             [metabase.util.schema :as su]
-            [schema.core :as s])
+            [schema.core :as s]
+            [metabase.sync.interface :as i])
   (:import clojure.lang.Keyword
            java.sql.Timestamp))
 
@@ -84,6 +87,22 @@
 ;;; |                                                                             FIELDS                                                                             |
 ;;; +----------------------------------------------------------------------------------------------------------------------------------------------------------------+
 
+
+(s/defrecord FieldValues [field-value-id          :- su/IntGreaterThanZero
+                          field-id                :- su/IntGreaterThanZero
+                          values                  :- (s/maybe (s/cond-pre [s/Any] {} []))
+                          human-readable-values   :- (s/maybe (s/cond-pre [s/Any] {} []))
+                          created-at              :- java.util.Date
+                          updated-at              :- java.util.Date])
+
+(s/defrecord Dimensions [dimension-id            :- su/IntGreaterThanZero
+                         field-id                :- su/IntGreaterThanZero
+                         dimension-name          :- su/NonBlankString
+                         human-readable-field-id :- (s/maybe su/IntGreaterThanZero)
+                         dimension-type          :- (apply s/enum dim/dimension-types)
+                         created-at              :- java.util.Date
+                         updated-at              :- java.util.Date])
+
 ;; Field is the "expanded" form of a Field ID (field reference) in MBQL
 (s/defrecord Field [field-id           :- su/IntGreaterThanZero
                     field-name         :- su/NonBlankString
@@ -99,7 +118,12 @@
                     description        :- (s/maybe su/NonBlankString)
                     parent-id          :- (s/maybe su/IntGreaterThanZero)
                     ;; Field once its resolved; FieldPlaceholder before that
-                    parent             :- s/Any]
+                    parent             :- s/Any
+                    remapped-from      :- (s/maybe s/Str)
+                    remapped-to        :- (s/maybe s/Str)
+                    dimensions         :- (s/maybe (s/cond-pre Dimensions {} []))
+                    values             :- (s/maybe (s/cond-pre FieldValues {} []))
+                    fingerprint        :- (s/maybe i/Fingerprint)]
   clojure.lang.Named
   (getName [_] field-name) ; (name <field>) returns the *unqualified* name of the field, #obvi
 
@@ -149,6 +173,19 @@
   clojure.lang.Named
   (getName [_] (name field)))
 
+(def binning-strategies
+  "Valid binning strategies for a `BinnedField`"
+  #{:num-bins :bin-width :default})
+
+(s/defrecord BinnedField [field     :- Field
+                          strategy  :- (apply s/enum binning-strategies)
+                          num-bins  :- s/Int
+                          min-value :- s/Num
+                          max-value :- s/Num
+                          bin-width :- s/Num]
+  clojure.lang.Named
+  (getName [_] (name field)))
+
 (s/defrecord ExpressionRef [expression-name :- su/NonBlankString]
   clojure.lang.Named
   (getName [_] expression-name)
@@ -160,11 +197,16 @@
 ;;; Placeholder Types
 
 ;; Replace Field IDs with these during first pass
-(s/defrecord FieldPlaceholder [field-id      :- su/IntGreaterThanZero
-                               fk-field-id   :- (s/maybe (s/constrained su/IntGreaterThanZero
-                                                                        (fn [_] (or (assert-driver-supports :foreign-keys) true)) ; assert-driver-supports will throw Exception if driver is bound
-                                                                        "foreign-keys is not supported by this driver."))         ; and driver does not support foreign keys
-                               datetime-unit :- (s/maybe DatetimeFieldUnit)])
+(s/defrecord FieldPlaceholder [field-id            :- su/IntGreaterThanZero
+                               fk-field-id         :- (s/maybe (s/constrained su/IntGreaterThanZero
+                                                                              (fn [_] (or (assert-driver-supports :foreign-keys) true)) ; assert-driver-supports will throw Exception if driver is bound
+                                                                              "foreign-keys is not supported by this driver."))         ; and driver does not support foreign keys
+                               datetime-unit       :- (s/maybe DatetimeFieldUnit)
+                               remapped-from       :- (s/maybe s/Str)
+                               remapped-to         :- (s/maybe s/Str)
+                               field-display-name  :- (s/maybe s/Str)
+                               binning-strategy    :- (s/maybe (apply s/enum binning-strategies))
+                               binning-param       :- (s/maybe s/Num)])
 
 (s/defrecord AgFieldRef [index :- s/Int])
 ;; TODO - add a method to get matching expression from the query?
diff --git a/src/metabase/query_processor/middleware/add_dimension_projections.clj b/src/metabase/query_processor/middleware/add_dimension_projections.clj
new file mode 100644
index 0000000000000000000000000000000000000000..c97575f1788b3b86c90ad2eeaa254186f9d3d5d9
--- /dev/null
+++ b/src/metabase/query_processor/middleware/add_dimension_projections.clj
@@ -0,0 +1,147 @@
+(ns metabase.query-processor.middleware.add-dimension-projections
+  "Middleware for adding remapping and other dimension related projections"
+  (:require [metabase.models.field :refer [with-dimensions with-values]]
+            [metabase.query-processor
+             [interface :as i]
+             [util :as qputil]]))
+
+(defn- create-remapped-col [col-name remapped-from]
+  {:description nil,
+   :id nil,
+   :table_id nil,
+   :expression-name col-name,
+   :source :fields,
+   :name col-name,
+   :display_name col-name,
+   :target nil,
+   :extra_info {}
+   :remapped_from remapped-from
+   :remapped_to nil})
+
+(defn- create-fk-remap-col [fk-field-id dest-field-id remapped-from field-display-name]
+  (i/map->FieldPlaceholder {:fk-field-id fk-field-id
+                            :field-id dest-field-id
+                            :remapped-from remapped-from
+                            :remapped-to nil
+                            :field-display-name field-display-name}))
+
+(defn- row-map-fn [dim-seq]
+  (fn [row]
+    (concat row (map (fn [{:keys [col-index xform-fn]}]
+                          (xform-fn (nth row col-index)))
+                        dim-seq))))
+
+(defn- transform-values-for-col
+  "Converts `VALUES` to a type compatible with the base_type found for
+  `COL`. These values should be directly comparable with the values
+  returned from the database for the given `COL`."
+  [{:keys [base_type] :as col} values]
+  (cond
+    (isa? base_type :type/Decimal)
+    (map bigdec values)
+
+    (isa? base_type :type/Float)
+    (map double values)
+
+    (isa? base_type :type/BigInteger)
+    (map bigint values)
+
+    (isa? base_type :type/Integer)
+    (map int values)
+
+    (isa? base_type :type/Text)
+    (map str values)
+
+    :else
+    values))
+
+(defn- assoc-remapped-to [from->to]
+  (fn [col]
+    (-> col
+        (update :remapped_to #(or % (from->to (:name col))))
+        (update :remapped_from #(or % nil)))))
+
+(defn- col->dim-map
+  [idx {{remap-to :dimension-name remap-type :dimension-type field-id :field-id} :dimensions :as col}]
+  (when field-id
+    (let [remap-from (:name col)]
+      {:col-index idx
+       :from remap-from
+       :to remap-to
+       :xform-fn (zipmap (transform-values-for-col col (get-in col [:values :values]))
+                         (get-in col [:values :human-readable-values]))
+       :new-column (create-remapped-col remap-to remap-from)
+       :dimension-type remap-type})))
+
+(defn- create-remap-col-pairs
+  "Return pairs of field id and the new remapped column that the field
+  should be remapped to. This is a list of pairs as we want to
+  preserve order"
+  [fields]
+  (for [{{:keys [field-id human-readable-field-id dimension-type dimension-name]} :dimensions,
+         field-name :field-name, source-field-id :field-id} fields
+        :when (= :external dimension-type)]
+    [source-field-id (create-fk-remap-col field-id
+                                          human-readable-field-id
+                                          field-name
+                                          dimension-name)]))
+
+(defn- update-remapped-order-by
+  "Order by clauses that include an external remapped column should
+  replace that original column in the order by with the newly remapped
+  column. This should order by the text of the remapped column vs. the
+  id of the source column before the remapping"
+  [remap-cols-by-id order-by-seq]
+  (when (seq order-by-seq)
+    (mapv (fn [{{:keys [field-id]} :field :as order-by-clause}]
+            (if-let [remapped-col (get remap-cols-by-id field-id)]
+              (assoc order-by-clause :field remapped-col)
+              order-by-clause))
+          order-by-seq)))
+
+(defn- add-fk-remaps
+  "Function that will include FK references needed for external
+  remappings. This will then flow through to the resolver to get the
+  new tables included in the join."
+  [query]
+  (let [remap-col-pairs (create-remap-col-pairs (get-in query [:query :fields]))]
+    (if (seq remap-col-pairs)
+      (-> query
+          (update-in [:query :order-by] #(update-remapped-order-by (into {} remap-col-pairs) %))
+          (update-in [:query :fields] concat (map second remap-col-pairs)))
+      query)))
+
+(defn- remap-results
+  "Munges results for remapping after the query has been executed. For
+  internal remappings, a new column needs to be added and each row
+  flowing through needs to include the remapped data for the new
+  column. For external remappings, the column information needs to be
+  updated with what it's being remapped from and the user specified
+  name for the remapped column."
+  [results]
+  (let [indexed-dims (keep-indexed col->dim-map (:cols results))
+        internal-only-dims (filter #(= :internal (:dimension-type %)) indexed-dims)
+        remap-fn (row-map-fn internal-only-dims)
+        columns (concat (:cols results)
+                        (map :new-column internal-only-dims))
+        from->to (reduce (fn [acc {:keys [remapped_from name]}]
+                           (if remapped_from
+                             (assoc acc remapped_from name)
+                             acc))
+                         {} columns)]
+    (-> results
+        (update :columns into (map :to internal-only-dims))
+        (update :cols (fn [cols]
+                        (mapv (comp #(dissoc % :dimensions :values)
+                                    (assoc-remapped-to from->to))
+                              columns)))
+        (update :rows #(map remap-fn %)))))
+
+(defn add-remapping
+  "Query processor middleware. `QP` is the query processor, returns a
+  function that works on a `QUERY` map. Delegates to `add-fk-remaps`
+  for making remapping changes to the query (before executing the
+  query). Then delegates to `remap-results` to munge the results after
+  query execution."
+  [qp]
+  (comp remap-results qp add-fk-remaps))
diff --git a/src/metabase/query_processor/middleware/add_implicit_clauses.clj b/src/metabase/query_processor/middleware/add_implicit_clauses.clj
index 21483b4ca8499cce00e6137be0994fbaaf8d222a..c4eea5b849ad4253e00e09101313e5f603e4ee6b 100644
--- a/src/metabase/query_processor/middleware/add_implicit_clauses.clj
+++ b/src/metabase/query_processor/middleware/add_implicit_clauses.clj
@@ -4,19 +4,23 @@
             [metabase.models.field :refer [Field]]
             [metabase.query-processor
              [interface :as i]
-             [resolve :as resolve]
              [sort :as sort]
              [util :as qputil]]
-            [toucan.db :as db]))
+            [metabase.query-processor.middleware.resolve :as resolve]
+            [toucan
+             [db :as db]
+             [hydrate :refer [hydrate]]]))
 
 (defn- fetch-fields-for-souce-table-id [source-table-id]
   (map resolve/rename-mb-field-keys
-       (db/select [Field :name :display_name :base_type :special_type :visibility_type :table_id :id :position :description]
-         :table_id        source-table-id
-         :visibility_type [:not-in ["sensitive" "retired"]]
-         :parent_id       nil
-         {:order-by [[:position :asc]
-                     [:id :desc]]})))
+       (-> (db/select [Field :name :display_name :base_type :special_type :visibility_type :table_id :id :position :description :fingerprint]
+             :table_id        source-table-id
+             :visibility_type [:not-in ["sensitive" "retired"]]
+             :parent_id       nil
+             {:order-by [[:position :asc]
+                         [:id :desc]]})
+            (hydrate :values)
+            (hydrate :dimensions))))
 
 (defn- fields-for-source-table
   "Return the all fields for SOURCE-TABLE, for use as an implicit `:fields` clause."
@@ -24,7 +28,9 @@
   ;; Sort the implicit FIELDS so the SQL (or other native query) that gets generated (mostly) approximates the 'magic' sorting
   ;; we do on the results. This is done so when the outer query we generate is a `SELECT *` the order doesn't change
   (for [field (sort/sort-fields inner-query (fetch-fields-for-souce-table-id source-table-id))
-        :let  [field (resolve/resolve-table (i/map->Field field) {[nil source-table-id] source-table})]]
+        :let  [field (-> field
+                         resolve/convert-db-field
+                         (resolve/resolve-table {[nil source-table-id] source-table}))]]
     (if (qputil/datetime-field? field)
       (i/map->DateTimeField {:field field, :unit :default})
       field)))
@@ -53,8 +59,8 @@
 (defn- add-implicit-breakout-order-by
   "`Fields` specified in `breakout` should add an implicit ascending `order-by` subclause *unless* that field is *explicitly* referenced in `order-by`."
   [{breakout-fields :breakout, order-by :order-by, :as inner-query}]
-  (let [order-by-fields                   (set (map :field order-by))
-        implicit-breakout-order-by-fields (filter (partial (complement contains?) order-by-fields)
+  (let [order-by-fields                   (set (map (comp #(select-keys % [:field-id :fk-field-id]) :field) order-by))
+        implicit-breakout-order-by-fields (remove (comp order-by-fields #(select-keys % [:field-id :fk-field-id]))
                                                   breakout-fields)]
     (cond-> inner-query
       (seq implicit-breakout-order-by-fields) (update :order-by concat (for [field implicit-breakout-order-by-fields]
@@ -70,7 +76,6 @@
     query
     (update query :query add-implicit-clauses-to-inner-query)))
 
-
 (defn add-implicit-clauses
   "Add an implicit `fields` clause to queries with no `:aggregation`, `breakout`, or explicit `:fields` clauses.
    Add implicit `:order-by` clauses for fields specified in a `:breakout`."
diff --git a/src/metabase/query_processor/middleware/binning.clj b/src/metabase/query_processor/middleware/binning.clj
new file mode 100644
index 0000000000000000000000000000000000000000..59b246fff866ce4f38aa63d8825143fa5b01f431
--- /dev/null
+++ b/src/metabase/query_processor/middleware/binning.clj
@@ -0,0 +1,169 @@
+(ns metabase.query-processor.middleware.binning
+  (:require [clojure.math.numeric-tower :refer [ceil expt floor]]
+            [clojure.walk :as walk]
+            [metabase
+             [public-settings :as public-settings]
+             [util :as u]]
+            [metabase.query-processor.interface])
+  (:import [metabase.query_processor.interface BetweenFilter BinnedField ComparisonFilter]))
+
+(defn- update!
+  "Similar to `clojure.core/update` but works on transient maps"
+  [^clojure.lang.ITransientAssociative coll k f]
+  (assoc! coll k (f (get coll k))))
+
+(defn- filter->field-map
+  "A bit of a stateful hack using clojure.walk/prewalk to find any
+  comparison or between filter. This should be replaced by a zipper
+  for a more functional/composable approach to this problem."
+  [mbql-filter]
+  (let [acc (transient {})]
+    (walk/prewalk
+     (fn [x]
+       (when (or (instance? BetweenFilter x)
+                 (and (instance? ComparisonFilter x)
+                      (contains? #{:< :> :<= :>=} (:filter-type x))))
+         (update! acc (get-in x [:field :field-id]) #(if (seq %)
+                                                       (conj % x)
+                                                       [x])))
+       x)
+     mbql-filter)
+    (persistent! acc)))
+
+(defn- calculate-bin-width [min-value max-value num-bins]
+  (u/round-to-decimals 5 (/ (- max-value min-value)
+                            num-bins)))
+
+(defn- calculate-num-bins [min-value max-value bin-width]
+  (long (Math/ceil (/ (- max-value min-value)
+                         bin-width))))
+
+(defn- extract-bounds
+  "Given query criteria, find a min/max value for the binning strategy
+  using the greatest user specified min value and the smallest user
+  specified max value. When a user specified min or max is not found,
+  use the global min/max for the given field."
+  [{:keys [field-id fingerprint]} field-filter-map]
+  (let [{global-min :min, global-max :max} (get-in fingerprint [:type :type/Number])
+        user-maxes (for [{:keys [filter-type] :as query-filter} (get field-filter-map field-id)
+                         :when (contains? #{:< :<= :between} filter-type)]
+                     (if (= :between filter-type)
+                       (get-in query-filter [:max-val :value])
+                       (get-in query-filter [:value :value])))
+        user-mins (for [{:keys [filter-type] :as query-filter} (get field-filter-map field-id)
+                        :when (contains? #{:> :>= :between} filter-type)]
+                    (if (= :between filter-type)
+                      (get-in query-filter [:min-val :value])
+                      (get-in query-filter [:value :value])))]
+    [(or (when (seq user-mins)
+           (apply max user-mins))
+         global-min)
+     (or (when (seq user-maxes)
+           (apply min user-maxes))
+         global-max)]))
+
+(defn- ceil-to
+  [precision x]
+  (let [scale (/ precision)]
+    (/ (ceil (* x scale)) scale)))
+
+(defn- floor-to
+  [precision x]
+  (let [scale (/ precision)]
+    (/ (floor (* x scale)) scale)))
+
+(def ^:private ^:const pleasing-numbers [1 1.25 2 2.5 3 5 7.5 10])
+
+(defn- nicer-bin-width
+  [min-value max-value num-bins]
+  (let [min-bin-width (calculate-bin-width min-value max-value num-bins)
+        scale         (expt 10 (u/order-of-magnitude min-bin-width))]
+    (->> pleasing-numbers
+         (map (partial * scale))
+         (drop-while (partial > min-bin-width))
+         first)))
+
+(defn- nicer-bounds
+  [min-value max-value bin-width]
+  [(floor-to bin-width min-value) (ceil-to bin-width max-value)])
+
+(def ^:private ^:const max-steps 10)
+
+(defn- fixed-point
+  [f]
+  (fn [x]
+    (->> (iterate f x)
+         (partition 2 1)
+         (take max-steps)
+         (drop-while (partial apply not=))
+         ffirst)))
+
+(def ^:private ^{:arglists '([binned-field])} nicer-breakout
+  (fixed-point
+   (fn
+     [{:keys [min-value max-value bin-width num-bins strategy] :as binned-field}]
+     (let [bin-width (if (= strategy :num-bins)
+                       (nicer-bin-width min-value max-value num-bins)
+                       bin-width)
+           [min-value max-value] (nicer-bounds min-value max-value bin-width)]
+       (-> binned-field
+           (assoc :min-value min-value
+                  :max-value max-value
+                  :num-bins  (if (= strategy :num-bins)
+                               num-bins
+                               (calculate-num-bins min-value max-value bin-width))
+                  :bin-width bin-width))))))
+
+(defn- resolve-default-strategy [{:keys [strategy field]} min-value max-value]
+  (if (isa? (:special-type field) :type/Coordinate)
+    (let [bin-width (public-settings/breakout-bin-width)]
+      {:strategy  :bin-width
+       :bin-width bin-width
+       :num-bins  (calculate-num-bins min-value max-value bin-width)})
+    (let [num-bins (public-settings/breakout-bins-num)]
+      {:strategy  :num-bins
+       :num-bins  num-bins
+       :bin-width (calculate-bin-width min-value max-value num-bins)})))
+
+(defn- update-binned-field
+  "Given a field, resolve the binning strategy (either provided or
+  found if default is specified) and calculate the number of bins and
+  bin width for this field. `FILTER-FIELD-MAP` contains related
+  criteria that could narrow the domain for the field."
+  [{:keys [field num-bins strategy bin-width] :as binned-field} filter-field-map]
+  (let [[min-value max-value] (extract-bounds field filter-field-map)]
+    (when-not (and min-value max-value)
+      (throw (Exception. (format "Unable to bin field '%s' with id '%s' without a min/max value"
+                                 (get-in binned-field [:field :field-name])
+                                 (get-in binned-field [:field :field-id])))))
+    (let [resolved-binned-field (merge binned-field
+                                       {:min-value min-value :max-value max-value}
+                                       (case strategy
+
+                                         :num-bins
+                                         {:bin-width (calculate-bin-width min-value max-value num-bins)}
+
+                                         :bin-width
+                                         {:num-bins (calculate-num-bins min-value max-value bin-width)}
+
+                                         :default
+                                         (resolve-default-strategy binned-field min-value max-value)))]
+      ;; Bail out and use unmodified version if we can't converge on a
+      ;; nice version.
+      (or (nicer-breakout resolved-binned-field) resolved-binned-field))))
+
+(defn update-binning-strategy
+  "When a binned field is found, it might need to be updated if a
+  relevant query criteria affects the min/max value of the binned
+  field. This middleware looks for that criteria, then updates the
+  related min/max values and calculates the bin-width based on the
+  criteria values (or global min/max information)."
+  [qp]
+  (fn [query]
+    (let [filter-field-map (filter->field-map (get-in query [:query :filter]))]
+      (qp
+       (walk/postwalk (fn [node]
+                        (if (instance? BinnedField node)
+                          (update-binned-field node filter-field-map)
+                          node))
+                      query)))))
diff --git a/src/metabase/query_processor/middleware/catch_exceptions.clj b/src/metabase/query_processor/middleware/catch_exceptions.clj
index b28c2409645bbf1861655cc5f1ec50d23038ff36..c3de96317638fb8f666cf23068f31fbdb2424480 100644
--- a/src/metabase/query_processor/middleware/catch_exceptions.clj
+++ b/src/metabase/query_processor/middleware/catch_exceptions.clj
@@ -1,6 +1,9 @@
 (ns metabase.query-processor.middleware.catch-exceptions
   "Middleware for catching exceptions thrown by the query processor and returning them in a friendlier format."
-  (:require [metabase.query-processor.middleware.expand-resolve :as expand-resolve]
+  (:require [metabase.query-processor.middleware
+             [expand :as expand]
+             [resolve :as resolve]
+             [source-table :as source-table]]
             [metabase.query-processor.util :as qputil]
             [metabase.util :as u]
             schema.utils)
@@ -13,8 +16,12 @@
           :stacktrace     (u/filtered-stacktrace e)
           :query          (dissoc query :database :driver)
           :expanded-query (when (qputil/mbql-query? query)
-                            (u/ignore-exceptions
-                              (dissoc (expand-resolve/expand-and-resolve query) :database :driver)))}
+                            (-> query
+                                expand/expand
+                                source-table/resolve-source-table-middleware
+                                resolve/resolve
+                                (dissoc :database :driver)
+                                u/ignore-exceptions))}
          (when-let [data (ex-data e)]
            {:ex-data data})
          additional-info))
diff --git a/src/metabase/query_processor/expand.clj b/src/metabase/query_processor/middleware/expand.clj
similarity index 97%
rename from src/metabase/query_processor/expand.clj
rename to src/metabase/query_processor/middleware/expand.clj
index e08ba5c528299c8216e42fad469bd17d479ff58c..bc19c7ebfbc4744fc0657eaad89cafb2eb679bd8 100644
--- a/src/metabase/query_processor/expand.clj
+++ b/src/metabase/query_processor/middleware/expand.clj
@@ -1,4 +1,4 @@
-(ns metabase.query-processor.expand
+(ns metabase.query-processor.middleware.expand
   "Converts a Query Dict as received by the API into an *expanded* one that contains extra information that will be needed to
    construct the appropriate native Query, and perform various post-processing steps such as Field ordering."
   (:refer-clojure :exclude [< <= > >= = != and or not filter count distinct sum min max + - / *])
@@ -209,6 +209,13 @@
 
 ;;; ## breakout & fields
 
+(s/defn ^:ql ^:always-validate binning-strategy :- FieldPlaceholder
+  "Reference to a `BinnedField`. This is just a `Field` reference with an associated `STRATEGY-NAME` and `STRATEGY-PARAM`"
+  ([f strategy-name & [strategy-param]]
+   (let [strategy (qputil/normalize-token strategy-name)
+         field (field f)]
+     (assoc field :binning-strategy strategy, :binning-param strategy-param))))
+
 (defn- fields-list-clause
   ([k query] query)
   ([k query & fields] (assoc query k (mapv field fields))))
@@ -525,10 +532,17 @@
                                   :value       {:field-placeholder {:field-id 100}
                                                 :value 200}}}}
 
-   The \"placeholder\" objects above are fetched from the DB and replaced in the next QP step, in `metabase.query-processor.resolve`."
+   The \"placeholder\" objects above are fetched from the DB and replaced in the next QP step, in `metabase.query-processor.middleware.resolve`."
   [outer-query]
   (update outer-query :query expand-inner))
 
+(defn expand-middleware
+  "Wraps `expand` in a query-processor middleware function"
+  [qp]
+  (fn [query]
+    (qp (if (qputil/mbql-query? query)
+          (expand query)
+          query))))
 
 (defmacro query
   "Build a query by threading an (initially empty) map through each form in BODY with `->`.
diff --git a/src/metabase/query_processor/middleware/expand_resolve.clj b/src/metabase/query_processor/middleware/expand_resolve.clj
deleted file mode 100644
index 1fa626cbf7e50126336f8726eb095774b74d707a..0000000000000000000000000000000000000000
--- a/src/metabase/query_processor/middleware/expand_resolve.clj
+++ /dev/null
@@ -1,29 +0,0 @@
-(ns metabase.query-processor.middleware.expand-resolve
-  "Middleware for converting a MBQL query into an 'expanded' form that contains additional information needed by drivers for running queries,
-   and resolving various referenced Fields and Tables."
-  (:require [metabase.models.database :refer [Database]]
-            [metabase.query-processor
-             [expand :as expand]
-             [resolve :as resolve]
-             [util :as qputil]]
-            [toucan.db :as db]))
-
-(def ^{:arglists '([query])} expand-and-resolve
-  "Expand and resolve a QUERY.
-   (This function is *not* middleware; use `expand-resolve` for that purpose. This is provided for cases where we want to return the expanded/resolved
-   query in error messages)."
-  (comp resolve/resolve expand/expand))
-
-(defn- expand-resolve*
-  [{database-id :database, :as query}]
-  (let [resolved-db (db/select-one [Database :name :id :engine :details], :id database-id)
-        query       (if-not (qputil/mbql-query? query)
-                      query
-                      (expand-and-resolve query))]
-    (assoc query :database resolved-db)))
-
-(defn expand-resolve
-  "Middleware that transforms an MBQL into an expanded form with more information and structure. Also resolves references to fields, tables,
-   etc, into their concrete details which are necessary for query formation by the executing driver."
-  [qp]
-  (comp qp expand-resolve*))
diff --git a/src/metabase/query_processor/middleware/parameters/sql.clj b/src/metabase/query_processor/middleware/parameters/sql.clj
index a086f09265ae320bb44891c5d82ae200fc7b674c..d95991321d7aba97e854f7ed8bf4c4e2d560c037 100644
--- a/src/metabase/query_processor/middleware/parameters/sql.clj
+++ b/src/metabase/query_processor/middleware/parameters/sql.clj
@@ -7,8 +7,8 @@
             [clojure.tools.logging :as log]
             [honeysql.core :as hsql]
             [metabase.models.field :as field :refer [Field]]
-            [metabase.query-processor.expand :as ql]
             [metabase.query-processor.middleware.parameters.dates :as date-params]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.util :as u]
             [metabase.util.schema :as su]
             [schema.core :as s]
@@ -50,6 +50,9 @@
 
 (defrecord ^:private DateRange [start end])
 
+;; List of numbers to facilitate things like using params in a SQL `IN` clause. See the discussion in `value->number` for more details.
+(s/defrecord ^:private CommaSeparatedNumbers [numbers :- [s/Num]])
+
 ;; convenience for representing an *optional* parameter present in a query but whose value is unspecified in the param values.
 (defrecord ^:private NoValue [])
 
@@ -75,6 +78,7 @@
 
 (def ^:private ParamValue
   (s/named (s/maybe (s/cond-pre NoValue
+                                CommaSeparatedNumbers
                                 Dimension
                                 Date
                                 s/Num
@@ -138,13 +142,36 @@
       (when required
         (throw (Exception. (format "'%s' is a required param." display_name))))))
 
-(s/defn ^:private ^:always-validate value->number :- s/Num
+(s/defn ^:private ^:always-validate parse-number :- s/Num
+  "Parse a string like `1` or `2.0` into a valid number. Done mostly to keep people from passing in
+   things that aren't numbers, like SQL identifiers."
+  [s :- s/Str]
+  (.parse (NumberFormat/getInstance) ^String s))
+
+(s/defn ^:private ^:always-validate value->number :- (s/cond-pre s/Num CommaSeparatedNumbers)
+  "Parse a 'numeric' param value. Normally this returns an integer or floating-point number,
+   but as a somewhat undocumented feature it also accepts comma-separated lists of numbers. This was a side-effect of the
+   old parameter code that unquestioningly substituted any parameter passed in as a number directly into the SQL. This has
+   long been changed for security purposes (avoiding SQL injection), but since users have come to expect comma-separated
+   numeric values to work we'll allow that (with validation) and return an instance of `CommaSeparatedNumbers`. (That
+   is converted to SQL as a simple comma-separated list.)"
   [value]
-  (if (string? value)
-    (.parse (NumberFormat/getInstance) ^String value)
-    value))
+  (cond
+    ;; if not a string it's already been parsed
+    (number? value) value
+    ;; same goes for an instance of CommaSeparatedNumbers
+    (instance? CommaSeparatedNumbers value) value
+    :else
+    ;; if the value is a string, then split it by commas in the string. Usually there should be none.
+    ;; Parse each part as a number.
+    (let [parts (for [part (str/split value #",")]
+                  (parse-number part))]
+      (if (> (count parts) 1)
+        ;; If there's more than one number return an instance of `CommaSeparatedNumbers`
+        (strict-map->CommaSeparatedNumbers {:numbers parts})
+        ;; otherwise just return the single number
+        (first parts)))))
 
-;; TODO - this should probably be converting strings to numbers (issue #3816)
 (s/defn ^:private ^:always-validate parse-value-for-type :- ParamValue
   [param-type value]
   (cond
@@ -252,6 +279,10 @@
   SqlCall (->replacement-snippet-info [this] (honeysql->replacement-snippet-info this))
   NoValue (->replacement-snippet-info [_]    {:replacement-snippet ""})
 
+  CommaSeparatedNumbers
+  (->replacement-snippet-info [{:keys [numbers]}]
+    {:replacement-snippet (str/join ", " numbers)})
+
   Date
   (->replacement-snippet-info [{:keys [s]}]
     (honeysql->replacement-snippet-info (u/->Timestamp s)))
diff --git a/src/metabase/query_processor/resolve.clj b/src/metabase/query_processor/middleware/resolve.clj
similarity index 77%
rename from src/metabase/query_processor/resolve.clj
rename to src/metabase/query_processor/middleware/resolve.clj
index 667bd5230951332897ce8b8b31510e3ae0154187..7541a930a14beb5e82eb1bd529565ed8ac393697 100644
--- a/src/metabase/query_processor/resolve.clj
+++ b/src/metabase/query_processor/middleware/resolve.clj
@@ -1,4 +1,4 @@
-(ns metabase.query-processor.resolve
+(ns metabase.query-processor.middleware.resolve
   "Resolve references to `Fields`, `Tables`, and `Databases` in an expanded query dictionary."
   (:refer-clojure :exclude [resolve])
   (:require [clojure
@@ -10,10 +10,14 @@
              [util :as u]]
             [metabase.models
              [field :as field]
-             [table :refer [Table]]]
-            [metabase.query-processor.interface :as i]
+             [table :refer [Table]]
+             [database :refer [Database]]]
+            [metabase.query-processor
+             [interface :as i]
+             [util :as qputil]]
             [schema.core :as s]
-            [toucan.db :as db])
+            [toucan.db :as db]
+            [toucan.hydrate :refer [hydrate]])
   (:import [metabase.query_processor.interface DateTimeField DateTimeValue ExpressionRef Field FieldPlaceholder RelativeDatetime RelativeDateTimeValue Value ValuePlaceholder]))
 
 ;; # ---------------------------------------------------------------------- UTIL FNS ------------------------------------------------------------
@@ -30,6 +34,41 @@
                                     :table_id        :table-id
                                     :parent_id       :parent-id}))
 
+(defn- rename-dimension-keys
+  [dimension]
+  (set/rename-keys (into {} dimension)
+                   {:id                      :dimension-id
+                    :name                    :dimension-name
+                    :type                    :dimension-type
+                    :field_id                :field-id
+                    :human_readable_field_id :human-readable-field-id
+                    :created_at              :created-at
+                    :updated_at              :updated-at}))
+
+(defn- rename-field-value-keys
+  [field-values]
+  (set/rename-keys (into {} field-values)
+                   {:id                      :field-value-id
+                    :field_id                :field-id
+                    :human_readable_values   :human-readable-values
+                    :updated_at              :updated-at
+                    :created_at              :created-at}))
+
+(defn convert-db-field
+  "Converts a field map from that database to a Field instance"
+  [db-field]
+  (-> db-field
+      rename-mb-field-keys
+      i/map->Field
+      (update :values (fn [vals]
+                        (if (seq vals)
+                          (-> vals rename-field-value-keys i/map->FieldValues)
+                          vals)))
+      (update :dimensions (fn [dims]
+                            (if (seq dims)
+                              (-> dims rename-dimension-keys i/map->Dimensions )
+                              dims)))))
+
 ;;; # ------------------------------------------------------------ IRESOLVE PROTOCOL ------------------------------------------------------------
 
 (defprotocol ^:private IResolve
@@ -98,17 +137,42 @@
 
 ;;; ## ------------------------------------------------------------ FIELD PLACEHOLDER ------------------------------------------------------------
 
-(defn- field-ph-resolve-field [{:keys [field-id datetime-unit fk-field-id], :as this} field-id->field]
+(defn- resolve-binned-field [{:keys [binning-strategy binning-param] :as field-ph} field]
+  (let [binned-field (i/map->BinnedField {:field    field
+                                          :strategy binning-strategy})]
+    (case binning-strategy
+      :num-bins
+      (assoc binned-field :num-bins binning-param)
+
+      :bin-width
+      (assoc binned-field :bin-width binning-param)
+
+      :default
+      binned-field
+
+      :else
+      (throw (Exception. (format "Unregonized binning strategy '%s'" binning-strategy))))))
+
+(defn- merge-non-nils
+  "Like `clojure.core/merge` but only merges non-nil values"
+  [& maps]
+  (apply merge-with #(or %2 %1) maps))
+
+(defn- field-ph-resolve-field [{:keys [field-id datetime-unit binning-strategy binning-param], :as this} field-id->field]
   (if-let [{:keys [base-type special-type], :as field} (some-> (field-id->field field-id)
-                                                               i/map->Field
-                                                               (assoc :fk-field-id fk-field-id))]
+                                                               convert-db-field
+                                                               (merge-non-nils (select-keys this [:fk-field-id :remapped-from :remapped-to :field-display-name])))]
     ;; try to resolve the Field with the ones available in field-id->field
-    (let [datetime-field? (or (isa? base-type :type/DateTime)
-                              (isa? special-type :type/DateTime))]
-      (if-not datetime-field?
-        field
-        (i/map->DateTimeField {:field field
-                               :unit  (or datetime-unit :day)}))) ; default to `:day` if a unit wasn't specified
+    (cond
+      (or (isa? base-type :type/DateTime)
+          (isa? special-type :type/DateTime))
+      (i/map->DateTimeField {:field field
+                             :unit  (or datetime-unit :day)}) ; default to `:day` if a unit wasn't specified
+
+      binning-strategy
+      (resolve-binned-field this field)
+
+      :else field)
     ;; If that fails just return ourselves as-is
     this))
 
@@ -192,9 +256,11 @@
         ;; If there are no more Field IDs to resolve we're done.
         expanded-query-dict
         ;; Otherwise fetch + resolve the Fields in question
-        (let [fields (->> (u/key-by :id (db/select [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id :description :id]
-                                          :visibility_type [:not= "sensitive"]
-                                          :id              [:in field-ids]))
+        (let [fields (->> (u/key-by :id (-> (db/select [field/Field :name :display_name :base_type :special_type :visibility_type :table_id :parent_id :description :id :fingerprint]
+                                              :visibility_type [:not= "sensitive"]
+                                              :id              [:in field-ids])
+                                            (hydrate :values)
+                                            (hydrate :dimensions)))
                           (m/map-vals rename-mb-field-keys)
                           (m/map-vals #(assoc % :parent (when-let [parent-id (:parent-id %)]
                                                           (i/map->FieldPlaceholder {:field-id parent-id})))))]
@@ -244,7 +310,7 @@
 
 (defn- resolve-tables
   "Resolve the `Tables` in an EXPANDED-QUERY-DICT."
-  [{{source-table-id :source-table} :query, :keys [table-ids fk-field-ids], :as expanded-query-dict}]
+  [{{{ source-table-id :id :as source-table} :source-table} :query, :keys [table-ids fk-field-ids], :as expanded-query-dict}]
   (if-not source-table-id
     ;; if we have a `source-query`, recurse and resolve tables in that
     (update-in expanded-query-dict [:query :source-query] (fn [source-query]
@@ -253,19 +319,15 @@
                                                               (:query (resolve-tables (assoc expanded-query-dict :query source-query))))))
     ;; otherwise we can resolve tables in the (current) top-level
     (let [table-ids             (conj table-ids source-table-id)
-          source-table          (or (db/select-one [Table :schema :name :id], :id source-table-id)
-                                    (throw (Exception. (format "Query expansion failed: could not find source table %d." source-table-id))))
           joined-tables         (fk-field-ids->joined-tables source-table-id fk-field-ids)
           fk-id+table-id->table (into {[nil source-table-id] source-table}
                                       (for [{:keys [source-field table-id join-alias]} joined-tables]
                                         {[(:field-id source-field) table-id] {:name join-alias
                                                                               :id   table-id}}))]
       (as-> expanded-query-dict <>
-        (assoc-in <> [:query :source-table] source-table)
         (assoc-in <> [:query :join-tables]  joined-tables)
         (walk/postwalk #(resolve-table % fk-id+table-id->table) <>)))))
 
-
 ;;; # ------------------------------------------------------------ PUBLIC INTERFACE ------------------------------------------------------------
 
 (defn resolve
@@ -275,3 +337,13 @@
           record-fk-field-ids
           resolve-fields
           resolve-tables))
+
+(defn resolve-middleware
+  "Wraps the `resolve` function in a query-processor middleware"
+  [qp]
+  (fn [{database-id :database, :as query}]
+    (let [resolved-db (db/select-one [Database :name :id :engine :details], :id database-id)
+          query       (if (qputil/mbql-query? query)
+                        (resolve query)
+                        query)]
+      (qp (assoc query :database resolved-db)))))
diff --git a/src/metabase/query_processor/middleware/source_table.clj b/src/metabase/query_processor/middleware/source_table.clj
new file mode 100644
index 0000000000000000000000000000000000000000..047323ade341d4ed6699f7ee1287e07139761b3e
--- /dev/null
+++ b/src/metabase/query_processor/middleware/source_table.clj
@@ -0,0 +1,28 @@
+(ns metabase.query-processor.middleware.source-table
+  (:require [metabase.models.table :refer [Table]]
+            [metabase.query-processor.util :as qputil]
+            [toucan.db :as db]))
+
+(defn- resolve-source-table
+  [{{source-table-id :source-table} :query :as expanded-query-dict}]
+  (cond
+    (not (qputil/mbql-query? expanded-query-dict))
+    expanded-query-dict
+
+    (nil? source-table-id)
+    (update-in expanded-query-dict [:query :source-query] (fn [source-query]
+                                                            (if (:native source-query)
+                                                              source-query
+                                                              (:query (resolve-source-table (assoc expanded-query-dict :query source-query))))))
+
+    :else
+    (let [source-table (or (db/select-one [Table :schema :name :id], :id source-table-id)
+                           (throw (Exception. (format "Query expansion failed: could not find source table %d." source-table-id))))]
+      (assoc-in expanded-query-dict [:query :source-table] source-table))))
+
+(defn resolve-source-table-middleware
+  "Middleware that will take the source-table (an integer) and hydrate
+  that source table from the database and attach it as
+  `:source-table`"
+  [qp]
+  (comp qp resolve-source-table))
diff --git a/src/metabase/sample_data.clj b/src/metabase/sample_data.clj
index 4d839df062376e4c7e025cc1c46e613cb47cfaa4..8aa650724971c7e5b7dfddee3797482492698eac 100644
--- a/src/metabase/sample_data.clj
+++ b/src/metabase/sample_data.clj
@@ -3,7 +3,7 @@
             [clojure.string :as s]
             [clojure.tools.logging :as log]
             [metabase
-             [sync-database :as sync-database]
+             [sync :as sync]
              [util :as u]]
             [metabase.models.database :refer [Database]]
             [toucan.db :as db]))
@@ -27,11 +27,11 @@
   (when-not (db/exists? Database :is_sample true)
     (try
       (log/info "Loading sample dataset...")
-      (sync-database/sync-database! (db/insert! Database
-                                      :name      sample-dataset-name
-                                      :details   (db-details)
-                                      :engine    :h2
-                                      :is_sample true))
+      (sync/sync-database! (db/insert! Database
+                             :name      sample-dataset-name
+                             :details   (db-details)
+                             :engine    :h2
+                             :is_sample true))
       (catch Throwable e
         (log/error (u/format-color 'red "Failed to load sample dataset: %s\n%s" (.getMessage e) (u/pprint-to-str (u/filtered-stacktrace e))))))))
 
diff --git a/src/metabase/sync.clj b/src/metabase/sync.clj
new file mode 100644
index 0000000000000000000000000000000000000000..4dde3530da20d8a6752f30d8730489919be2435b
--- /dev/null
+++ b/src/metabase/sync.clj
@@ -0,0 +1,45 @@
+(ns metabase.sync
+  "Combined functions for running the entire Metabase sync process.
+   This delegates to a few distinct steps, which in turn are broken out even further:
+
+   1.  Sync Metadata      (`metabase.sync.sync-metadata`)
+   2.  Analysis           (`metabase.sync.analyze`)
+   3.  Cache Field Values (`metabase.sync.field-values`)
+
+   In the near future these steps will be scheduled individually, meaning those functions will
+   be called directly instead of calling the `sync-database!` function to do all three at once."
+  (:require [metabase.sync
+             [analyze :as analyze]
+             [field-values :as field-values]
+             [interface :as i]
+             [sync-metadata :as sync-metadata]]
+            [schema.core :as s]
+            [metabase.sync.util :as sync-util]))
+
+(def ^:private SyncDatabaseOptions
+  {(s/optional-key :full-sync?) s/Bool})
+
+(s/defn ^:always-validate sync-database!
+  "Perform all the different sync operations synchronously for DATABASE.
+   You may optionally supply OPTIONS, which can be used to disable so-called 'full-sync',
+   meaning just metadata will be synced, but no 'analysis' (special type determination and
+   FieldValues syncing) will be done."
+  ([database]
+   (sync-database! database {:full-sync? true}))
+  ([database :- i/DatabaseInstance, options :- SyncDatabaseOptions]
+   (sync-util/sync-operation :sync database (format "Sync %s with options: %s" (sync-util/name-for-logging database) options)
+     ;; First make sure Tables, Fields, and FK information is up-to-date
+     (sync-metadata/sync-db-metadata! database)
+     (when (:full-sync? options)
+       ;; Next, run the 'analysis' step where we do things like scan values of fields and update special types accordingly
+       (analyze/analyze-db! database)
+       ;; Finally, update FieldValues
+       (field-values/update-field-values! database)))))
+
+
+(s/defn ^:always-validate sync-table!
+  "Perform all the different sync operations synchronously for a given TABLE."
+  [table :- i/TableInstance]
+  (sync-metadata/sync-table-metadata! table)
+  (analyze/analyze-table! table)
+  (field-values/update-field-values-for-table! table))
diff --git a/src/metabase/sync/analyze.clj b/src/metabase/sync/analyze.clj
new file mode 100644
index 0000000000000000000000000000000000000000..88cc7da22a12d873a1ec19313a92ebb850cea5b4
--- /dev/null
+++ b/src/metabase/sync/analyze.clj
@@ -0,0 +1,49 @@
+(ns metabase.sync.analyze
+  "Logic responsible for doing deep 'analysis' of the data inside a database.
+   This is significantly more expensive than the basic sync-metadata step, and involves things
+   like running MBQL queries and fetching values to do things like determine Table row counts
+   and infer field special types."
+  (:require [clojure.tools.logging :as log]
+            [metabase.models.field :refer [Field]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.sync.analyze
+             [classify :as classify]
+             [fingerprint :as fingerprint]
+             [table-row-count :as table-row-count]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(s/defn ^:private ^:always-validate update-fields-last-analyzed!
+  "Update the `last_analyzed` date for all the fields in TABLE."
+  [table :- i/TableInstance]
+  (db/update-where! Field {:table_id        (u/get-id table)
+                           :active          true
+                           :visibility_type [:not= "retired"]
+                           :preview_display true
+                           :last_analyzed   nil}
+    :last_analyzed (u/new-sql-timestamp)))
+
+
+(s/defn ^:always-validate analyze-table!
+  "Perform in-depth analysis for a TABLE."
+  [table :- i/TableInstance]
+  (table-row-count/update-row-count! table)
+  (fingerprint/fingerprint-fields! table)
+  (classify/classify-fields! table)
+  (update-fields-last-analyzed! table))
+
+
+(s/defn ^:always-validate analyze-db!
+  "Perform in-depth analysis on the data for all Tables in a given DATABASE.
+   This is dependent on what each database driver supports, but includes things like cardinality testing and table row counting.
+   This also updates the `:last_analyzed` value for each affected Field."
+  [database :- i/DatabaseInstance]
+  (sync-util/sync-operation :analyze database (format "Analyze data for %s" (sync-util/name-for-logging database))
+    (let [tables (sync-util/db->sync-tables database)]
+      (sync-util/with-emoji-progress-bar [emoji-progress-bar (count tables)]
+        (doseq [table tables]
+          (analyze-table! table)
+          (log/info (u/format-color 'blue "%s Analyzed %s" (emoji-progress-bar) (sync-util/name-for-logging table))))))))
diff --git a/src/metabase/sync/analyze/classifiers/category.clj b/src/metabase/sync/analyze/classifiers/category.clj
new file mode 100644
index 0000000000000000000000000000000000000000..1c61b63d17ffae4724b011bc764fd6402b3daac8
--- /dev/null
+++ b/src/metabase/sync/analyze/classifiers/category.clj
@@ -0,0 +1,30 @@
+(ns metabase.sync.analyze.classifiers.category
+  "Classifier that determines whether a Field should be marked as a `:type/Category` based on the number of distinct values it has."
+  (:require [clojure.tools.logging :as log]
+            [metabase.models.field-values :as field-values]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util.schema :as su]
+            [schema.core :as s]))
+
+
+(s/defn ^:private ^:always-validate cannot-be-category? :- s/Bool
+  [base-type :- su/FieldType]
+  (or (isa? base-type :type/DateTime)
+      (isa? base-type :type/Collection)))
+
+(s/defn ^:always-validate infer-is-category :- (s/maybe i/FieldInstance)
+  "Classifier that attempts to determine whether FIELD ought to be marked as a Category based on its distinct count."
+  [field :- i/FieldInstance, fingerprint :- (s/maybe i/Fingerprint)]
+  (when-not (:special_type field)
+    (when fingerprint
+      (when-not (cannot-be-category? (:base_type field))
+        (when-let [distinct-count (get-in fingerprint [:global :distinct-count])]
+          (when (< distinct-count field-values/low-cardinality-threshold)
+            (log/debug (format "%s has %d distinct values. Since that is less than %d, we're marking it as a category."
+                               (sync-util/name-for-logging field)
+                               distinct-count
+                               field-values/low-cardinality-threshold))
+            (assoc field
+              :special_type :type/Category)))))))
diff --git a/src/metabase/sync/analyze/classifiers/name.clj b/src/metabase/sync/analyze/classifiers/name.clj
new file mode 100644
index 0000000000000000000000000000000000000000..96920577fe1a2dca0f1ff84a38d01c5501bcc3ab
--- /dev/null
+++ b/src/metabase/sync/analyze/classifiers/name.clj
@@ -0,0 +1,89 @@
+(ns metabase.sync.analyze.classifiers.name
+  "Classifier that infers the special type of a Field based on its name and base type."
+  (:require [clojure.string :as str]
+            [clojure.tools.logging :as log]
+            [metabase
+             [config :as config]
+             [util :as u]]
+            [metabase.models.field :refer [Field]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util.schema :as su]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(def ^:private bool-or-int-type #{:type/Boolean :type/Integer})
+(def ^:private float-type       #{:type/Float})
+(def ^:private int-or-text-type #{:type/Integer :type/Text})
+(def ^:private text-type        #{:type/Text})
+
+(def ^:private pattern+base-types+special-type
+  "Tuples of `[name-pattern set-of-valid-base-types special-type]`.
+   Fields whose name matches the pattern and one of the base types should be given the special type.
+
+   *  Convert field name to lowercase before matching against a pattern
+   *  Consider a nil set-of-valid-base-types to mean \"match any base type\""
+  [[#"^.*_lat$"       float-type       :type/Latitude]
+   [#"^.*_lon$"       float-type       :type/Longitude]
+   [#"^.*_lng$"       float-type       :type/Longitude]
+   [#"^.*_long$"      float-type       :type/Longitude]
+   [#"^.*_longitude$" float-type       :type/Longitude]
+   [#"^.*_rating$"    int-or-text-type :type/Category]
+   [#"^.*_type$"      int-or-text-type :type/Category]
+   [#"^.*_url$"       text-type        :type/URL]
+   [#"^_latitude$"    float-type       :type/Latitude]
+   [#"^active$"       bool-or-int-type :type/Category]
+   [#"^city$"         text-type        :type/City]
+   [#"^country$"      text-type        :type/Country]
+   [#"^countryCode$"  text-type        :type/Country]
+   [#"^currency$"     int-or-text-type :type/Category]
+   [#"^first_name$"   text-type        :type/Name]
+   [#"^full_name$"    text-type        :type/Name]
+   [#"^gender$"       int-or-text-type :type/Category]
+   [#"^last_name$"    text-type        :type/Name]
+   [#"^lat$"          float-type       :type/Latitude]
+   [#"^latitude$"     float-type       :type/Latitude]
+   [#"^lon$"          float-type       :type/Longitude]
+   [#"^lng$"          float-type       :type/Longitude]
+   [#"^long$"         float-type       :type/Longitude]
+   [#"^longitude$"    float-type       :type/Longitude]
+   [#"^name$"         text-type        :type/Name]
+   [#"^postalCode$"   int-or-text-type :type/ZipCode]
+   [#"^postal_code$"  int-or-text-type :type/ZipCode]
+   [#"^rating$"       int-or-text-type :type/Category]
+   [#"^role$"         int-or-text-type :type/Category]
+   [#"^sex$"          int-or-text-type :type/Category]
+   [#"^state$"        text-type        :type/State]
+   [#"^status$"       int-or-text-type :type/Category]
+   [#"^type$"         int-or-text-type :type/Category]
+   [#"^url$"          text-type        :type/URL]
+   [#"^zip_code$"     int-or-text-type :type/ZipCode]
+   [#"^zipcode$"      int-or-text-type :type/ZipCode]])
+
+;; Check that all the pattern tuples are valid
+(when-not config/is-prod?
+  (doseq [[name-pattern base-types special-type] pattern+base-types+special-type]
+    (assert (instance? java.util.regex.Pattern name-pattern))
+    (assert (every? (u/rpartial isa? :type/*) base-types))
+    (assert (isa? special-type :type/*))))
+
+
+(s/defn ^:private ^:always-validate special-type-for-name-and-base-type :- (s/maybe su/FieldType)
+  "If `name` and `base-type` matches a known pattern, return the `special_type` we should assign to it."
+  [field-name :- su/NonBlankString, base-type :- su/FieldType]
+  (or (when (= "id" (str/lower-case field-name)) :type/PK)
+      (some (fn [[name-pattern valid-base-types special-type]]
+              (when (and (some (partial isa? base-type) valid-base-types)
+                         (re-matches name-pattern (str/lower-case field-name)))
+                special-type))
+            pattern+base-types+special-type)))
+
+(s/defn ^:always-validate infer-special-type :- (s/maybe i/FieldInstance)
+  "Classifier that infers the special type of a FIELD based on its name and base type."
+  [field :- i/FieldInstance, _ :- (s/maybe i/Fingerprint)]
+  (when-let [inferred-special-type (special-type-for-name-and-base-type (:name field) (:base_type field))]
+    (log/debug (format "Based on the name of %s, we're giving it a special type of %s."
+                       (sync-util/name-for-logging field)
+                       inferred-special-type))
+    (assoc field :special_type inferred-special-type)))
diff --git a/src/metabase/sync/analyze/classifiers/no_preview_display.clj b/src/metabase/sync/analyze/classifiers/no_preview_display.clj
new file mode 100644
index 0000000000000000000000000000000000000000..cd1c3c6a39750fcbb008c922fa7262d6c0c9c5ef
--- /dev/null
+++ b/src/metabase/sync/analyze/classifiers/no_preview_display.clj
@@ -0,0 +1,18 @@
+(ns metabase.sync.analyze.classifiers.no-preview-display
+  "Classifier that decides whether a Field should be marked `:no_preview_display`."
+  (:require [metabase.sync.interface :as i]
+            [schema.core :as s]))
+
+(def ^:private ^:const ^Integer average-length-no-preview-threshold
+  "Fields whose values' average length is greater than this amount should be marked as `preview_display = false`."
+  50)
+
+(s/defn ^:always-validate infer-no-preview-display :- (s/maybe i/FieldInstance)
+  "Classifier that determines whether FIELD should no longer be previewed in the UI.
+   If FIELD is textual and its average length is too great, mark it so it isn't displayed in the UI.
+   NOTE: classifiers are only allowed to set `:special_type` or `:preview_display`
+   (see `values-that-can-be-set` in `metabase.sync.analyze.classify`), and `preview_display`
+   is the actual Field column, so we set `:preview_display false` here rather than a
+   nonexistent `:no_preview_display` key (which would make `save-field-updates!` throw)."
+  [field :- i/FieldInstance, fingerprint :- (s/maybe i/Fingerprint)]
+  (when (isa? (:base_type field) :type/Text)
+    (when-let [average-length (get-in fingerprint [:type :type/Text :average-length])]
+      (when (> average-length average-length-no-preview-threshold)
+        (assoc field
+          :preview_display false)))))
diff --git a/src/metabase/sync/analyze/classifiers/text_fingerprint.clj b/src/metabase/sync/analyze/classifiers/text_fingerprint.clj
new file mode 100644
index 0000000000000000000000000000000000000000..a67bee524a17c4fe32c336c908673aff594f96d4
--- /dev/null
+++ b/src/metabase/sync/analyze/classifiers/text_fingerprint.clj
@@ -0,0 +1,50 @@
+(ns metabase.sync.analyze.classifiers.text-fingerprint
+  "Logic for inferring the special types of *Text* fields based on their TextFingerprints.
+   These tests only run against Fields that *don't* have existing special types."
+  (:require [clojure.tools.logging :as log]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util.schema :as su]
+            [schema.core :as s]))
+
+(def ^:private ^:const ^Float percent-valid-threshold
+  "Fields that have at least this percent of values that satisfy some predicate (such as `u/is-email?`)
+   should be given the corresponding special type (such as `:type/Email`)."
+  0.95)
+
+(s/defn ^:private ^:always-validate percent-key-below-threshold? :- s/Bool
+  "Is the value of PERCENT-KEY inside TEXT-FINGERPRINT at or above `percent-valid-threshold`?
+   NOTE(review): despite what the name suggests, this returns `true` when the percentage
+   *meets or exceeds* the threshold (`>=`); callers rely on that, so only the doc is corrected here."
+  [text-fingerprint :- i/TextFingerprint, percent-key :- s/Keyword]
+  (boolean
+   (when-let [percent (get text-fingerprint percent-key)]
+     (>= percent percent-valid-threshold))))
+
+
+(def ^:private percent-key->special-type
+  "Map of keys inside the `TextFingerprint` to the corresponding special types we should mark a Field as if the value of the key
+   is over `percent-valid-threshold`."
+  {:percent-json  :type/SerializedJSON
+   :percent-url   :type/URL
+   :percent-email :type/Email})
+
+(s/defn ^:private ^:always-validate infer-special-type-for-text-fingerprint :- (s/maybe su/FieldType)
+  "Check various percentages inside the TEXT-FINGERPRINT and return the corresponding special type to mark the Field as if the percent passes the threshold."
+  [text-fingerprint :- i/TextFingerprint]
+  (some (fn [[percent-key special-type]]
+          (when (percent-key-below-threshold? text-fingerprint percent-key)
+            special-type))
+        (seq percent-key->special-type)))
+
+
+(s/defn ^:always-validate infer-special-type :- (s/maybe i/FieldInstance)
+  "Do classification for `:type/Text` Fields with a valid `TextFingerprint`.
+   Currently this only checks the various recorded percentages, but this is subject to change in the future."
+  [field :- i/FieldInstance, fingerprint :- (s/maybe i/Fingerprint)]
+  (when (isa? (:base_type field) :type/Text)
+    (when-not (:special_type field)
+      (when-let [text-fingerprint (get-in fingerprint [:type :type/Text])]
+        (when-let [inferred-special-type (infer-special-type-for-text-fingerprint text-fingerprint)]
+          (log/debug (format "Based on the fingerprint of %s, we're marking it as %s." (sync-util/name-for-logging field) inferred-special-type))
+          (assoc field
+            :special_type inferred-special-type))))))
diff --git a/src/metabase/sync/analyze/classify.clj b/src/metabase/sync/analyze/classify.clj
new file mode 100644
index 0000000000000000000000000000000000000000..919cd714a6fb66924f2e2d58ca0d976b10514b13
--- /dev/null
+++ b/src/metabase/sync/analyze/classify.clj
@@ -0,0 +1,110 @@
+(ns metabase.sync.analyze.classify
+  "Analysis sub-step that takes a fingerprint for a Field and infers and saves appropriate information like special type.
+   Each 'classifier' takes the information available to it and decides whether or not to run.
+   We currently have the following classifiers:
+
+   1.  `name`: Looks at the name of a Field and infers a special type if possible
+   2.  `no-preview-display`: Looks at average length of text Field recorded in fingerprint and decides whether or not we should hide this Field
+   3.  `category`: Looks at the number of distinct values of Field and determines whether it can be a Category
+   4.  `text-fingerprint`: Looks at percentages recorded in a text Fields' TextFingerprint and infers a special type if possible
+
+   All classifier functions take two arguments, a `FieldInstance` and a possibly `nil` `Fingerprint`, and should return the Field
+   with any appropriate changes (such as a new special type). If no changes are appropriate, a classifier may return nil.
+   Error handling is handled by `run-classifiers` below, so individual classifiers do not need to handle errors themselves.
+
+   In the future, we plan to add more classifiers, including ML ones that run offline."
+  (:require [clojure.data :as data]
+            [clojure.tools.logging :as log]
+            [metabase.models.field :refer [Field]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.sync.analyze.classifiers
+             [category :as category]
+             [name :as name]
+             [no-preview-display :as no-preview-display]
+             [text-fingerprint :as text-fingerprint]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                             CLASSIFYING INDIVIDUAL FIELDS                                              |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(def ^:private values-that-can-be-set
+  "Columns of Field that classifiers are allowed to set."
+  #{:special_type :preview_display})
+
+(s/defn ^:private ^:always-validate save-field-updates!
+  "Save the updates in UPDATED-FIELD."
+  [original-field :- i/FieldInstance, updated-field :- i/FieldInstance]
+  (let [[_ values-to-set] (data/diff original-field updated-field)]
+    (log/debug (format "Based on classification, updating these values of %s: %s" (sync-util/name-for-logging original-field) values-to-set))
+    ;; Check that we're not trying to set anything that we're not allowed to
+    (doseq [k (keys values-to-set)]
+      (when-not (contains? values-that-can-be-set k)
+        (throw (Exception. (format "Classifiers are not allowed to set the value of %s." k)))))
+    ;; cool, now we should be ok to update the Field
+    (db/update! Field (u/get-id original-field)
+      values-to-set)))
+
+
+(def ^:private classifiers
+  "Various classifier functions available. These should all take two args, a `FieldInstance` and a possibly `nil` `Fingerprint`,
+   and return `FieldInstance` with any inferred property changes, or `nil` if none could be inferred.
+   Order is important!"
+  [name/infer-special-type
+   category/infer-is-category
+   no-preview-display/infer-no-preview-display
+   text-fingerprint/infer-special-type])
+
+(s/defn ^:private ^:always-validate run-classifiers :- i/FieldInstance
+  "Run all the available `classifiers` against FIELD and FINGERPRINT, and return the resulting FIELD with changes
+   decided upon by the classifiers."
+  [field :- i/FieldInstance, fingerprint :- (s/maybe i/Fingerprint)]
+  (loop [field field, [classifier & more] classifiers]
+    (if-not classifier
+      field
+      (recur (or (sync-util/with-error-handling (format "Error running classifier on %s" (sync-util/name-for-logging field))
+                   (classifier field fingerprint))
+                 field)
+             more))))
+
+
+(s/defn ^:private ^:always-validate classify!
+  "Run various classifiers on FIELD and its FINGERPRINT, and save any detected changes."
+  ([field :- i/FieldInstance]
+   (classify! field (or (:fingerprint field)
+                        (db/select-one-field :fingerprint Field :id (u/get-id field)))))
+  ([field :- i/FieldInstance, fingerprint :- (s/maybe i/Fingerprint)]
+   (sync-util/with-error-handling (format "Error classifying %s" (sync-util/name-for-logging field))
+     (let [updated-field (run-classifiers field fingerprint)]
+       (when-not (= field updated-field)
+         (save-field-updates! field updated-field))))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                           CLASSIFYING ALL FIELDS IN A TABLE                                            |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate fields-to-classify :- (s/maybe [i/FieldInstance])
+  "Return a sequence of Fields belonging to TABLE for which we should attempt to determine special type.
+   This should include NEW fields that are active, visible, and without an existing special type."
+  [table :- i/TableInstance]
+  (seq (db/select Field
+         :table_id        (u/get-id table)
+         :special_type    nil
+         :active          true
+         :visibility_type [:not= "retired"]
+         :preview_display true
+         :last_analyzed   nil)))
+
+(s/defn ^:always-validate classify-fields!
+  "Run various classifiers on the appropriate FIELDS in a TABLE that have not been previously analyzed.
+   These do things like inferring (and setting) the special types and preview display status for Fields
+   belonging to TABLE."
+  [table :- i/TableInstance]
+  (when-let [fields (fields-to-classify table)]
+    (doseq [field fields]
+      (classify! field))))
diff --git a/src/metabase/sync/analyze/fingerprint.clj b/src/metabase/sync/analyze/fingerprint.clj
new file mode 100644
index 0000000000000000000000000000000000000000..ab9b5f5bab7d42a636de595071b9c49e18172731
--- /dev/null
+++ b/src/metabase/sync/analyze/fingerprint.clj
@@ -0,0 +1,70 @@
+(ns metabase.sync.analyze.fingerprint
+  "Analysis sub-step that takes a sample of values for a Field and saves a non-identifying fingerprint
+   used for classification. This fingerprint is saved as a column on the Field it belongs to."
+  (:require [clojure.tools.logging :as log]
+            [metabase.models.field :refer [Field]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.sync.analyze.fingerprint
+             [global :as global]
+             [number :as number]
+             [sample :as sample]
+             [text :as text]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(s/defn ^:private ^:always-validate type-specific-fingerprint :- (s/maybe i/TypeSpecificFingerprint)
+  "Return type-specific fingerprint info for FIELD and a sample of VALUES if it has an eligible base type
+   such as a derivative of `:type/Text` or of `:type/Number`."
+  [field :- i/FieldInstance, values :- i/ValuesSample]
+  (condp #(isa? %2 %1) (:base_type field)
+    :type/Text   {:type/Text (text/text-fingerprint values)}
+    :type/Number {:type/Number (number/number-fingerprint values)}
+    nil))
+
+(s/defn ^:private ^:always-validate fingerprint :- (s/maybe i/Fingerprint)
+  "Generate a 'fingerprint' from a SAMPLE of values."
+  ([field :- i/FieldInstance]
+   (when-let [values (sample/basic-sample field)]
+     (fingerprint field values)))
+  ([field :- i/FieldInstance, values :- i/ValuesSample]
+   (merge
+    (when-let [global-fingerprint (global/global-fingerprint values)]
+      {:global global-fingerprint})
+    (when-let [type-specific-fingerprint (type-specific-fingerprint field values)]
+      {:type type-specific-fingerprint}))))
+
+
+(s/defn ^:private ^:always-validate fingerprint!
+  "Generate and save a fingerprint for a FIELD."
+  [field :- i/FieldInstance]
+  (sync-util/with-error-handling (format "Error generating fingerprint for %s" (sync-util/name-for-logging field))
+    (when-let [fingerprint (fingerprint field)]
+      (log/debug (format "Saving fingerprint for %s" (sync-util/name-for-logging field)))
+      (db/update! Field (u/get-id field)
+        :fingerprint fingerprint))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                          FINGERPRINTING ALL FIELDS IN A TABLE                                          |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate fields-to-fingerprint :- (s/maybe [i/FieldInstance])
+  "Return a sequence of Fields belonging to TABLE for which we should generate (and save) fingerprints.
+   This should include NEW fields that are active and visible."
+  [table :- i/TableInstance]
+  (seq (db/select Field
+         :table_id        (u/get-id table)
+         :active          true
+         :visibility_type [:not= "retired"]
+         :preview_display true
+         :last_analyzed   nil)))
+
+(s/defn ^:always-validate fingerprint-fields!
+  "Generate and save fingerprints for all the Fields in TABLE that have not been previously analyzed."
+  [table :- i/TableInstance]
+  (when-let [fields (fields-to-fingerprint table)]
+    (doseq [field fields]
+      (fingerprint! field))))
diff --git a/src/metabase/sync/analyze/fingerprint/global.clj b/src/metabase/sync/analyze/fingerprint/global.clj
new file mode 100644
index 0000000000000000000000000000000000000000..996b3ebd8d0a26287e437aa0ee880a6f85fcf538
--- /dev/null
+++ b/src/metabase/sync/analyze/fingerprint/global.clj
@@ -0,0 +1,13 @@
+(ns metabase.sync.analyze.fingerprint.global
+  "Logic for generating a `GlobalFingerprint` from a sequence of values for a Field of *any* type."
+  (:require [metabase.sync.interface :as i]
+            [schema.core :as s]))
+
+(s/defn ^:always-validate global-fingerprint :- i/GlobalFingerprint
+  "Generate a fingerprint of global information for Fields of all types."
+  [values :- i/ValuesSample]
+  ;; TODO - this logic isn't as nice as the old logic that actually called the DB
+  ;; We used to do (queries/field-distinct-count field field-values/low-cardinality-threshold)
+  ;; Consider whether we are so married to the idea of only generating fingerprints from samples that we
+  ;; are ok with inaccurate counts like the one we'll surely be getting here
+  {:distinct-count (count (distinct values))})
diff --git a/src/metabase/sync/analyze/fingerprint/number.clj b/src/metabase/sync/analyze/fingerprint/number.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5fb5ce21dec4406d8865475e25b06220b1615faa
--- /dev/null
+++ b/src/metabase/sync/analyze/fingerprint/number.clj
@@ -0,0 +1,17 @@
+(ns metabase.sync.analyze.fingerprint.number
+  "Logic for generating a `NumberFingerprint` from a sequence of values for a `:type/Number` Field."
+  (:require [metabase.sync.interface :as i]
+            [schema.core :as s]))
+
+(s/defn ^:private ^:always-validate average :- s/Num
+  "Return the average of VALUES."
+  [values :- i/ValuesSample]
+  (/ (double (reduce + values))
+     (double (count values))))
+
+(s/defn ^:always-validate number-fingerprint :- i/NumberFingerprint
+  "Generate a fingerprint containing information about values that belong to a `:type/Number` Field."
+  [values :- i/ValuesSample]
+  {:min (apply min values)
+   :max (apply max values)
+   :avg (average values)})
diff --git a/src/metabase/sync/analyze/fingerprint/sample.clj b/src/metabase/sync/analyze/fingerprint/sample.clj
new file mode 100644
index 0000000000000000000000000000000000000000..a76ce2e28fa7d97d10885a2e75a274f4cf076471
--- /dev/null
+++ b/src/metabase/sync/analyze/fingerprint/sample.clj
@@ -0,0 +1,26 @@
+(ns metabase.sync.analyze.fingerprint.sample
+  "Analysis sub-step that fetches a sample of values for a given Field, which is used to generate a fingerprint for it.
+   Currently this is dumb and just fetches a contiguous sequence of values, but in the future we plan to make this
+   more sophisticated and have different types of samples for different Fields."
+  (:require [metabase.driver :as driver]
+            [metabase.models
+             [database :refer [Database]]
+             [table :refer [Table]]]
+            [metabase.sync.interface :as i]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(s/defn ^:always-validate basic-sample :- (s/maybe i/ValuesSample)
+  "Procure a sequence of non-nil values, up to `max-sync-lazy-seq-results` (10,000 at the time of this writing),
+   for use in generating a fingerprint for FIELD. Returns `nil` if no values are available."
+  [field :- i/FieldInstance]
+  ;; TODO - we should make `->driver` a method so we can pass things like Fields into it
+  (let [db-id    (db/select-one-field :db_id Table :id (:table_id field))
+        driver   (driver/->driver db-id)
+        database (Database db-id)]
+    (driver/sync-in-context driver database
+      (fn []
+        (->> (driver/field-values-lazy-seq driver field)
+             (take driver/max-sync-lazy-seq-results)
+             (filter (complement nil?))
+             seq)))))
diff --git a/src/metabase/sync/analyze/fingerprint/text.clj b/src/metabase/sync/analyze/fingerprint/text.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5129e192696b85e9cb40f882da5fb853e7a0f621
--- /dev/null
+++ b/src/metabase/sync/analyze/fingerprint/text.clj
@@ -0,0 +1,39 @@
+(ns metabase.sync.analyze.fingerprint.text
+  "Logic for generating a `TextFingerprint` from a sequence of values for a `:type/Text` Field."
+  (:require [cheshire.core :as json]
+            [metabase.sync.interface :as i]
+            [metabase.util :as u]
+            [schema.core :as s]))
+
+(s/defn ^:private ^:always-validate average-length :- (s/constrained Double #(>= % 0))
+  "Return the average length of VALUES."
+  [values :- i/ValuesSample]
+  (let [total-length (reduce + (for [value values]
+                                 (count (str value))))]
+    (/ (double total-length)
+       (double (count values)))))
+
+(s/defn ^:private ^:always-validate percent-satisfying-predicate :- i/Percent
+  "Return the percentage of VALUES that satisfy PRED."
+  [pred :- (s/pred fn?), values :- i/ValuesSample]
+  (let [total-count    (count values)
+        pred           #(boolean (u/ignore-exceptions (pred %)))
+        matching-count (count (get (group-by pred values) true []))]
+    (/ (double matching-count)
+       (double total-count))))
+
+(defn- valid-serialized-json?
+  "True if X is a serialized JSON dictionary or array."
+  [x]
+  (boolean
+   (when-let [parsed-json (json/parse-string x)]
+     (or (map? parsed-json)
+         (sequential? parsed-json)))))
+
+(s/defn ^:always-validate text-fingerprint :- i/TextFingerprint
+  "Generate a fingerprint containing information about values that belong to a `:type/Text` Field."
+  [values :- i/ValuesSample]
+  {:percent-json   (percent-satisfying-predicate valid-serialized-json? values)
+   :percent-url    (percent-satisfying-predicate u/is-url? values)
+   :percent-email  (percent-satisfying-predicate u/is-email? values)
+   :average-length (average-length values)})
diff --git a/src/metabase/sync/analyze/table_row_count.clj b/src/metabase/sync/analyze/table_row_count.clj
new file mode 100644
index 0000000000000000000000000000000000000000..a4aa5021f3ea3c79800914241708af3babc1371f
--- /dev/null
+++ b/src/metabase/sync/analyze/table_row_count.clj
@@ -0,0 +1,26 @@
+(ns metabase.sync.analyze.table-row-count
+  "Logic for updating a Table's row count by running appropriate MBQL queries."
+  (:require [clojure.tools.logging :as log]
+            [metabase.db.metadata-queries :as queries]
+            [metabase.models.table :refer [Table]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(s/defn ^:private ^:always-validate table-row-count :- (s/maybe s/Int)
+  "Determine the count of rows in TABLE by running a simple structured MBQL query."
+  [table :- i/TableInstance]
+  (sync-util/with-error-handling (format "Unable to determine row count for %s" (sync-util/name-for-logging table))
+    (queries/table-row-count table)))
+
+(s/defn ^:always-validate update-row-count!
+  "Update the cached row count (`rows`) for a single TABLE."
+  [table :- i/TableInstance]
+  (sync-util/with-error-handling (format "Error setting table row count for %s" (sync-util/name-for-logging table))
+    (when-let [row-count (table-row-count table)]
+      (log/debug (format "Set table row count for %s to %d" (sync-util/name-for-logging table) row-count))
+      (db/update! Table (u/get-id table)
+        :rows row-count))))
diff --git a/src/metabase/sync/fetch_metadata.clj b/src/metabase/sync/fetch_metadata.clj
new file mode 100644
index 0000000000000000000000000000000000000000..8201d8f2068b29ecac92a9d3d2f116d2d12729b7
--- /dev/null
+++ b/src/metabase/sync/fetch_metadata.clj
@@ -0,0 +1,24 @@
+(ns metabase.sync.fetch-metadata
+  "Fetch metadata functions fetch 'snapshots' of the schema for a data warehouse database, including
+   information about tables, schemas, and fields, and their types.
+   For example, with SQL databases, these functions use the JDBC DatabaseMetaData to get this information."
+  (:require [metabase.driver :as driver]
+            [metabase.sync.interface :as i]
+            [schema.core :as s]))
+
+(s/defn ^:always-validate db-metadata :- i/DatabaseMetadata
+  "Get basic Metadata about a DATABASE and its Tables. Doesn't include information about the Fields."
+  [database :- i/DatabaseInstance]
+  (driver/describe-database (driver/->driver database) database))
+
+(s/defn ^:always-validate table-metadata :- i/TableMetadata
+  "Get more detailed information about a TABLE belonging to DATABASE. Includes information about the Fields."
+  [database :- i/DatabaseInstance, table :- i/TableInstance]
+  (driver/describe-table (driver/->driver database) database table))
+
+(s/defn ^:always-validate fk-metadata :- i/FKMetadata
+  "Get information about the foreign keys belonging to TABLE."
+  [database :- i/DatabaseInstance, table :- i/TableInstance]
+  (let [driver (driver/->driver database)]
+    (when (driver/driver-supports? driver :foreign-keys)
+      (driver/describe-table-fks driver database table))))
diff --git a/src/metabase/sync/field_values.clj b/src/metabase/sync/field_values.clj
new file mode 100644
index 0000000000000000000000000000000000000000..234b28e6347727b0c0ae9bfdff2b37165c9ce8ff
--- /dev/null
+++ b/src/metabase/sync/field_values.clj
@@ -0,0 +1,42 @@
+(ns metabase.sync.field-values
+  "Logic for updating cached FieldValues for fields in a database."
+  (:require [clojure.tools.logging :as log]
+            [metabase.models
+             [field :refer [Field]]
+             [field-values :refer [FieldValues] :as field-values]]
+            [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(s/defn ^:private ^:always-validate clear-field-values-for-field! [field :- i/FieldInstance]
+  (when (db/exists? FieldValues :field_id (u/get-id field))
+    (log/debug (format "Based on type info, %s should no longer have field values.\n" (sync-util/name-for-logging field))
+               (format "(base type: %s, special type: %s, visibility type: %s)\n" (:base_type field) (:special_type field) (:visibility_type field))
+               "Deleting FieldValues...")
+    (db/delete! FieldValues :field_id (u/get-id field))))
+
+(s/defn ^:private ^:always-validate update-field-values-for-field! [field :- i/FieldInstance]
+  (log/debug (u/format-color 'green "Looking into updating FieldValues for %s" (sync-util/name-for-logging field)))
+  (field-values/create-or-update-field-values! field))
+
+
+(s/defn ^:always-validate update-field-values-for-table!
+  "Update the cached FieldValues for all Fields (as needed) for TABLE."
+  [table :- i/TableInstance]
+  (doseq [field (db/select Field :table_id (u/get-id table), :active true, :visibility_type "normal")]
+    (sync-util/with-error-handling (format "Error updating field values for %s" (sync-util/name-for-logging field))
+      (if (field-values/field-should-have-field-values? field)
+        (update-field-values-for-field! field)
+        (clear-field-values-for-field! field)))))
+
+
+(s/defn ^:always-validate update-field-values!
+  "Update the cached FieldValues (distinct values for categories and certain other fields that are shown
+   in widgets like filters) for the Tables in DATABASE (as needed)."
+  [database :- i/DatabaseInstance]
+  (sync-util/sync-operation :cache-field-values database (format "Cache field values in %s" (sync-util/name-for-logging database))
+    (doseq [table (sync-util/db->sync-tables database)]
+      (update-field-values-for-table! table))))
diff --git a/src/metabase/sync/interface.clj b/src/metabase/sync/interface.clj
new file mode 100644
index 0000000000000000000000000000000000000000..6ba02b7b9e4fa469644df39aed82ac7a3ec91b32
--- /dev/null
+++ b/src/metabase/sync/interface.clj
@@ -0,0 +1,102 @@
+(ns metabase.sync.interface
+  "Schemas and constants used by the sync code."
+  (:require [metabase.models
+             [database :refer [Database]]
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.util.schema :as su]
+            [schema.core :as s]))
+
+
+(def DatabaseMetadataTable
+  "Schema for the expected output of `describe-database` for a Table."
+  {:name   su/NonBlankString
+   :schema (s/maybe su/NonBlankString)})
+
+(def DatabaseMetadata
+  "Schema for the expected output of `describe-database`."
+  {:tables #{DatabaseMetadataTable}})
+
+
+(def TableMetadataField
+  "Schema for a given Field as provided in `describe-table`."
+  {:name                           su/NonBlankString
+   :base-type                      su/FieldType
+   (s/optional-key :special-type)  (s/maybe su/FieldType)
+   (s/optional-key :pk?)           s/Bool
+   (s/optional-key :nested-fields) #{(s/recursive #'TableMetadataField)}
+   (s/optional-key :custom)        {s/Any s/Any}})
+
+(def TableMetadata
+  "Schema for the expected output of `describe-table`."
+  {:name   su/NonBlankString
+   :schema (s/maybe su/NonBlankString)
+   :fields #{TableMetadataField}})
+
+(def FKMetadataEntry
+  "Schema for an individual entry in `FKMetadata`."
+  {:fk-column-name   su/NonBlankString
+   :dest-table       {:name   su/NonBlankString
+                      :schema (s/maybe su/NonBlankString)}
+   :dest-column-name su/NonBlankString})
+
+(def FKMetadata
+  "Schema for the expected output of `describe-table-fks`."
+  (s/maybe #{FKMetadataEntry}))
+
+;; These schemas are provided purely as conveniences since adding `:import` statements to get the corresponding classes from the model namespaces
+;; also requires a `:require`, which `clj-refactor` seems more than happy to strip out from the ns declaration when running `cljr-clean-ns`.
+;; Plus as a bonus in the future we could add additional validations to these, e.g. requiring that a Field have a base_type
+
+(def DatabaseInstance "Schema for a valid instance of a Metabase Database." (class Database))
+(def TableInstance    "Schema for a valid instance of a Metabase Table."    (class Table))
+(def FieldInstance    "Schema for a valid instance of a Metabase Field."    (class Field))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                                SAMPLING & FINGERPRINTS                                                 |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(def ValuesSample
+  "Schema for a sample of VALUES returned by the `sample` sub-stage of analysis and passed into the `fingerprint` stage.
+   Guaranteed to be non-empty and non-nil."
+  ;; Validating against this is actually pretty quick, in the order of microseconds even for a 10,000 value sequence
+  (s/constrained [(s/pred (complement nil?))] seq "Non-empty sequence of non-nil values."))
+
+
+(def GlobalFingerprint
+  "Fingerprint values that Fields of all types should have."
+  {(s/optional-key :distinct-count) s/Int})
+
+(def Percent
+  "Schema for something representing a percentage. A floating-point value between (inclusive) 0 and 1."
+  (s/constrained s/Num #(<= 0 % 1) "Valid percentage between (inclusive) 0 and 1."))
+
+(def NumberFingerprint
+  "Schema for fingerprint information for Fields deriving from `:type/Number`."
+  {(s/optional-key :min) s/Num
+   (s/optional-key :max) s/Num
+   (s/optional-key :avg) s/Num})
+
+(def TextFingerprint
+  "Schema for fingerprint information for Fields deriving from `:type/Text`."
+  {(s/optional-key :percent-json)   Percent
+   (s/optional-key :percent-url)    Percent
+   (s/optional-key :percent-email)  Percent
+   (s/optional-key :average-length) (s/constrained Double #(>= % 0) "Valid number greater than or equal to zero")})
+
+(def TypeSpecificFingerprint
+  "Schema for type-specific fingerprint information."
+  (s/constrained
+   {(s/optional-key :type/Number) NumberFingerprint
+    (s/optional-key :type/Text)   TextFingerprint}
+   (fn [m]
+     (= 1 (count (keys m))))
+   "Type-specific fingerprint with exactly one key"))
+
+(def Fingerprint
+  "Schema for a Field 'fingerprint' generated as part of the analysis stage. Used to power the 'classification' sub-stage of
+   analysis. Stored as the `fingerprint` column of Field."
+  {(s/optional-key :global)       GlobalFingerprint
+   (s/optional-key :type)         TypeSpecificFingerprint
+   (s/optional-key :experimental) {s/Keyword s/Any}})
diff --git a/src/metabase/sync/sync_metadata.clj b/src/metabase/sync/sync_metadata.clj
new file mode 100644
index 0000000000000000000000000000000000000000..9975f7c055d480d2eeb6339acebc1027e57ad516
--- /dev/null
+++ b/src/metabase/sync/sync_metadata.clj
@@ -0,0 +1,36 @@
+(ns metabase.sync.sync-metadata
+  "Logic responsible for syncing the metadata for an entire database.
+   Delegates to different subtasks:
+
+   1.  Sync tables (`metabase.sync.sync-metadata.tables`)
+   2.  Sync fields (`metabase.sync.sync-metadata.fields`)
+   3.  Sync FKs    (`metabase.sync.sync-metadata.fks`)
+   4.  Sync Metabase Metadata table (`metabase.sync.sync-metadata.metabase-metadata`)"
+  (:require [metabase.sync
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.sync.sync-metadata
+             [fields :as sync-fields]
+             [fks :as sync-fks]
+             [metabase-metadata :as metabase-metadata]
+             [tables :as sync-tables]]
+            [schema.core :as s]))
+
+(s/defn ^:always-validate sync-db-metadata!
+  "Sync the metadata for a Metabase DATABASE. This makes sure child Table & Field objects are synchronized."
+  [database :- i/DatabaseInstance]
+  (sync-util/sync-operation :sync-metadata database (format "Sync metadata for %s" (sync-util/name-for-logging database))
+    ;; Make sure the relevant table models are up-to-date
+    (sync-tables/sync-tables! database)
+    ;; Now for each table, sync the fields
+    (sync-fields/sync-fields! database)
+    ;; Now for each table, sync the FKS. This has to be done after syncing all the fields to make sure target fields exist
+    (sync-fks/sync-fks! database)
+    ;; finally, sync the metadata metadata table if it exists.
+    (metabase-metadata/sync-metabase-metadata! database)))
+
+(s/defn ^:always-validate sync-table-metadata!
+  "Sync the metadata for an individual TABLE -- make sure Fields and FKs are up-to-date."
+  [table :- i/TableInstance]
+  (sync-fields/sync-fields-for-table! table)
+  (sync-fks/sync-fks-for-table! table))
diff --git a/src/metabase/sync/sync_metadata/fields.clj b/src/metabase/sync/sync_metadata/fields.clj
new file mode 100644
index 0000000000000000000000000000000000000000..10e9651145db17824b24874f489c76170e9780f6
--- /dev/null
+++ b/src/metabase/sync/sync_metadata/fields.clj
@@ -0,0 +1,236 @@
+(ns metabase.sync.sync-metadata.fields
+  "Logic for updating Metabase Field models from metadata fetched from a physical DB.
+   The basic idea here is to look at the metadata we get from calling `describe-table` on a connected database,
+   then construct an identical set of metadata from what we have about that Table in the Metabase DB. Then we
+   iterate over both sets of Metadata and perform whatever steps are needed to make sure the things in the DB
+   match the things that came back from `describe-table`."
+  (:require [clojure.string :as str]
+            [clojure.tools.logging :as log]
+            [medley.core :as m]
+            [metabase.models
+             [field :as field :refer [Field]]
+             [humanization :as humanization]
+             [table :as table]]
+            [metabase.sync
+             [fetch-metadata :as fetch-metadata]
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util :as u]
+            [metabase.util.schema :as su]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(def ^:private ParentID (s/maybe su/IntGreaterThanZero))
+
+(def ^:private TableMetadataFieldWithID
+  "Schema for `TableMetadataField` with an included ID of the corresponding Metabase Field object.
+   `our-metadata` is always returned in this format. (The ID is needed in certain places so we know
+   which Fields to retire, and the parent ID of any nested-fields.)"
+  (assoc i/TableMetadataField
+    :id                             su/IntGreaterThanZero
+    (s/optional-key :nested-fields) #{(s/recursive #'TableMetadataFieldWithID)}))
+
+(def ^:private TableMetadataFieldWithOptionalID
+  "Schema for either `i/TableMetadataField` (`db-metadata`) or `TableMetadataFieldWithID` (`our-metadata`)."
+  (assoc i/TableMetadataField
+    (s/optional-key :id)            su/IntGreaterThanZero
+    (s/optional-key :nested-fields) #{(s/recursive #'TableMetadataFieldWithOptionalID)}))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                             CREATING / REACTIVATING FIELDS                                             |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate matching-inactive-field :- (s/maybe i/FieldInstance)
+  "Return an inactive metabase Field that matches NEW-FIELD-METADATA, if any such Field exists."
+  [table :- i/TableInstance, new-field-metadata :- i/TableMetadataField, parent-id :- ParentID]
+  (db/select-one Field
+    :table_id    (u/get-id table)
+    :%lower.name (str/lower-case (:name new-field-metadata))
+    :parent_id   parent-id
+    :active      false))
+
+(s/defn ^:private ^:always-validate ->metabase-field! :- i/FieldInstance
+  "Return an active Metabase Field instance that matches NEW-FIELD-METADATA. This object will be created or reactivated as a side effect of calling this function."
+  [table :- i/TableInstance, new-field-metadata :- i/TableMetadataField, parent-id :- ParentID]
+  (if-let [matching-inactive-field (matching-inactive-field table new-field-metadata parent-id)]
+    ;; if the field already exists but was just marked inactive then reäctivate it
+    (do (db/update! Field (u/get-id matching-inactive-field)
+          :active true)
+        ;; now return the Field in question
+        (Field (u/get-id matching-inactive-field)))
+    ;; otherwise insert a new field
+    (let [{field-name :name, :keys [base-type special-type pk? raw-column-id]} new-field-metadata]
+      (db/insert! Field
+        :table_id     (u/get-id table)
+        :name         field-name
+        :display_name (humanization/name->human-readable-name field-name)
+        :base_type    base-type
+        :special_type (or special-type
+                          (when pk? :type/PK))
+        :parent_id    parent-id))))
+
+
+(s/defn ^:private ^:always-validate create-or-reactivate-field!
+  "Create (or reactivate) Metabase Field object(s) for NEW-FIELD-METADATA and any nested fields."
+  [table :- i/TableInstance, new-field-metadata :- i/TableMetadataField, parent-id :- ParentID]
+  ;; Create (or reactivate) the Metabase Field entry for NEW-FIELD-METADATA...
+  (let [metabase-field (->metabase-field! table new-field-metadata parent-id)]
+    ;; ...then recursively do the same for any nested fields that belong to it.
+    (doseq [nested-field (:nested-fields new-field-metadata)]
+      (create-or-reactivate-field! table nested-field (u/get-id metabase-field)))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                               "RETIRING" INACTIVE FIELDS                                               |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate retire-field!
+  "Mark an OLD-FIELD belonging to TABLE as inactive if corresponding Field object exists."
+  [table :- i/TableInstance, old-field :- TableMetadataFieldWithID]
+  (log/info (format "Marking %s Field '%s' as inactive." (sync-util/name-for-logging table) (:name old-field)))
+  (db/update! Field (:id old-field)
+    :active false)
+  ;; Now recursively mark any nested fields as inactive
+  (doseq [nested-field (:nested-fields old-field)]
+    (retire-field! table nested-field)))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                               SYNCING FIELDS IN DB (CREATING, REACTIVATING, OR RETIRING)                               |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate matching-field-metadata :- (s/maybe TableMetadataFieldWithOptionalID)
+  "Find Metadata that matches FIELD-METADATA from a set of OTHER-METADATA, if any exists."
+  [field-metadata :- TableMetadataFieldWithOptionalID, other-metadata :- #{TableMetadataFieldWithOptionalID}]
+  (some (fn [other-field-metadata]
+          (when (= (str/lower-case (:name field-metadata))
+                   (str/lower-case (:name other-field-metadata)))
+              other-field-metadata))
+        other-metadata))
+
+(s/defn ^:private ^:always-validate sync-field-instances!
+  "Make sure the instances of Metabase Field are in-sync with the DB-METADATA."
+  [table :- i/TableInstance, db-metadata :- #{i/TableMetadataField}, our-metadata :- #{TableMetadataFieldWithID}, parent-id :- ParentID]
+  ;; Loop thru fields in DB-METADATA. Create/reactivate any fields that don't exist in OUR-METADATA.
+  (doseq [db-field db-metadata]
+    (sync-util/with-error-handling (format "Error checking if Field '%s' needs to be created or reactivated" (:name db-field))
+      (if-let [our-field (matching-field-metadata db-field our-metadata)]
+        ;; if field exists in both metadata sets then recursively check the nested fields
+        (when-let [db-nested-fields (seq (:nested-fields db-field))]
+          (sync-field-instances! table (set db-nested-fields) (:nested-fields our-field) (:id our-field)))
+        ;; otherwise if field doesn't exist, create or reactivate it
+        (create-or-reactivate-field! table db-field parent-id))))
+  ;; ok, loop thru Fields in OUR-METADATA. Mark Fields as inactive if they don't exist in DB-METADATA.
+  (doseq [our-field our-metadata]
+    (sync-util/with-error-handling (format "Error checking if '%s' needs to be retired" (:name our-field))
+      (if-let [db-field (matching-field-metadata our-field db-metadata)]
+        ;; if field exists in both metadata sets we just need to recursively check the nested fields
+        (when-let [our-nested-fields (seq (:nested-fields our-field))]
+          (sync-field-instances! table (:nested-fields db-field) (set our-nested-fields) (:id our-field)))
+        ;; otherwise if field exists in our metadata but not DB metadata time to make it inactive
+        (retire-field! table our-field)))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                                UPDATING FIELD METADATA                                                 |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate update-metadata!
+  "Make sure things like PK status and base-type are in sync with what has come back from the DB."
+  [table :- i/TableInstance, db-metadata :- #{i/TableMetadataField}, parent-id :- ParentID]
+  (let [existing-fields      (db/select [Field :base_type :special_type :name :id]
+                               :table_id  (u/get-id table)
+                               :active    true
+                               :parent_id parent-id)
+        field-name->db-metadata (u/key-by (comp str/lower-case :name) db-metadata)]
+    ;; Make sure special types are up-to-date for all the fields
+    (doseq [field existing-fields]
+      (when-let [db-field (get field-name->db-metadata (str/lower-case (:name field)))]
+        ;; update special type if one came back from DB metadata but Field doesn't currently have one
+        (db/update! Field (u/get-id field)
+          (merge {:base_type (:base-type db-field)}
+                 (when-not (:special_type field)
+                   {:special_type (or (:special-type db-field)
+                                      (when (:pk? db-field) :type/PK))})))
+        ;; now recursively do the same for any nested fields
+        (when-let [db-nested-fields (seq (:nested-fields db-field))]
+          (update-metadata! table (set db-nested-fields) (u/get-id field)))))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                             FETCHING OUR CURRENT METADATA                                              |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate add-nested-fields :- TableMetadataFieldWithID
+  "Recursively add entries for any nested-fields to FIELD."
+  [field-metadata :- TableMetadataFieldWithID, parent-id->fields :- {ParentID #{TableMetadataFieldWithID}}]
+  (let [nested-fields (get parent-id->fields (u/get-id field-metadata))]
+    (if-not (seq nested-fields)
+      field-metadata
+      (assoc field-metadata :nested-fields (set (for [nested-field nested-fields]
+                                                  (add-nested-fields nested-field parent-id->fields)))))))
+
+(s/defn ^:private ^:always-validate parent-id->fields :- {ParentID #{TableMetadataFieldWithID}}
+  "Build a map of the Metabase Fields we have for TABLE, keyed by their parent id (usually `nil`)."
+  [table :- i/TableInstance]
+  (->> (for [field (db/select [Field :name :base_type :special_type :parent_id :id]
+                     :table_id (u/get-id table)
+                     :active   true)]
+         {:parent-id    (:parent_id field)
+          :id           (:id field)
+          :name         (:name field)
+          :base-type    (:base_type field)
+          :special-type (:special_type field)
+          :pk?          (isa? (:special_type field) :type/PK)})
+       ;; make a map of parent-id -> set of the Fields having that parent
+       (group-by :parent-id)
+       ;; remove the parent ID because the Metadata from `describe-table` won't have it. Save the results as a set
+       (m/map-vals (fn [fields]
+                     (set (for [field fields]
+                            (dissoc field :parent-id)))))))
+
+(s/defn ^:private ^:always-validate our-metadata :- #{TableMetadataFieldWithID}
+  "Return information we have about Fields for a TABLE currently in the application database
+   in (almost) exactly the same `TableMetadataField` format returned by `describe-table`."
+  [table :- i/TableInstance]
+  ;; Fetch all the Fields for this TABLE. Then group them by their parent ID, which we'll use to construct our metadata in the correct format
+  (let [parent-id->fields (parent-id->fields table)]
+    ;; get all the top-level fields, then call `add-nested-fields` to recursively add the fields
+    (set (for [field (get parent-id->fields nil)]
+           (add-nested-fields field parent-id->fields)))))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                          FETCHING METADATA FROM CONNECTED DB                                           |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:private ^:always-validate db-metadata :- #{i/TableMetadataField}
+  "Fetch metadata about Fields belonging to a given TABLE directly from an external database by calling its
+   driver's implementation of `describe-table`."
+  [database :- i/DatabaseInstance, table :- i/TableInstance]
+  (:fields (fetch-metadata/table-metadata database table)))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                                PUTTING IT ALL TOGETHER                                                 |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(s/defn ^:always-validate sync-fields-for-table!
+  "Sync the Fields in the Metabase application database for a specific TABLE."
+  ([table :- i/TableInstance]
+   (sync-fields-for-table! (table/database table) table))
+  ([database :- i/DatabaseInstance, table :- i/TableInstance]
+   (sync-util/with-error-handling (format "Error syncing fields for %s" (sync-util/name-for-logging table))
+     (let [db-metadata (db-metadata database table)]
+       ;; make sure the instances of Field are in-sync
+       (sync-field-instances! table db-metadata (our-metadata table) nil)
+       ;; now that tables are synced and fields created as needed make sure field properties are in sync
+       (update-metadata! table db-metadata nil)))))
+
+
+(s/defn ^:always-validate sync-fields!
+  "Sync the Fields in the Metabase application database for all the Tables in a DATABASE."
+  [database :- i/DatabaseInstance]
+  (doseq [table (sync-util/db->sync-tables database)]
+    (sync-fields-for-table! database table)))
diff --git a/src/metabase/sync/sync_metadata/fks.clj b/src/metabase/sync/sync_metadata/fks.clj
new file mode 100644
index 0000000000000000000000000000000000000000..5cee048f9421655a1b4d634f0ca7ca3f24d51162
--- /dev/null
+++ b/src/metabase/sync/sync_metadata/fks.clj
@@ -0,0 +1,75 @@
+(ns metabase.sync.sync-metadata.fks
+  "Logic for updating FK properties of Fields from metadata fetched from a physical DB."
+  (:require [clojure.string :as str]
+            [clojure.tools.logging :as log]
+            [metabase.models
+             [field :refer [Field]]
+             [table :as table :refer [Table]]]
+            [metabase.sync
+             [fetch-metadata :as fetch-metadata]
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(def ^:private FKRelationshipObjects
+  "Relevant objects for a foreign key relationship."
+  {:source-field i/FieldInstance
+   :dest-table   i/TableInstance
+   :dest-field   i/FieldInstance})
+
+(s/defn ^:private ^:always-validate fetch-fk-relationship-objects :- (s/maybe FKRelationshipObjects)
+  "Fetch the Metabase objects (Tables and Fields) that are relevant to a foreign key relationship described by FK."
+  [database :- i/DatabaseInstance, table :- i/TableInstance, fk :- i/FKMetadataEntry]
+  (when-let [source-field (db/select-one Field
+                            :table_id           (u/get-id table)
+                            :%lower.name        (str/lower-case (:fk-column-name fk))
+                            :fk_target_field_id nil
+                            :active             true
+                            :visibility_type    [:not= "retired"])]
+    (when-let [dest-table (db/select-one Table
+                            :db_id           (u/get-id database)
+                            :%lower.name     (str/lower-case (-> fk :dest-table :name))
+                            :%lower.schema   (when-let [schema (-> fk :dest-table :schema)]
+                                               (str/lower-case schema))
+                            :active          true
+                            :visibility_type nil)]
+      (when-let [dest-field (db/select-one Field
+                              :table_id           (u/get-id dest-table)
+                              :%lower.name        (str/lower-case (:dest-column-name fk))
+                              :active             true
+                              :visibility_type    [:not= "retired"])]
+        {:source-field source-field
+         :dest-table   dest-table
+         :dest-field   dest-field}))))
+
+
+(s/defn ^:private ^:always-validate mark-fk!
+  [database :- i/DatabaseInstance, table :- i/TableInstance, fk :- i/FKMetadataEntry]
+  (when-let [{:keys [source-field dest-table dest-field]} (fetch-fk-relationship-objects database table fk)]
+    (log/info (u/format-color 'cyan "Marking foreign key from %s %s -> %s %s"
+                (sync-util/name-for-logging table)
+                (sync-util/name-for-logging source-field)
+                (sync-util/name-for-logging dest-table)
+                (sync-util/name-for-logging dest-field)))
+    (db/update! Field (u/get-id source-field)
+      :special_type       :type/FK
+      :fk_target_field_id (u/get-id dest-field))))
+
+
+(s/defn ^:always-validate sync-fks-for-table!
+  "Sync the foreign keys for a specific TABLE."
+  ([table :- i/TableInstance]
+   (sync-fks-for-table! (table/database table) table))
+  ([database :- i/DatabaseInstance, table :- i/TableInstance]
+   (sync-util/with-error-handling (format "Error syncing FKs for %s" (sync-util/name-for-logging table))
+     (doseq [fk (fetch-metadata/fk-metadata database table)]
+       (mark-fk! database table fk)))))
+
+(s/defn ^:always-validate sync-fks!
+  "Sync the foreign keys in a DATABASE. This sets appropriate values for relevant Fields in the Metabase application DB
+   based on values from the `FKMetadata` returned by `describe-table-fks`."
+  [database :- i/DatabaseInstance]
+  (doseq [table (sync-util/db->sync-tables database)]
+    (sync-fks-for-table! database table)))
diff --git a/src/metabase/sync/sync_metadata/metabase_metadata.clj b/src/metabase/sync/sync_metadata/metabase_metadata.clj
new file mode 100644
index 0000000000000000000000000000000000000000..58e0513abe355da8cde26acbdf68643bfc1ccf39
--- /dev/null
+++ b/src/metabase/sync/sync_metadata/metabase_metadata.clj
@@ -0,0 +1,98 @@
+(ns metabase.sync.sync-metadata.metabase-metadata
+  "Logic for syncing the special `_metabase_metadata` table, which is a way for datasets
+   such as the Sample Dataset to specify properties such as special types that should
+   be applied during sync.
+
+   Currently, this is only used by the Sample Dataset, but theoretically in the future we could
+   add additional sample datasets and preconfigure them by populating this Table; or 3rd-party
+   applications or users can add this table to their database for an enhanced Metabase experience
+   out-of-the box."
+  (:require [clojure.string :as str]
+            [clojure.tools.logging :as log]
+            [metabase
+             [driver :as driver]
+             [util :as u]]
+            [metabase.models
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.sync
+             [fetch-metadata :as fetch-metadata]
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.util.schema :as su]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+(def ^:private KeypathComponents
+  {:table-name su/NonBlankString
+   :field-name (s/maybe su/NonBlankString)
+   :k          s/Keyword})
+
+(s/defn ^:private ^:always-validate parse-keypath :- KeypathComponents
+  "Parse a KEYPATH into components for easy use."
+  ;; TODO: this does not support schemas in dbs :(
+  [keypath :- su/NonBlankString]
+  ;; keypath will have one of two formats:
+  ;; table_name.property
+  ;; table_name.field_name.property
+  (let [[table-name second-part third-part] (str/split keypath #"\.")]
+    {:table-name table-name
+     :field-name (when third-part second-part)
+     :k          (keyword (or third-part second-part))}))
+
+(s/defn ^:private ^:always-validate set-property! :- s/Bool
+  "Set a property for a Field or Table in DATABASE. Returns `true` if a property was successfully set."
+  [database :- i/DatabaseInstance, {:keys [table-name field-name k]} :- KeypathComponents, value]
+  (boolean
+   ;; ignore legacy entries that try to set field_type since it's no longer part of Field
+   (when-not (= k :field_type)
+     ;; fetch the corresponding Table, then set the Table or Field property
+     (when-let [table-id (db/select-one-id Table
+                           ;; TODO: this needs to support schemas
+                           :db_id  (u/get-id database)
+                           :name   table-name
+                           :active true)]
+       (if field-name
+         (db/update-where! Field {:name field-name, :table_id table-id}
+           k value)
+         (db/update! Table table-id
+           k value))))))
+
+(s/defn ^:private ^:always-validate sync-metabase-metadata-table!
+  "Databases may include a table named `_metabase_metadata` (case-insensitive) which includes descriptions or other metadata about the `Tables` and `Fields`
+   it contains. This table is *not* synced normally, i.e. a Metabase `Table` is not created for it. Instead, *this* function is called, which reads the data it
+   contains and updates the relevant Metabase objects.
+
+   The table should have the following schema:
+
+     column  | type    | example
+     --------+---------+-------------------------------------------------
+     keypath | varchar | \"products.created_at.description\"
+     value   | varchar | \"The date the product was added to our catalog.\"
+
+   `keypath` is of the form `table-name.key` or `table-name.field-name.key`, where `key` is the name of some property of `Table` or `Field`.
+
+   This functionality is currently only used by the Sample Dataset. In order to use this functionality, drivers must implement optional fn `:table-rows-seq`."
+  [driver, database :- i/DatabaseInstance, metabase-metadata-table :- i/DatabaseMetadataTable]
+  (doseq [{:keys [keypath value]} (driver/table-rows-seq driver database metabase-metadata-table)]
+    (sync-util/with-error-handling (format "Error handling metabase metadata entry: set %s -> %s" keypath value)
+      (or (set-property! database (parse-keypath keypath) value)
+          (log/error (u/format-color 'red "Error syncing _metabase_metadata: no matching keypath: %s" keypath))))))
+
+
+(s/defn ^:always-validate is-metabase-metadata-table? :- s/Bool
+  "Is this TABLE the special `_metabase_metadata` table?"
+  [table :- i/DatabaseMetadataTable]
+  (= "_metabase_metadata" (str/lower-case (:name table))))
+
+(s/defn ^:always-validate sync-metabase-metadata!
+  "Sync the `_metabase_metadata` table, a special table with Metabase metadata, if present.
+   This table contains information about type information, descriptions, and other properties that
+   should be set for Metabase objects like Tables and Fields."
+  [database :- i/DatabaseInstance]
+  (sync-util/with-error-handling (format "Error syncing _metabase_metadata table for %s" (sync-util/name-for-logging database))
+    ;; If there's more than one metabase metadata table (in different schemas) we'll sync each one in turn.
+    ;; Hopefully this is never the case.
+    (doseq [table (:tables (fetch-metadata/db-metadata database))]
+      (when (is-metabase-metadata-table? table)
+        (sync-metabase-metadata-table! (driver/->driver database) database table)))))
diff --git a/src/metabase/sync/sync_metadata/tables.clj b/src/metabase/sync/sync_metadata/tables.clj
new file mode 100644
index 0000000000000000000000000000000000000000..f156fe285da73fca3755b10c624531d3f05c822c
--- /dev/null
+++ b/src/metabase/sync/sync_metadata/tables.clj
@@ -0,0 +1,152 @@
+(ns metabase.sync.sync-metadata.tables
+  "Logic for updating Metabase Table models from metadata fetched from a physical DB."
+  (:require [clojure
+             [data :as data]
+             [string :as str]]
+            [clojure.tools.logging :as log]
+            [metabase.models
+             [humanization :as humanization]
+             [table :as table :refer [Table]]]
+            [metabase.sync
+             [fetch-metadata :as fetch-metadata]
+             [interface :as i]
+             [util :as sync-util]]
+            [metabase.sync.sync-metadata.metabase-metadata :as metabase-metadata]
+            [metabase.util :as u]
+            [schema.core :as s]
+            [toucan.db :as db]))
+
+;;; ------------------------------------------------------------  "Crufty" Tables ------------------------------------------------------------
+
+;; Crufty tables are ones we know are from frameworks like Rails or Django and thus automatically mark as `:cruft`
+
+(def ^:private crufty-table-patterns
+  "Regular expressions that match Tables that should automatically be given the `visibility-type` of `:cruft`.
+   This means they are automatically hidden to users (but can be unhidden in the admin panel).
+   These `Tables` are known to not contain useful data, such as migration or web framework internal tables."
+  #{;; Django
+    #"^auth_group$"
+    #"^auth_group_permissions$"
+    #"^auth_permission$"
+    #"^django_admin_log$"
+    #"^django_content_type$"
+    #"^django_migrations$"
+    #"^django_session$"
+    #"^django_site$"
+    #"^south_migrationhistory$"
+    #"^user_groups$"
+    #"^user_user_permissions$"
+    ;; Drupal
+    #".*_cache$"
+    #".*_revision$"
+    #"^advagg_.*"
+    #"^apachesolr_.*"
+    #"^authmap$"
+    #"^autoload_registry.*"
+    #"^batch$"
+    #"^blocked_ips$"
+    #"^cache.*"
+    #"^captcha_.*"
+    #"^config$"
+    #"^field_revision_.*"
+    #"^flood$"
+    #"^node_revision.*"
+    #"^queue$"
+    #"^rate_bot_.*"
+    #"^registry.*"
+    #"^router.*"
+    #"^semaphore$"
+    #"^sequences$"
+    #"^sessions$"
+    #"^watchdog$"
+    ;; Rails / Active Record
+    #"^schema_migrations$"
+    ;; PostGIS
+    #"^spatial_ref_sys$"
+    ;; nginx
+    #"^nginx_access_log$"
+    ;; Liquibase
+    #"^databasechangelog$"
+    #"^databasechangeloglock$"
+    ;; Lobos
+    #"^lobos_migrations$"})
+
+(s/defn ^:private ^:always-validate is-crufty-table? :- s/Bool
+  "Should we give newly created TABLE a `visibility_type` of `:cruft`?"
+  [table :- i/DatabaseMetadataTable]
+  (boolean (some #(re-find % (str/lower-case (:name table))) crufty-table-patterns)))
+
+
+;;; ------------------------------------------------------------ Syncing ------------------------------------------------------------
+
+;; TODO - should we make this logic case-insensitive like it is for fields?
+
+(s/defn ^:private ^:always-validate create-or-reactivate-tables!
+  "Create NEW-TABLES for database, or if they already exist, mark them as active."
+  [database :- i/DatabaseInstance, new-tables :- #{i/DatabaseMetadataTable}]
+  (log/info "Found new tables:"
+            (for [table new-tables]
+              (sync-util/name-for-logging (table/map->TableInstance table))))
+  (doseq [{schema :schema, table-name :name, :as table} new-tables]
+    (if-let [existing-id (db/select-one-id Table
+                           :db_id  (u/get-id database)
+                           :schema schema
+                           :name   table-name
+                           :active false)]
+      ;; if the table already exists but is marked *inactive*, mark it as *active*
+      (db/update! Table existing-id
+        :active true)
+      ;; otherwise create a new Table
+      (db/insert! Table
+        :db_id           (u/get-id database)
+        :schema          schema
+        :name            table-name
+        :display_name    (humanization/name->human-readable-name table-name)
+        :active          true
+        :visibility_type (when (is-crufty-table? table)
+                           :cruft)))))
+
+
+(s/defn ^:private ^:always-validate retire-tables!
+  "Mark any OLD-TABLES belonging to DATABASE as inactive. Matches on both schema *and* name so only the listed tables are retired."
+  [database :- i/DatabaseInstance, old-tables :- #{i/DatabaseMetadataTable}]
+  (log/info "Marking tables as inactive:"
+            (for [table old-tables]
+              (sync-util/name-for-logging (table/map->TableInstance table))))
+  (doseq [{schema :schema, table-name :name} old-tables]
+    (db/update-where! Table {:db_id  (u/get-id database)
+                             :schema schema, :name table-name
+                             :active true}
+      :active false)))
+
+
+(s/defn ^:private ^:always-validate db-metadata :- #{i/DatabaseMetadataTable}
+  "Return information about DATABASE by calling its driver's implementation of `describe-database`."
+  [database :- i/DatabaseInstance]
+  (set (for [table (:tables (fetch-metadata/db-metadata database))
+             :when (not (metabase-metadata/is-metabase-metadata-table? table))]
+         table)))
+
+(s/defn ^:private ^:always-validate our-metadata :- #{i/DatabaseMetadataTable}
+  "Return information about what Tables we have for this DB in the Metabase application DB."
+  [database :- i/DatabaseInstance]
+  (set (map (partial into {})
+            (db/select [Table :name :schema]
+              :db_id  (u/get-id database)
+              :active true))))
+
+(s/defn ^:always-validate sync-tables!
+  "Sync the Tables recorded in the Metabase application database with the ones obtained by calling DATABASE's driver's implementation of `describe-database`."
+  [database :- i/DatabaseInstance]
+  ;; determine what's changed between what info we have and what's in the DB
+  (let [db-metadata             (db-metadata database)
+        our-metadata            (our-metadata database)
+        [new-tables old-tables] (data/diff db-metadata our-metadata)]
+    ;; create new tables as needed or mark them as active again
+    (when (seq new-tables)
+      (sync-util/with-error-handling (format "Error creating/reactivating tables for %s" (sync-util/name-for-logging database))
+        (create-or-reactivate-tables! database new-tables)))
+    ;; mark old tables as inactive
+    (when (seq old-tables)
+      (sync-util/with-error-handling (format "Error retiring tables for %s" (sync-util/name-for-logging database))
+        (retire-tables! database old-tables)))))
diff --git a/src/metabase/sync/util.clj b/src/metabase/sync/util.clj
new file mode 100644
index 0000000000000000000000000000000000000000..e1e8d809caab3a32d157fa9f83f44d39c099a042
--- /dev/null
+++ b/src/metabase/sync/util.clj
@@ -0,0 +1,233 @@
+(ns metabase.sync.util
+  "Utility functions and macros to abstract away some common patterns and operations across the sync processes, such as logging start/end messages."
+  (:require [clojure.math.numeric-tower :as math]
+            [clojure.string :as str]
+            [clojure.tools.logging :as log]
+            [medley.core :as m]
+            [metabase
+             [driver :as driver]
+             [events :as events]
+             [util :as u]]
+            [metabase.models.table :refer [Table]]
+            [metabase.query-processor.interface :as qpi]
+            [metabase.sync.interface :as i]
+            [toucan.db :as db]))
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                              SYNC OPERATION "MIDDLEWARE"                                               |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+;; When using the `sync-operation` macro below the BODY of the macro will be executed in the context of several different
+;; functions below that do things like prevent duplicate operations from being ran simultaneously and taking care of
+;; things like event publishing, error handling, and logging.
+;;
+;; These basically operate in a middleware pattern, where the various different steps take a function, and return a new function
+;; that will execute the original in whatever context or with whatever side effects appropriate for that step.
+
+
+;; This looks something like {:sync #{1 2}, :cache #{2 3}} when populated.
+;; Key is a type of sync operation, e.g. `:sync` or `:cache`; vals are sets of DB IDs undergoing that operation.
+;; TODO - as @salsakran mentioned it would be nice to do this via the DB so we could better support multi-instance setups in the future
+(defonce ^:private operation->db-ids (atom {}))
+
+(defn with-duplicate-ops-prevented
+  "Run F in a way that will prevent it from being run simultaneously more than once for a single database for a given OPERATION.
+   This prevents duplicate sync-like operations from taking place for a given DB, e.g. if a user hits the `Sync` button in the admin panel multiple times.
+
+     ;; Only one `sync-db!` for `database-id` will be allowed at any given moment; duplicates will be ignored
+     (with-duplicate-ops-prevented :sync database-id
+       #(sync-db! database-id))"
+  {:style/indent 2}
+  [operation database-or-id f]
+  (fn []
+    (when-not (contains? (@operation->db-ids operation) (u/get-id database-or-id))
+      (try
+        ;; mark this database as currently syncing so we can prevent duplicate sync attempts (#2337)
+        (swap! operation->db-ids update operation #(conj (or % #{}) (u/get-id database-or-id)))
+        (log/debug "Sync operations in flight:" (m/filter-vals seq @operation->db-ids))
+        ;; do our work
+        (f)
+        ;; always take the ID out of the set when we are through
+        (finally
+          (swap! operation->db-ids update operation #(disj % (u/get-id database-or-id))))))))
+
+
+(defn- with-sync-events
+  "Publish events related to beginning and ending a sync-like process, e.g. `:sync-database` or `:cache-values`, for a DATABASE-ID.
+   F is executed between the logging of the two events."
+  ;; we can do everyone a favor and infer the name of the individual begin and sync events
+  ([event-name-prefix database-or-id f]
+   (with-sync-events
+    (keyword (str (name event-name-prefix) "-begin"))
+    (keyword (str (name event-name-prefix) "-end"))
+    database-or-id
+    f))
+  ([begin-event-name end-event-name database-or-id f]
+   (fn []
+     (let [start-time    (System/nanoTime)
+           tracking-hash (str (java.util.UUID/randomUUID))]
+       (events/publish-event! begin-event-name {:database_id (u/get-id database-or-id), :custom_id tracking-hash})
+       (f)
+       (let [total-time-ms (int (/ (- (System/nanoTime) start-time)
+                                   1000000.0))]
+         (events/publish-event! end-event-name {:database_id  (u/get-id database-or-id)
+                                                :custom_id    tracking-hash
+                                                :running_time total-time-ms}))
+       nil))))
+
+
+(defn- with-start-and-finish-logging
+  "Log MESSAGE about a process starting, then run F, and then log a MESSAGE about it finishing.
+   (The final message includes a summary of how long it took to run F.)"
+  {:style/indent 1}
+  [message f]
+  (fn []
+    (let [start-time (System/nanoTime)]
+      (log/info (u/format-color 'magenta "STARTING: %s" message))
+      (f)
+      (log/info (u/format-color 'magenta "FINISHED: %s (%s)" message (u/format-nanoseconds (- (System/nanoTime) start-time)))))))
+
+
+(defn- with-db-logging-disabled
+  "Disable all QP and DB logging when running BODY. (This should be done for *all* sync-like processes to avoid cluttering the logs.)"
+  {:style/indent 0}
+  [f]
+  (fn []
+    (binding [qpi/*disable-qp-logging* true
+              db/*disable-db-logging*  true]
+      (f))))
+
+(defn- sync-in-context
+  "Pass the sync operation defined by BODY to the DATABASE's driver's implementation of `sync-in-context`.
+   This method is used to do things like establish a connection or other driver-specific steps needed for sync operations."
+  {:style/indent 1}
+  [database f]
+  (fn []
+    (driver/sync-in-context (driver/->driver database) database
+      f)))
+
+
+(defn do-with-error-handling
+  "Internal implementation of `with-error-handling`; use that instead of calling this directly."
+  ([f]
+   (do-with-error-handling "Error running sync step" f))
+  ([message f]
+   (try (f)
+        (catch Throwable e
+          (log/error (u/format-color 'red "%s: %s\n%s"
+                       message
+                       (or (.getMessage e) (class e))
+                       (u/pprint-to-str (or (seq (u/filtered-stacktrace e))
+                                            (.getStackTrace e)))))))))
+
+(defmacro with-error-handling
+  "Execute BODY in a way that catches and logs any Exceptions thrown, and returns `nil` if they do so.
+   Pass a MESSAGE to help provide information about what failed for the log message."
+  {:style/indent 1}
+  [message & body]
+  `(do-with-error-handling ~message (fn [] ~@body)))
+
+(defn do-sync-operation
+  "Internal implementation of `sync-operation`; use that instead of calling this directly."
+  [operation database message f]
+  ((with-duplicate-ops-prevented operation database
+     (with-sync-events operation database
+       (with-start-and-finish-logging message
+         (with-db-logging-disabled
+           (sync-in-context database
+             (partial do-with-error-handling f))))))))
+
+(defmacro sync-operation
+  "Perform the operations in BODY as a sync operation, which wraps the code in several special macros that do things like
+   error handling, logging, duplicate operation prevention, and event publishing.
+   Intended for use with the various top-level sync operations, such as `sync-metadata` or `analyze`."
+  {:style/indent 3}
+  [operation database message & body]
+  `(do-sync-operation ~operation ~database ~message (fn [] ~@body)))
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                                  EMOJI PROGRESS METER                                                  |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+;; This is primarily provided because it makes sync more fun to look at. The functions below make it fairly simple to log a
+;; progress bar with a corresponding emoji when iterating over a sequence of objects during sync, e.g. syncing all the Tables
+;; in a given Database.
+
+(def ^:private ^:const ^Integer emoji-meter-width 50)
+
+(def ^:private progress-emoji
+  ["😱"   ; face screaming in fear
+   "😢"   ; crying face
+   "😞"   ; disappointed face
+   "😒"   ; unamused face
+   "😕"   ; confused face
+   "😐"   ; neutral face
+   "😬"   ; grimacing face
+   "😌"   ; relieved face
+   "😏"   ; smirking face
+   "😋"   ; face savouring delicious food
+   "😊"   ; smiling face with smiling eyes
+   "😍"   ; smiling face with heart shaped eyes
+   "😎"]) ; smiling face with sunglasses
+
+(defn- percent-done->emoji [percent-done]
+  (progress-emoji (int (math/round (* percent-done (dec (count progress-emoji)))))))
+
+(defn emoji-progress-bar
+  "Create a string that shows progress for something, e.g. a database sync process.
+
+     (emoji-progress-bar 10 40)
+       -> \"[************······································] 😒   25%\"
+  [completed total]
+  (let [percent-done (float (/ completed total))
+        filleds      (int (* percent-done emoji-meter-width))
+        blanks       (- emoji-meter-width filleds)]
+    (str "["
+         (str/join (repeat filleds "*"))
+         (str/join (repeat blanks "·"))
+         (format "] %s  %3.0f%%" (u/emoji (percent-done->emoji percent-done)) (* percent-done 100.0)))))
+
+(defmacro with-emoji-progress-bar
+  "Run BODY with access to a function that makes using our amazing emoji-progress-bar easy like Sunday morning.
+   Calling the function will return the appropriate string output for logging and automatically increment an internal counter as needed.
+     (with-emoji-progress-bar [progress-bar 10]
+       (dotimes [i 10]
+         (println (progress-bar))))"
+  {:style/indent 1}
+  [[emoji-progress-fn-binding total-count] & body]
+  `(let [finished-count#            (atom 0)
+         total-count#               ~total-count
+         ~emoji-progress-fn-binding (fn [] (emoji-progress-bar (swap! finished-count# inc) total-count#))]
+     ~@body))
+
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                              OTHER SYNC UTILITY FUNCTIONS                                              |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
+(defn db->sync-tables
+  "Return all the Tables that should go through the sync processes for DATABASE-OR-ID."
+  [database-or-id]
+  (db/select Table, :db_id (u/get-id database-or-id), :active true, :visibility_type nil))
+
+
+;; The `name-for-logging` function is used all over the sync code to make sure we have easy access to consistently formatted
+;; descriptions of various objects.
+
+(defprotocol ^:private INameForLogging
+  (name-for-logging [this]
+    "Return an appropriate string for logging an object in sync logging messages.
+     Should be something like \"postgres Database 'test-data'\""))
+
+(extend-protocol INameForLogging
+  i/DatabaseInstance
+  (name-for-logging [{database-name :name, id :id, engine :engine,}]
+    (format "%s Database %s '%s'" (name engine) (or id "") database-name))
+
+  i/TableInstance
+  (name-for-logging [{schema :schema, id :id, table-name :name}]
+    (format "Table %s '%s'" (or id "") (str (when (seq schema) (str schema ".")) table-name)))
+
+  i/FieldInstance
+  (name-for-logging [{field-name :name, id :id}]
+    (format "Field %s '%s'" (or id "") field-name)))
diff --git a/src/metabase/sync_database.clj b/src/metabase/sync_database.clj
deleted file mode 100644
index 1e85c474c2cbe11d7027765964016618776a3216..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database.clj
+++ /dev/null
@@ -1,117 +0,0 @@
-(ns metabase.sync-database
-  "The logic for doing DB and Table syncing itself."
-  (:require [clojure.tools.logging :as log]
-            [metabase
-             [driver :as driver]
-             [events :as events]
-             [util :as u]]
-            [metabase.models
-             [raw-table :as raw-table]
-             [table :as table]]
-            [metabase.query-processor.interface :as i]
-            [metabase.sync-database
-             [analyze :as analyze]
-             [introspect :as introspect]
-             [sync :as sync]
-             [sync-dynamic :as sync-dynamic]]
-            [toucan.db :as db]))
-
-(declare sync-database-with-tracking!
-         sync-table-with-tracking!)
-
-(defonce ^:private currently-syncing-dbs (atom #{}))
-
-
-(defn sync-database!
-  "Sync DATABASE and all its Tables and Fields.
-
-   Takes an optional kwarg `:full-sync?` which determines if we execute our table analysis work.  If this is not specified
-   then we default to using the `:is_full_sync` attribute of the database."
-  [{database-id :id, :as database} & {:keys [full-sync?]}]
-  {:pre [(map? database)]}
-  ;; if this database is already being synced then bail now
-  (when-not (contains? @currently-syncing-dbs database-id)
-    (binding [i/*disable-qp-logging*  true
-              db/*disable-db-logging* true]
-      (let [db-driver  (driver/engine->driver (:engine database))
-            full-sync? (if-not (nil? full-sync?)
-                         full-sync?
-                         (:is_full_sync database))]
-        (try
-          ;; mark this database as currently syncing so we can prevent duplicate sync attempts (#2337)
-          (swap! currently-syncing-dbs conj database-id)
-          ;; do our work
-          (driver/sync-in-context db-driver database (partial sync-database-with-tracking! db-driver database full-sync?))
-          (finally
-            ;; always cleanup our tracking when we are through
-            (swap! currently-syncing-dbs disj database-id)))))))
-
-(defn sync-table!
-  "Sync a *single* TABLE and all of its Fields.
-   This is used *instead* of `sync-database!` when syncing just one Table is desirable.
-
-   Takes an optional kwarg `:full-sync?` which determines if we execute our table analysis work.  If this is not specified
-   then we default to using the `:is_full_sync` attribute of the tables parent database."
-  [table & {:keys [full-sync?]}]
-  {:pre [(map? table)]}
-  (binding [i/*disable-qp-logging* true]
-    (let [database   (table/database table)
-          db-driver  (driver/engine->driver (:engine database))
-          full-sync? (if-not (nil? full-sync?)
-                       full-sync?
-                       (:is_full_sync database))]
-      (driver/sync-in-context db-driver database (partial sync-table-with-tracking! db-driver database table full-sync?)))))
-
-
-;;; ## ---------------------------------------- IMPLEMENTATION ----------------------------------------
-
-
-(defn- sync-database-with-tracking! [driver database full-sync?]
-  (let [start-time (System/nanoTime)
-        tracking-hash (str (java.util.UUID/randomUUID))]
-    (log/info (u/format-color 'magenta "Syncing %s database '%s'..." (name driver) (:name database)))
-    (events/publish-event! :database-sync-begin {:database_id (:id database) :custom_id tracking-hash})
-
-    (binding [i/*disable-qp-logging*  true
-              db/*disable-db-logging* true]
-      ;; start with capturing a full introspection of the database
-      (introspect/introspect-database-and-update-raw-tables! driver database)
-
-      ;; use the introspected schema information and update our working data models
-      (if (driver/driver-supports? driver :dynamic-schema)
-        (sync-dynamic/scan-database-and-update-data-model! driver database)
-        (sync/update-data-models-from-raw-tables! database))
-
-      ;; now do any in-depth data analysis which requires querying the tables (if enabled)
-      (when full-sync?
-        (analyze/analyze-data-shape-for-tables! driver database)))
-
-    (events/publish-event! :database-sync-end {:database_id  (:id database)
-                                              :custom_id    tracking-hash
-                                              :running_time (int (/ (- (System/nanoTime) start-time) 1000000.0))}) ; convert to ms
-    (log/info (u/format-color 'magenta "Finished syncing %s database '%s'. (%s)" (name driver) (:name database)
-                              (u/format-nanoseconds (- (System/nanoTime) start-time))))))
-
-
-(defn- sync-table-with-tracking! [driver database table full-sync?]
-  (let [start-time (System/nanoTime)]
-    (log/info (u/format-color 'magenta "Syncing table '%s' from %s database '%s'..." (:display_name table) (name driver) (:name database)))
-
-    (binding [i/*disable-qp-logging* true
-              db/*disable-db-logging* true]
-      ;; if the Table has a RawTable backing it then do an introspection and sync
-      (when-let [raw-table (raw-table/RawTable (:raw_table_id table))]
-        (introspect/introspect-raw-table-and-update! driver database raw-table)
-        (sync/update-data-models-for-table! table))
-
-      ;; if this table comes from a dynamic schema db then run that sync process now
-      (when (driver/driver-supports? driver :dynamic-schema)
-        (sync-dynamic/scan-table-and-update-data-model! driver database table))
-
-      ;; analyze if we are supposed to
-      (when full-sync?
-        (analyze/analyze-table-data-shape! driver table)))
-
-    (events/publish-event! :table-sync {:table_id (:id table)})
-    (log/info (u/format-color 'magenta "Finished syncing table '%s' from %s database '%s'. (%s)" (:display_name table) (name driver) (:name database)
-                              (u/format-nanoseconds (- (System/nanoTime) start-time))))))
diff --git a/src/metabase/sync_database/analyze.clj b/src/metabase/sync_database/analyze.clj
deleted file mode 100644
index 8881ba24d65522e8bac4223f19a0b242d2bd43fa..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database/analyze.clj
+++ /dev/null
@@ -1,261 +0,0 @@
-(ns metabase.sync-database.analyze
-  "Functions which handle the in-depth data shape analysis portion of the sync process."
-  (:require [cheshire.core :as json]
-            [clojure.math.numeric-tower :as math]
-            [clojure.string :as s]
-            [clojure.tools.logging :as log]
-            [metabase
-             [driver :as driver]
-             [util :as u]]
-            [metabase.db.metadata-queries :as queries]
-            [metabase.models
-             [field :as field]
-             [field-values :as field-values]
-             [table :as table]]
-            [metabase.sync-database.interface :as i]
-            [schema.core :as schema]
-            [toucan.db :as db]))
-
-(def ^:private ^:const ^Float percent-valid-url-threshold
-  "Fields that have at least this percent of values that are valid URLs should be given a special type of `:type/URL`."
-  0.95)
-
-(def ^:private ^:const ^Integer low-cardinality-threshold
-  "Fields with less than this many distinct values should automatically be given a special type of `:type/Category`."
-  300)
-
-(def ^:private ^:const ^Integer field-values-entry-max-length
-  "The maximum character length for a stored `FieldValues` entry."
-  100)
-
-(def ^:private ^:const ^Integer field-values-total-max-length
-  "Maximum total length for a FieldValues entry (combined length of all values for the field)."
-  (* low-cardinality-threshold field-values-entry-max-length))
-
-(def ^:private ^:const ^Integer average-length-no-preview-threshold
-  "Fields whose values' average length is greater than this amount should be marked as `preview_display = false`."
-  50)
-
-
-(defn table-row-count
-  "Determine the count of rows in TABLE by running a simple structured MBQL query."
-  [table]
-  {:pre [(integer? (:id table))]}
-  (try
-    (queries/table-row-count table)
-    (catch Throwable e
-      (log/warn (u/format-color 'red "Unable to determine row count for '%s': %s\n%s" (:name table) (.getMessage e) (u/pprint-to-str (u/filtered-stacktrace e)))))))
-
-(defn test-for-cardinality?
-  "Should FIELD should be tested for cardinality?"
-  [field is-new?]
-  (or (field-values/field-should-have-field-values? field)
-      (and (nil? (:special_type field))
-           is-new?
-           (not (isa? (:base_type field) :type/DateTime))
-           (not (isa? (:base_type field) :type/Collection))
-           (not (= (:base_type field) :type/*)))))
-
-(defn- field-values-below-low-cardinality-threshold? [non-nil-values]
-  (and (<= (count non-nil-values) low-cardinality-threshold)
-      ;; very simple check to see if total length of field-values exceeds (total values * max per value)
-       (let [total-length (reduce + (map (comp count str) non-nil-values))]
-         (<= total-length field-values-total-max-length))))
-
-(defn test:cardinality-and-extract-field-values
-  "Extract field-values for FIELD.  If number of values exceeds `low-cardinality-threshold` then we return an empty set of values."
-  [field field-stats]
-  ;; TODO: we need some way of marking a field as not allowing field-values so that we can skip this work if it's not appropriate
-  ;;       for example, :type/Category fields with more than MAX values don't need to be rescanned all the time
-  (let [non-nil-values  (filter identity (queries/field-distinct-values field (inc low-cardinality-threshold)))
-        ;; only return the list if we didn't exceed our MAX values and if the the total character count of our values is reasable (#2332)
-        distinct-values (when (field-values-below-low-cardinality-threshold? non-nil-values)
-                          non-nil-values)]
-    (cond-> (assoc field-stats :values distinct-values)
-      (and (nil? (:special_type field))
-           (pos? (count distinct-values))) (assoc :special-type :type/Category))))
-
-(defn- test:no-preview-display
-  "If FIELD's is textual and its average length is too great, mark it so it isn't displayed in the UI."
-  [driver field field-stats]
-  (if-not (and (= :normal (:visibility_type field))
-               (isa? (:base_type field) :type/Text))
-    ;; this field isn't suited for this test
-    field-stats
-    ;; test for avg length
-    (let [avg-len (u/try-apply (:field-avg-length driver) field)]
-      (if-not (and avg-len (> avg-len average-length-no-preview-threshold))
-        field-stats
-        (do
-          (log/debug (u/format-color 'green "Field '%s' has an average length of %d. Not displaying it in previews." (field/qualified-name field) avg-len))
-          (assoc field-stats :preview-display false))))))
-
-(defn- test:url-special-type
-  "If FIELD is texual, doesn't have a `special_type`, and its non-nil values are primarily URLs, mark it as `special_type` `:type/URL`."
-  [driver field field-stats]
-  (if-not (and (not (:special_type field))
-               (isa? (:base_type field) :type/Text))
-    ;; this field isn't suited for this test
-    field-stats
-    ;; test for url values
-    (let [percent-urls (u/try-apply (:field-percent-urls driver) field)]
-      (if-not (and (float? percent-urls)
-                   (>= percent-urls 0.0)
-                   (<= percent-urls 100.0)
-                   (> percent-urls percent-valid-url-threshold))
-        field-stats
-        (do
-          (log/debug (u/format-color 'green "Field '%s' is %d%% URLs. Marking it as a URL." (field/qualified-name field) (int (math/round (* 100 percent-urls)))))
-          (assoc field-stats :special-type :url))))))
-
-(defn- values-are-valid-json?
-  "`true` if at every item in VALUES is `nil` or a valid string-encoded JSON dictionary or array, and at least one of those is non-nil."
-  [values]
-  (try
-    (loop [at-least-one-non-nil-value? false, [val & more] values]
-      (cond
-        (and (not val)
-             (not (seq more))) at-least-one-non-nil-value?
-        (s/blank? val)         (recur at-least-one-non-nil-value? more)
-        ;; If val is non-nil, check that it's a JSON dictionary or array. We don't want to mark Fields containing other
-        ;; types of valid JSON values as :json (e.g. a string representation of a number or boolean)
-        :else                  (do (u/prog1 (json/parse-string val)
-                                     (assert (or (map? <>)
-                                                 (sequential? <>))))
-                                   (recur true more))))
-    (catch Throwable _
-      false)))
-
-(defn- test:json-special-type
-  "Mark FIELD as `:json` if it's textual, doesn't already have a special type, the majority of it's values are non-nil, and all of its non-nil values
-   are valid serialized JSON dictionaries or arrays."
-  [driver field field-stats]
-  (if (or (:special_type field)
-          (not (isa? (:base_type field) :type/Text)))
-    ;; this field isn't suited for this test
-    field-stats
-    ;; check for json values
-    (if-not (values-are-valid-json? (take driver/max-sync-lazy-seq-results (driver/field-values-lazy-seq driver field)))
-      field-stats
-      (do
-        (log/debug (u/format-color 'green "Field '%s' looks like it contains valid JSON objects. Setting special_type to :type/SerializedJSON." (field/qualified-name field)))
-        (assoc field-stats :special-type :type/SerializedJSON, :preview-display false)))))
-
-(defn- values-are-valid-emails?
-  "`true` if at every item in VALUES is `nil` or a valid email, and at least one of those is non-nil."
-  [values]
-  (try
-    (loop [at-least-one-non-nil-value? false, [val & more] values]
-      (cond
-        (and (not val)
-             (not (seq more))) at-least-one-non-nil-value?
-        (s/blank? val)         (recur at-least-one-non-nil-value? more)
-        ;; If val is non-nil, check that it's a JSON dictionary or array. We don't want to mark Fields containing other
-        ;; types of valid JSON values as :json (e.g. a string representation of a number or boolean)
-        :else                  (do (assert (u/is-email? val))
-                                   (recur true more))))
-    (catch Throwable _
-      false)))
-
-(defn- test:email-special-type
-  "Mark FIELD as `:email` if it's textual, doesn't already have a special type, the majority of it's values are non-nil, and all of its non-nil values
-   are valid emails."
-  [driver field field-stats]
-  (if (or (:special_type field)
-          (not (isa? (:base_type field) :type/Text)))
-    ;; this field isn't suited for this test
-    field-stats
-    ;; check for emails
-    (if-not (values-are-valid-emails? (take driver/max-sync-lazy-seq-results (driver/field-values-lazy-seq driver field)))
-      field-stats
-      (do
-        (log/debug (u/format-color 'green "Field '%s' looks like it contains valid email addresses. Setting special_type to :type/Email." (field/qualified-name field)))
-        (assoc field-stats :special-type :type/Email, :preview-display true)))))
-
-(defn- test:new-field
-  "Do the various tests that should only be done for a new `Field`.
-   We only run most of the field analysis work when the field is NEW in order to favor performance of the sync process."
-  [driver field field-stats]
-  (->> field-stats
-       (test:no-preview-display driver field)
-       (test:url-special-type   driver field)
-       (test:json-special-type  driver field)
-       (test:email-special-type driver field)))
-
-;; TODO - It's weird that this one function requires other functions as args when the whole rest of the Metabase driver system
-;;        is built around protocols and record types. These functions should be put back in the `IDriver` protocol (where they
-;;        were originally) or in a special `IAnalyzeTable` protocol).
-(defn make-analyze-table
-  "Make a generic implementation of `analyze-table`."
-  {:style/indent 1}
-  [driver & {:keys [field-avg-length-fn field-percent-urls-fn calculate-row-count?]
-             :or   {field-avg-length-fn   (partial driver/default-field-avg-length driver)
-                    field-percent-urls-fn (partial driver/default-field-percent-urls driver)
-                    calculate-row-count?  true}}]
-  (fn [driver table new-field-ids]
-    (let [driver (assoc driver :field-avg-length field-avg-length-fn, :field-percent-urls field-percent-urls-fn)]
-      {:row_count (when calculate-row-count? (u/try-apply table-row-count table))
-       :fields    (for [{:keys [id] :as field} (table/fields table)]
-                    (let [new-field? (contains? new-field-ids id)]
-                      (cond->> {:id id}
-                               (test-for-cardinality? field new-field?) (test:cardinality-and-extract-field-values field)
-                               new-field?                               (test:new-field driver field))))})))
-
-(defn generic-analyze-table
-  "An implementation of `analyze-table` using the defaults (`default-field-avg-length` and `field-percent-urls`)."
-  [driver table new-field-ids]
-  ((make-analyze-table driver) driver table new-field-ids))
-
-
-
-(defn analyze-table-data-shape!
-  "Analyze the data shape for a single `Table`."
-  [driver {table-id :id, :as table}]
-  (let [new-field-ids (db/select-ids field/Field, :table_id table-id, :visibility_type [:not= "retired"], :last_analyzed nil)]
-    ;; TODO: this call should include the database
-    (when-let [table-stats (u/prog1 (driver/analyze-table driver table new-field-ids)
-                             (when <>
-                               (schema/validate i/AnalyzeTable <>)))]
-      ;; update table row count
-      (when (:row_count table-stats)
-        (db/update! table/Table table-id, :rows (:row_count table-stats)))
-
-      ;; update individual fields
-      (doseq [{:keys [id preview-display special-type values]} (:fields table-stats)]
-        ;; set Field metadata we may have detected
-        (when (and id (or preview-display special-type))
-          (db/update-non-nil-keys! field/Field id
-            ;; if a field marked `preview-display` as false then set the visibility type to `:details-only` (see models.field/visibility-types)
-            :visibility_type (when (false? preview-display) :details-only)
-            :special_type    special-type))
-        ;; handle field values, setting them if applicable otherwise clearing them
-        (if (and id values (pos? (count (filter identity values))))
-          (field-values/save-field-values! id values)
-          (field-values/clear-field-values! id))))
-
-    ;; update :last_analyzed for all fields in the table
-    (db/update-where! field/Field {:table_id        table-id
-                                   :visibility_type [:not= "retired"]}
-      :last_analyzed (u/new-sql-timestamp))))
-
-(defn analyze-data-shape-for-tables!
-  "Perform in-depth analysis on the data shape for all `Tables` in a given DATABASE.
-   This is dependent on what each database driver supports, but includes things like cardinality testing and table row counting.
-   The bulk of the work is done by the `(analyze-table ...)` function on the IDriver protocol."
-  [driver {database-id :id, :as database}]
-  (log/info (u/format-color 'blue "Analyzing data in %s database '%s' (this may take a while) ..." (name driver) (:name database)))
-
-  (let [start-time-ns         (System/nanoTime)
-        tables                (db/select table/Table, :db_id database-id, :active true, :visibility_type nil)
-        tables-count          (count tables)
-        finished-tables-count (atom 0)]
-    (doseq [{table-name :name, :as table} tables]
-      (try
-        (analyze-table-data-shape! driver table)
-        (catch Throwable t
-          (log/error "Unexpected error analyzing table" t))
-        (finally
-          (u/prog1 (swap! finished-tables-count inc)
-            (log/info (u/format-color 'blue "%s Analyzed table '%s'." (u/emoji-progress-bar <> tables-count) table-name))))))
-
-    (log/info (u/format-color 'blue "Analysis of %s database '%s' completed (%s)." (name driver) (:name database) (u/format-nanoseconds (- (System/nanoTime) start-time-ns))))))
diff --git a/src/metabase/sync_database/interface.clj b/src/metabase/sync_database/interface.clj
deleted file mode 100644
index bc931754bb0409478d29d9214457411d90b20f71..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database/interface.clj
+++ /dev/null
@@ -1,39 +0,0 @@
-(ns metabase.sync-database.interface
-  "Schemas describing the output expected from different DB sync functions."
-  (:require [metabase.util.schema :as su]
-            [schema.core :as s]))
-
-(def AnalyzeTable
-  "Schema for the expected output of `analyze-table`."
-  {(s/optional-key :row_count) (s/maybe s/Int)
-   (s/optional-key :fields)    [{:id                               su/IntGreaterThanZero
-                                 (s/optional-key :special-type)    su/FieldType
-                                 (s/optional-key :preview-display) s/Bool
-                                 (s/optional-key :values)          [s/Any]}]})
-
-(def DescribeDatabase
-  "Schema for the expected output of `describe-database`."
-  {:tables #{{:name   s/Str
-              :schema (s/maybe s/Str)}}})
-
-(def DescribeTableField
-  "Schema for a given Field as provided in `describe-table` or `analyze-table`."
-  {:name                           su/NonBlankString
-   :base-type                      su/FieldType
-   (s/optional-key :special-type)  su/FieldType
-   (s/optional-key :pk?)           s/Bool
-   (s/optional-key :nested-fields) #{(s/recursive #'DescribeTableField)}
-   (s/optional-key :custom)        {s/Any s/Any}})
-
-(def DescribeTable
-  "Schema for the expected output of `describe-table`."
-  {:name   su/NonBlankString
-   :schema (s/maybe su/NonBlankString)
-   :fields #{DescribeTableField}})
-
-(def DescribeTableFKs
-  "Schema for the expected output of `describe-table-fks`."
-  (s/maybe #{{:fk-column-name   su/NonBlankString
-              :dest-table       {:name   su/NonBlankString
-                                 :schema (s/maybe su/NonBlankString)}
-              :dest-column-name su/NonBlankString}}))
diff --git a/src/metabase/sync_database/introspect.clj b/src/metabase/sync_database/introspect.clj
deleted file mode 100644
index aaa0d2800a36b5a474cf40879781cc14d4b7e5c8..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database/introspect.clj
+++ /dev/null
@@ -1,211 +0,0 @@
-(ns metabase.sync-database.introspect
-  "Functions which handle the raw sync process."
-  (:require [clojure.set :as set]
-            [clojure.tools.logging :as log]
-            [metabase
-             [driver :as driver]
-             [util :as u]]
-            [metabase.models
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]]
-            [metabase.sync-database.interface :as i]
-            [schema.core :as schema]
-            [toucan.db :as db]))
-
-(defn- named-table
-  ([table]
-    (named-table (:schema table) (:name table)))
-  ([table-schema table-name]
-   (str (when table-schema (str table-schema ".")) table-name)))
-
-(defn- save-all-table-fks!
-  "Save *all* foreign-key data for a given RAW-TABLE.
-   NOTE: this function assumes that FKS is the complete set of fks in the RAW-TABLE."
-  [{table-id :id, database-id :database_id, :as table} fks]
-  {:pre [(integer? table-id) (integer? database-id)]}
-  (db/transaction
-   ;; start by simply resetting all fks and then we'll add them back as defined
-   (db/update-where! RawColumn {:raw_table_id table-id}
-     :fk_target_column_id nil)
-
-    ;; now lookup column-ids and set the fks on this table as needed
-    (doseq [{:keys [fk-column-name dest-column-name dest-table]} fks]
-      (when-let [source-column-id (db/select-one-id RawColumn, :raw_table_id table-id, :name fk-column-name)]
-        (when-let [dest-table-id (db/select-one-id RawTable, :database_id database-id, :schema (:schema dest-table), :name (:name dest-table))]
-          (when-let [dest-column-id (db/select-one-id RawColumn, :raw_table_id dest-table-id, :name dest-column-name)]
-            (log/debug (u/format-color 'cyan "Marking foreign key '%s.%s' -> '%s.%s'." (named-table table) fk-column-name (named-table dest-table) dest-column-name))
-            (db/update! RawColumn source-column-id
-              :fk_target_column_id dest-column-id)))))))
-
-(defn- save-all-table-columns!
-  "Save *all* `RawColumns` for a given RAW-TABLE.
-   NOTE: this function assumes that COLUMNS is the complete set of columns in the RAW-TABLE."
-  [{:keys [id]} columns]
-  {:pre [(integer? id) (coll? columns) (every? map? columns)]}
-  (db/transaction
-    (let [raw-column-name->id (db/select-field->id :name RawColumn, :raw_table_id id)]
-
-      ;; deactivate any columns which were removed
-      (doseq [[column-name column-id] (sort-by first raw-column-name->id)]
-        (when-not (some #(= column-name (:name %)) columns)
-          (log/debug (u/format-color 'cyan "Marked column %s as inactive." column-name))
-          (db/update! RawColumn column-id, :active false)))
-
-      ;; insert or update the remaining columns
-      (doseq [{column-name :name, :keys [base-type pk? special-type details]} (sort-by :name columns)]
-        (let [details (merge (or details {})
-                             {:base-type base-type}
-                             (when special-type {:special-type special-type}))
-              is_pk   (true? pk?)]
-          (if-let [column-id (get raw-column-name->id column-name)]
-            ;; column already exists, update it
-            (db/update! RawColumn column-id
-              :name    column-name
-              :is_pk   is_pk
-              :details details
-              :active  true)
-            ;; must be a new column, insert it
-            (db/insert! RawColumn
-              :raw_table_id id
-              :name         column-name
-              :is_pk        is_pk
-              :details      details
-              :active       true)))))))
-
-(defn- create-raw-table!
-  "Create a new `RawTable`, includes saving all specified `:columns`."
-  [database-id {table-name :name, table-schema :schema, :keys [details fields]}]
-  {:pre [(integer? database-id) (string? table-name)]}
-  (log/debug (u/format-color 'cyan "Found new table: %s" (named-table table-schema table-name)))
-  (let [table (db/insert! RawTable
-                :database_id  database-id
-                :schema       table-schema
-                :name         table-name
-                :details      (or details {})
-                :active       true)]
-    (save-all-table-columns! table fields)))
-
-(defn- update-raw-table!
-  "Update an existing `RawTable`, includes saving all specified `:columns`."
-  [{table-id :id, :as table} {:keys [details fields]}]
-  ;; NOTE: the schema+name of a table makes up the natural key and cannot be modified on update
-  ;;       if they were to be different we'd simply assume that's a new table instead
-  (db/update! RawTable table-id
-    :details (or details {})
-    :active  true)
-  ;; save columns
-  (save-all-table-columns! table fields))
-
-(defn- disable-raw-tables!
-  "Disable a list of `RawTable` ids, including all `RawColumns` associated with those tables."
-  [table-ids]
-  {:pre [(coll? table-ids) (every? integer? table-ids)]}
-  (let [table-ids (filter identity table-ids)]
-    (db/transaction
-     ;; disable the tables
-     (db/update-where! RawTable {:id [:in table-ids]}
-       :active false)
-     ;; whenever a table is disabled we need to disable all of its fields too (and remove fk references)
-     (db/update-where! RawColumn {:raw_table_id [:in table-ids]}
-       :active              false
-       :fk_target_column_id nil))))
-
-
-(defn introspect-raw-table-and-update!
-  "Introspect a single `RawTable` and persist the results as `RawTables` and `RawColumns`.
-   Uses the various `describe-*` functions on the IDriver protocol to gather information."
-  [driver database raw-table]
-  (let [table-def (select-keys raw-table [:schema :name])
-        table-def (if (contains? (driver/features driver) :dynamic-schema)
-                    ;; dynamic schemas are handled differently, we'll handle them elsewhere
-                    (assoc table-def :fields [])
-                    ;; static schema databases get introspected now
-                    (u/prog1 (driver/describe-table driver database table-def)
-                      (schema/validate i/DescribeTable <>)))]
-
-    ;; save the latest updates from the introspection
-    (if table-def
-      (update-raw-table! raw-table table-def)
-      ;; if we didn't get back a table-def then this table must not exist anymore
-      (disable-raw-tables! [(:id raw-table)]))
-
-    ;; if we support FKs then try updating those as well
-    (when (and table-def
-               (contains? (driver/features driver) :foreign-keys))
-      (when-let [table-fks (u/prog1 (driver/describe-table-fks driver database table-def)
-                             (schema/validate i/DescribeTableFKs <>))]
-        (save-all-table-fks! raw-table table-fks)))))
-
-
-;;; ------------------------------------------------------------ INTROSPECT-DATABASE-AND-UPDATE-RAW-TABLES! ------------------------------------------------------------
-
-(defn- introspect-tables!
-  "Introspect each table and save off the schema details we find."
-  [driver database tables existing-tables]
-  (let [tables-count          (count tables)
-        finished-tables-count (atom 0)]
-    (doseq [{table-schema :schema, table-name :name, :as table-def} tables]
-      (try
-        (let [table-def (if (contains? (driver/features driver) :dynamic-schema)
-                          ;; dynamic schemas are handled differently, we'll handle them elsewhere
-                          (assoc table-def :fields [])
-                          ;; static schema databases get introspected now
-                          (u/prog1 (driver/describe-table driver database table-def)
-                            (schema/validate i/DescribeTable <>)))]
-          (if-let [raw-table (get existing-tables (select-keys table-def [:schema :name]))]
-            (update-raw-table! raw-table table-def)
-            (create-raw-table! (:id database) table-def)))
-        (catch Throwable t
-          (log/error (u/format-color 'red "Unexpected error introspecting table schema: %s" (named-table table-schema table-name)) t))
-        (finally
-          (swap! finished-tables-count inc)
-          (log/info (u/format-color 'magenta "%s Synced table '%s'." (u/emoji-progress-bar @finished-tables-count tables-count) (named-table table-schema table-name))))))))
-
-(defn- disable-old-tables!
-  "Any tables/columns that previously existed but aren't included any more get disabled."
-  [tables existing-tables]
-  (when-let [tables-to-disable (not-empty (set/difference (set (keys existing-tables))
-                                                          (set (mapv #(select-keys % [:schema :name]) tables))))]
-    (log/info (u/format-color 'cyan "Disabled tables: %s" (mapv #(named-table (:schema %) (:name %)) tables-to-disable)))
-    (disable-raw-tables! (for [table-to-disable tables-to-disable]
-                           (:id (get existing-tables table-to-disable))))))
-
-
-(defn- sync-fks!
-  "Handle any FK syncing. This takes place after tables/fields are in place because we need the ids of the tables/fields to do FK references."
-  [driver database tables]
-  (when (contains? (driver/features driver) :foreign-keys)
-    (doseq [{table-schema :schema, table-name :name, :as table-def} tables]
-      (try
-        (when-let [table-fks (u/prog1 (driver/describe-table-fks driver database table-def)
-                               (schema/validate i/DescribeTableFKs <>))]
-          (when-let [raw-table (RawTable :database_id (:id database), :schema table-schema, :name table-name)]
-            (save-all-table-fks! raw-table table-fks)))
-        (catch Throwable t
-          (log/error (u/format-color 'red "Unexpected error introspecting table fks: %s" (named-table table-schema table-name)) t))))))
-
-(defn- db->tables [driver database]
-  (let [{:keys [tables]} (u/prog1 (driver/describe-database driver database)
-                           (schema/validate i/DescribeDatabase <>))]
-    ;; This is a protection against cases where the returned table-def has no :schema key
-    (map (u/rpartial update :schema identity) tables)))
-
-(defn- db->name+schema->table [database]
-  (into {} (for [{:keys [name schema] :as table} (db/select [RawTable :id :schema :name], :database_id (:id database))]
-             {{:name name, :schema schema} table})))
-
-
-(defn introspect-database-and-update-raw-tables!
-  "Introspect a `Database` and persist the results as `RawTables` and `RawColumns`.
-   Uses the various `describe-*` functions on the IDriver protocol to gather information."
-  [driver database]
-  (log/info (u/format-color 'magenta "Introspecting schema on %s database '%s' ..." (name driver) (:name database)))
-  (let [start-time-ns      (System/nanoTime)
-        tables             (db->tables driver database)
-        name+schema->table (db->name+schema->table database)]
-
-    (introspect-tables! driver database tables name+schema->table)
-    (disable-old-tables! tables name+schema->table)
-    (sync-fks! driver database tables)
-
-    (log/info (u/format-color 'magenta "Introspection completed on %s database '%s' (%s)" (name driver) (:name database) (u/format-nanoseconds (- (System/nanoTime) start-time-ns))))))
diff --git a/src/metabase/sync_database/sync.clj b/src/metabase/sync_database/sync.clj
deleted file mode 100644
index a97187a098a0321bfc1472f6e739556c5fa7b50f..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database/sync.clj
+++ /dev/null
@@ -1,277 +0,0 @@
-(ns metabase.sync-database.sync
-  (:require [clojure
-             [set :as set]
-             [string :as s]]
-            [clojure.tools.logging :as log]
-            [metabase
-             [db :as mdb]
-             [driver :as driver]
-             [util :as u]]
-            [metabase.models
-             [field :as field :refer [Field]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :as raw-table :refer [RawTable]]
-             [table :as table :refer [Table]]]
-            [toucan.db :as db])
-  (:import metabase.models.raw_table.RawTableInstance))
-
-;;; ------------------------------------------------------------ FKs ------------------------------------------------------------
-
-(defn- save-fks!
-  "Update all of the FK relationships present in DATABASE based on what's captured in the raw schema.
-   This will set :special_type :type/FK and :fk_target_field_id <field-id> for each found FK relationship.
-   NOTE: we currently overwrite any previously defined metadata when doing this."
-  [fk-sources]
-  {:pre [(coll? fk-sources)
-         (every? map? fk-sources)]}
-  (doseq [{fk-source-id :source-column, fk-target-id :target-column} fk-sources]
-    ;; TODO: eventually limit this to just "core" schema tables
-    (when-let [source-field-id (db/select-one-id Field, :raw_column_id fk-source-id, :visibility_type [:not= "retired"])]
-      (when-let [target-field-id (db/select-one-id Field, :raw_column_id fk-target-id, :visibility_type [:not= "retired"])]
-        (db/update! Field source-field-id
-          :special_type       :type/FK
-          :fk_target_field_id target-field-id)))))
-
-(defn- set-fk-relationships!
-  "Handle setting any FK relationships for a DATABASE. This must be done after fully syncing the tables/fields because we need all tables/fields in place."
-  [database]
-  (when-let [db-fks (db/select [RawColumn [:id :source-column] [:fk_target_column_id :target-column]]
-                      (mdb/join [RawColumn :raw_table_id] [RawTable :id])
-                      (db/qualify RawTable :database_id) (:id database)
-                      (db/qualify RawColumn :fk_target_column_id) [:not= nil])]
-    (save-fks! db-fks)))
-
-(defn- set-table-fk-relationships!
-  "Handle setting FK relationships for a specific TABLE."
-  [database-id raw-table-id]
-  (when-let [table-fks (db/select [RawColumn [:id :source-column] [:fk_target_column_id :target-column]]
-                         (mdb/join [RawColumn :raw_table_id] [RawTable :id])
-                         (db/qualify RawTable :database_id) database-id
-                         (db/qualify RawTable :id) raw-table-id
-                         (db/qualify RawColumn :fk_target_column_id) [:not= nil])]
-    (save-fks! table-fks)))
-
-
-;;; ------------------------------------------------------------ _metabase_metadata table ------------------------------------------------------------
-
-;; the _metabase_metadata table is a special table that can include Metabase metadata about the rest of the DB. This is used by the sample dataset
-
-(defn sync-metabase-metadata-table!
-  "Databases may include a table named `_metabase_metadata` (case-insentive) which includes descriptions or other metadata about the `Tables` and `Fields`
-   it contains. This table is *not* synced normally, i.e. a Metabase `Table` is not created for it. Instead, *this* function is called, which reads the data it
-   contains and updates the relevant Metabase objects.
-
-   The table should have the following schema:
-
-     column  | type    | example
-     --------+---------+-------------------------------------------------
-     keypath | varchar | \"products.created_at.description\"
-     value   | varchar | \"The date the product was added to our catalog.\"
-
-   `keypath` is of the form `table-name.key` or `table-name.field-name.key`, where `key` is the name of some property of `Table` or `Field`.
-
-   This functionality is currently only used by the Sample Dataset. In order to use this functionality, drivers must implement optional fn `:table-rows-seq`."
-  [driver database, ^RawTableInstance metabase-metadata-table]
-  (doseq [{:keys [keypath value]} (driver/table-rows-seq driver database metabase-metadata-table)]
-    ;; TODO: this does not support schemas in dbs :(
-    (let [[_ table-name field-name k] (re-matches #"^([^.]+)\.(?:([^.]+)\.)?([^.]+)$" keypath)]
-      ;; ignore legacy entries that try to set field_type since it's no longer part of Field
-      (when-not (= (keyword k) :field_type)
-        (try (when-not (if field-name
-                         (when-let [table-id (db/select-one-id Table
-                                               ;; TODO: this needs to support schemas
-                                               ;; TODO: eventually limit this to "core" schema tables
-                                               :db_id  (:id database)
-                                               :name   table-name
-                                               :active true)]
-                           (db/update-where! Field {:name     field-name
-                                                    :table_id table-id}
-                             (keyword k) value))
-                         (db/update-where! Table {:name  table-name
-                                                  :db_id (:id database)}
-                           (keyword k) value))
-               (log/error (u/format-color 'red "Error syncing _metabase_metadata: no matching keypath: %s" keypath)))
-             (catch Throwable e
-               (log/error (u/format-color 'red "Error in _metabase_metadata: %s" (.getMessage e)))))))))
-
-
-(defn is-metabase-metadata-table?
-  "Is this TABLE the special `_metabase_metadata` table?"
-  [table]
-  (= "_metabase_metadata" (s/lower-case (:name table))))
-
-
-(defn- maybe-sync-metabase-metadata-table!
-  "Sync the `_metabase_metadata` table, a special table with Metabase metadata, if present.
-   If per chance there were multiple `_metabase_metadata` tables in different schemas, just sync the first one we find."
-  [database raw-tables]
-  (when-let [metadata-table (first (filter is-metabase-metadata-table? raw-tables))]
-    (sync-metabase-metadata-table! (driver/engine->driver (:engine database)) database metadata-table)))
-
-
-;;; ------------------------------------------------------------ Fields ------------------------------------------------------------
-
-(defn- save-table-fields!
-  "Refresh all `Fields` in a given `Table` based on what's available in the associated `RawColumns`.
-
-   If a raw column has been disabled, the field is retired.
-   If there is a new raw column, then a new field is created.
-   If a raw column has been updated, then we update the values for the field."
-  [{table-id :id, raw-table-id :raw_table_id}]
-  (let [active-raw-columns   (raw-table/active-columns {:id raw-table-id})
-        active-column-ids    (set (map :id active-raw-columns))
-        raw-column-id->field (u/key-by :raw_column_id (db/select Field, :table_id table-id, :visibility_type [:not= "retired"], :parent_id nil))]
-    ;; retire any fields which were disabled in the schema (including child nested fields)
-    (doseq [[raw-column-id {field-id :id}] raw-column-id->field]
-      (when-not (contains? active-column-ids raw-column-id)
-        (db/update! Field {:where [:or [:= :id field-id]
-                                       [:= :parent_id field-id]]
-                           :set   {:visibility_type "retired"}})))
-
-    ;; create/update the active columns
-    (doseq [{raw-column-id :id, :keys [details], :as column} active-raw-columns]
-      ;; do a little bit of key renaming to match what's expected for a call to update/create-field
-      (let [column (-> (set/rename-keys column {:id    :raw-column-id
-                                                :is_pk :pk?})
-                       (assoc :base-type    (keyword (:base-type details))
-                              :special-type (keyword (:special-type details))))]
-        (if-let [existing-field (get raw-column-id->field raw-column-id)]
-          ;; field already exists, so we UPDATE it
-          (field/update-field-from-field-def! existing-field column)
-          ;; looks like a new field, so we CREATE it
-          (field/create-field-from-field-def! table-id (assoc column :raw-column-id raw-column-id)))))))
-
-
-;;; ------------------------------------------------------------  "Crufty" Tables ------------------------------------------------------------
-
-;; Crufty tables are ones we know are from frameworks like Rails or Django and thus automatically mark as `:cruft`
-
-(def ^:private ^:const crufty-table-patterns
-  "Regular expressions that match Tables that should automatically given the `visibility-type` of `:cruft`.
-   This means they are automatically hidden to users (but can be unhidden in the admin panel).
-   These `Tables` are known to not contain useful data, such as migration or web framework internal tables."
-  #{;; Django
-    #"^auth_group$"
-    #"^auth_group_permissions$"
-    #"^auth_permission$"
-    #"^django_admin_log$"
-    #"^django_content_type$"
-    #"^django_migrations$"
-    #"^django_session$"
-    #"^django_site$"
-    #"^south_migrationhistory$"
-    #"^user_groups$"
-    #"^user_user_permissions$"
-    ;; Drupal
-    #".*_cache$"
-    #".*_revision$"
-    #"^advagg_.*"
-    #"^apachesolr_.*"
-    #"^authmap$"
-    #"^autoload_registry.*"
-    #"^batch$"
-    #"^blocked_ips$"
-    #"^cache.*"
-    #"^captcha_.*"
-    #"^config$"
-    #"^field_revision_.*"
-    #"^flood$"
-    #"^node_revision.*"
-    #"^queue$"
-    #"^rate_bot_.*"
-    #"^registry.*"
-    #"^router.*"
-    #"^semaphore$"
-    #"^sequences$"
-    #"^sessions$"
-    #"^watchdog$"
-    ;; Rails / Active Record
-    #"^schema_migrations$"
-    ;; PostGIS
-    #"^spatial_ref_sys$"
-    ;; nginx
-    #"^nginx_access_log$"
-    ;; Liquibase
-    #"^databasechangelog$"
-    #"^databasechangeloglock$"
-    ;; Lobos
-    #"^lobos_migrations$"})
-
-(defn- is-crufty-table?
-  "Should we give newly created TABLE a `visibility_type` of `:cruft`?"
-  [table]
-  (boolean (some #(re-find % (s/lower-case (:name table))) crufty-table-patterns)))
-
-
-;;; ------------------------------------------------------------ Table Syncing + Saving ------------------------------------------------------------
-
-(defn- table-ids-to-remove
-  "Return a set of active `Table` IDs for Database with DATABASE-ID whose backing RawTable is now inactive."
-  [database-id]
-  (db/select-ids Table
-    (mdb/join [Table :raw_table_id] [RawTable :id])
-    :db_id database-id
-    (db/qualify Table :active) true
-    (db/qualify RawTable :active) false))
-
-(defn retire-tables!
-  "Retire any `Table` who's `RawTable` has been deactivated.
-  This occurs when a database introspection reveals the table is no longer available."
-  [{database-id :id}]
-  {:pre [(integer? database-id)]}
-  ;; retire tables (and their fields) as needed
-  (table/retire-tables! (table-ids-to-remove database-id)))
-
-
-(defn update-data-models-for-table!
-  "Update the working `Table` and `Field` metadata for a given `Table` based on the latest raw schema information.
-   This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed."
-  [{raw-table-id :raw_table_id, table-id :id, :as existing-table}]
-  (when-let [{database-id :database_id, :as raw-table} (RawTable raw-table-id)]
-    (try
-      (if-not (:active raw-table)
-        ;; looks like the table has been deactivated, so lets retire this Table and its fields
-        (table/retire-tables! #{table-id})
-        ;; otherwise update based on the RawTable/RawColumn information
-        (do
-          (save-table-fields! (table/update-table-from-tabledef! existing-table raw-table))
-          (set-table-fk-relationships! database-id raw-table-id)))
-      (catch Throwable t
-        (log/error (u/format-color 'red "Unexpected error syncing table") t)))))
-
-
-(defn- create-and-update-tables!
-  "Create/update tables (and their fields)."
-  [database existing-tables raw-tables]
-  (doseq [{raw-table-id :id, :as raw-table} raw-tables
-          :when                             (not (is-metabase-metadata-table? raw-table))]
-    (try
-      (save-table-fields! (if-let [existing-table (get existing-tables raw-table-id)]
-                            ;; table already exists, update it
-                            (table/update-table-from-tabledef! existing-table raw-table)
-                            ;; must be a new table, insert it
-                            (table/create-table-from-tabledef! (:id database) (assoc raw-table
-                                                                                :raw-table-id    raw-table-id
-                                                                                :visibility-type (when (is-crufty-table? raw-table)
-                                                                                                   :cruft)))))
-      (catch Throwable e
-        (log/error (u/format-color 'red "Unexpected error syncing table") e)))))
-
-
-(defn update-data-models-from-raw-tables!
-  "Update the working `Table` and `Field` metadata for *all* tables in a `Database` based on the latest raw schema information.
-   This function uses the data in `RawTable` and `RawColumn` to update the working data models as needed."
-  [{database-id :id, :as database}]
-  {:pre [(integer? database-id)]}
-  ;; quick sanity check that this is indeed a :dynamic-schema database
-  (when (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema)
-    (throw (IllegalStateException. "This function cannot be called on databases which are :dynamic-schema")))
-  ;; retire any tables which were disabled
-  (retire-tables! database)
-  ;; ok, now create new tables as needed and set FK relationships
-  (let [raw-tables          (raw-table/active-tables database-id)
-        raw-table-id->table (u/key-by :raw_table_id (db/select Table, :db_id database-id, :active true))]
-    (create-and-update-tables! database raw-table-id->table raw-tables)
-    (set-fk-relationships! database)
-    ;; HACK! we can't sync the _metabase_metadata table until all the "Raw" Tables/Columns are backed
-    (maybe-sync-metabase-metadata-table! database raw-tables)))
diff --git a/src/metabase/sync_database/sync_dynamic.clj b/src/metabase/sync_database/sync_dynamic.clj
deleted file mode 100644
index 1d3d7c7a1dd57d41641e78c1b9bcd47b1d2e1ef6..0000000000000000000000000000000000000000
--- a/src/metabase/sync_database/sync_dynamic.clj
+++ /dev/null
@@ -1,113 +0,0 @@
-(ns metabase.sync-database.sync-dynamic
-  "Functions for syncing drivers with `:dynamic-schema` which have no fixed definition of their data."
-  (:require [clojure.set :as set]
-            [clojure.tools.logging :as log]
-            [metabase
-             [driver :as driver]
-             [util :as u]]
-            [metabase.models
-             [field :as field :refer [Field]]
-             [raw-table :as raw-table :refer [RawTable]]
-             [table :as table :refer [Table]]]
-            [metabase.sync-database
-             [interface :as i]
-             [sync :as sync]]
-            [schema.core :as schema]
-            [toucan.db :as db]))
-
-(defn- save-nested-fields!
-  "Save any nested `Fields` for a given parent `Field`.
-   All field-defs provided are assumed to be children of the given FIELD."
-  [{parent-id :id, table-id :table_id, :as parent-field} nested-field-defs]
-  ;; NOTE: remember that we never retire any fields in dynamic-schema tables
-  (let [existing-field-name->field (u/key-by :name (db/select Field, :parent_id parent-id))]
-    (u/prog1 (set/difference (set (map :name nested-field-defs)) (set (keys existing-field-name->field)))
-      (when (seq <>)
-        (log/debug (u/format-color 'blue "Found new nested fields for field '%s': %s" (:name parent-field) <>))))
-
-    (doseq [nested-field-def nested-field-defs]
-      (let [{:keys [nested-fields], :as nested-field-def} (assoc nested-field-def :parent-id parent-id)]
-        ;; NOTE: this recursively creates fields until we hit the end of the nesting
-        (if-let [existing-field (existing-field-name->field (:name nested-field-def))]
-          ;; field already exists, so we UPDATE it
-          (cond-> (field/update-field-from-field-def! existing-field nested-field-def)
-                  nested-fields (save-nested-fields! nested-fields))
-          ;; looks like a new field, so we CREATE it
-          (cond-> (field/create-field-from-field-def! table-id nested-field-def)
-                  nested-fields (save-nested-fields! nested-fields)))))))
-
-
-(defn- save-table-fields!
-  "Save a collection of `Fields` for the given `Table`.
-   NOTE: we never retire/disable any fields in a dynamic schema database, so this process will only add/update `Fields`."
-  [{table-id :id} field-defs]
-  {:pre [(integer? table-id)
-         (coll? field-defs)
-         (every? map? field-defs)]}
-  (let [field-name->field (u/key-by :name (db/select Field, :table_id table-id, :parent_id nil))]
-    ;; NOTE: with dynamic schemas we never disable fields
-    ;; create/update the fields
-    (doseq [{field-name :name, :keys [nested-fields], :as field-def} field-defs]
-      (if-let [existing-field (get field-name->field field-name)]
-        ;; field already exists, so we UPDATE it
-        (cond-> (field/update-field-from-field-def! existing-field field-def)
-                nested-fields (save-nested-fields! nested-fields))
-        ;; looks like a new field, so we CREATE it
-        (cond-> (field/create-field-from-field-def! table-id field-def)
-                nested-fields (save-nested-fields! nested-fields))))))
-
-
-(defn scan-table-and-update-data-model!
-  "Update the working `Table` and `Field` metadata for the given `Table`."
-  [driver database {raw-table-id :raw_table_id, table-id :id, :as existing-table}]
-  (when-let [raw-table (RawTable raw-table-id)]
-    (try
-      (if-not (:active raw-table)
-        ;; looks like table was deactivated, so lets retire this Table
-        (table/retire-tables! #{table-id})
-        ;; otherwise we ask the driver for an updated table description and save that info
-        (let [table-def (u/prog1 (driver/describe-table driver database (select-keys existing-table [:name :schema]))
-                          (schema/validate i/DescribeTable <>))]
-          (-> (table/update-table-from-tabledef! existing-table raw-table)
-              (save-table-fields! (:fields table-def)))))
-      ;; NOTE: dynamic schemas don't have FKs
-      (catch Throwable t
-        (log/error (u/format-color 'red "Unexpected error scanning table") t)))))
-
-
-(defn scan-database-and-update-data-model!
-  "Update the working `Table` and `Field` metadata for *all* tables in the given `Database`."
-  [driver {database-id :id, :as database}]
-  {:pre [(integer? database-id)]}
-
-  ;; quick sanity check that this is indeed a :dynamic-schema database
-  (when-not (driver/driver-supports? driver :dynamic-schema)
-    (throw (IllegalStateException. "This function cannot be called on databases which are not :dynamic-schema")))
-
-  ;; retire any tables which are no longer with us
-  (sync/retire-tables! database)
-
-  (let [raw-tables          (raw-table/active-tables database-id)
-        raw-table-id->table (u/key-by :raw_table_id (db/select Table, :db_id database-id, :active true))]
-    ;; create/update tables (and their fields)
-    ;; NOTE: we make sure to skip the _metabase_metadata table here.  it's not a normal table.
-    (doseq [{raw-table-id :id, :as raw-table} raw-tables
-            :when                             (not (sync/is-metabase-metadata-table? raw-table))]
-      (try
-        (let [table-def (u/prog1 (driver/describe-table driver database (select-keys raw-table [:name :schema]))
-                          (schema/validate i/DescribeTable <>))]
-          (if-let [existing-table (get raw-table-id->table raw-table-id)]
-            ;; table already exists, update it
-            (-> (table/update-table-from-tabledef! existing-table raw-table)
-                (save-table-fields! (:fields table-def)))
-            ;; must be a new table, insert it
-            (-> (table/create-table-from-tabledef! database-id (assoc raw-table :raw-table-id raw-table-id))
-                (save-table-fields! (:fields table-def)))))
-        (catch Throwable t
-          (log/error (u/format-color 'red "Unexpected error scanning table") t))))
-
-    ;; NOTE: dynamic schemas don't have FKs
-
-    ;; NOTE: if per chance there were multiple _metabase_metadata tables in different schemas, we just take the first
-    (when-let [metabase-metadata-table (first (filter sync/is-metabase-metadata-table? raw-tables))]
-      (sync/sync-metabase-metadata-table! driver database metabase-metadata-table))))
diff --git a/src/metabase/task/sync_databases.clj b/src/metabase/task/sync_databases.clj
index 9fd4816d2b518bfc1ab77ca234d036d33f24145e..0589d11391169fa77284d77b1c30b1eb1b13d920 100644
--- a/src/metabase/task/sync_databases.clj
+++ b/src/metabase/task/sync_databases.clj
@@ -7,7 +7,7 @@
             [clojurewerkz.quartzite.schedule.cron :as cron]
             [metabase
              [driver :as driver]
-             [sync-database :as sync-database]
+             [sync :as sync]
              [task :as task]]
             [metabase.models.database :refer [Database]]
             [toucan.db :as db]))
@@ -23,12 +23,11 @@
   (doseq [database (db/select Database, :is_sample false)] ; skip Sample Dataset DB
     (try
       ;; NOTE: this happens synchronously for now to avoid excessive load if there are lots of databases
-      (if-not (and (zero? (t/hour (t/now)))
-                   (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema))
-        ;; most of the time we do a quick sync and avoid the lengthy analysis process
-        (sync-database/sync-database! database :full-sync? false)
-        ;; at midnight we run the full sync
-        (sync-database/sync-database! database :full-sync? true))
+      ;; most of the time we do a quick sync and avoid the lengthy analysis process
+      ;; at midnight we run the full sync
+      (let [full-sync? (not (and (zero? (t/hour (t/now)))
+                                 (driver/driver-supports? (driver/engine->driver (:engine database)) :dynamic-schema)))]
+        (sync/sync-database! database {:full-sync? full-sync?}))
       (catch Throwable e
         (log/error (format "Error syncing database %d: " (:id database)) e)))))
 
diff --git a/src/metabase/types.clj b/src/metabase/types.clj
index 5c7fe72d3e5561f01073af9a73856c713fec82e1..6c7c8d450319ff1b18acb2b774c85d39e92be8d6 100644
--- a/src/metabase/types.clj
+++ b/src/metabase/types.clj
@@ -5,7 +5,6 @@
 (derive :type/Dictionary :type/Collection)
 (derive :type/Array :type/Collection)
 
-
 ;;; Numeric Types
 
 (derive :type/Number :type/*)
@@ -60,6 +59,7 @@
 ;;; Other
 
 (derive :type/Boolean :type/*)
+(derive :type/Enum :type/*)
 
 ;;; Text-Like Types: Things that should be displayed as text for most purposes but that *shouldn't* support advanced filter options like starts with / contains
 
diff --git a/src/metabase/util.clj b/src/metabase/util.clj
index 36e10258a69fa49e8ec82e96f3519b75ada5deff..a8d3dbb0888d901744b698fbf4f7a14ce694b75f 100644
--- a/src/metabase/util.clj
+++ b/src/metabase/util.clj
@@ -540,36 +540,6 @@
   (^String [color-symb x]
    (colorize color-symb (pprint-to-str x))))
 
-(def emoji-progress-bar
-  "Create a string that shows progress for something, e.g. a database sync process.
-
-     (emoji-progress-bar 10 40)
-       -> \"[************······································] 😒   25%"
-  (let [^:const meter-width    50
-        ^:const progress-emoji ["😱"  ; face screaming in fear
-                                "😢"  ; crying face
-                                "😞"  ; disappointed face
-                                "😒"  ; unamused face
-                                "😕"  ; confused face
-                                "😐"  ; neutral face
-                                "😬"  ; grimacing face
-                                "😌"  ; relieved face
-                                "😏"  ; smirking face
-                                "😋"  ; face savouring delicious food
-                                "😊"  ; smiling face with smiling eyes
-                                "😍"  ; smiling face with heart shaped eyes
-                                "😎"] ; smiling face with sunglasses
-        percent-done->emoji    (fn [percent-done]
-                                 (progress-emoji (int (math/round (* percent-done (dec (count progress-emoji)))))))]
-    (fn [completed total]
-      (let [percent-done (float (/ completed total))
-            filleds      (int (* percent-done meter-width))
-            blanks       (- meter-width filleds)]
-        (str "["
-             (s/join (repeat filleds "*"))
-             (s/join (repeat blanks "·"))
-             (format "] %s  %3.0f%%" (emoji (percent-done->emoji percent-done)) (* percent-done 100.0)))))))
-
 
 (defprotocol ^:private IFilteredStacktrace
   (filtered-stacktrace [this]
@@ -758,12 +728,6 @@
   `(do-with-auto-retries ~num-retries
      (fn [] ~@body)))
 
-(defn string-or-keyword?
-  "Is X a `String` or a `Keyword`?"
-  [x]
-  (or (string? x)
-      (keyword? x)))
-
 (defn key-by
   "Convert a sequential COLL to a map of `(f item)` -> `item`.
    This is similar to `group-by`, but the resultant map's values are single items from COLL rather than sequences of items.
@@ -886,3 +850,11 @@
   [m & {:keys [present non-nil]}]
   (merge (select-keys m present)
          (select-non-nil-keys m non-nil)))
+
+(defn order-of-magnitude
+  "Return the order of magnitude as a power of 10 of a given number."
+  [x]
+  (if (zero? x)
+    0
+    (long (math/floor (/ (Math/log (math/abs x))
+                         (Math/log 10))))))
diff --git a/test/metabase/api/card_test.clj b/test/metabase/api/card_test.clj
index 1553fa6bebef29291cf8b4e457e784715dc2ba8d..1aaff3c520d77933a3fa6f18cdcbaf6ecb377f6e 100644
--- a/test/metabase/api/card_test.clj
+++ b/test/metabase/api/card_test.clj
@@ -197,7 +197,21 @@
             :dataset_query          (mbql-count-query database-id table-id)
             :visualization_settings {:global {:title nil}}
             :database_id            database-id ; these should be inferred automatically
-            :table_id               table-id})
+            :table_id               table-id
+            :labels                 []
+            :can_write              true,
+            :dashboard_count        0,
+            :collection             nil
+            :creator                (match-$ (fetch-user :rasta)
+                                      {:common_name  "Rasta Toucan"
+                                       :is_superuser false
+                                       :is_qbnewb    true
+                                       :last_login   $
+                                       :last_name    "Toucan"
+                                       :first_name   "Rasta"
+                                       :date_joined  $
+                                       :email        "rasta@metabase.com"
+                                       :id           $})})
     (with-self-cleaning-random-card-name [_ card-name]
       (dissoc ((user->client :rasta) :post 200 "card" (card-with-name-and-query card-name (mbql-count-query database-id table-id)))
               :created_at :updated_at :id))))
diff --git a/test/metabase/api/database_test.clj b/test/metabase/api/database_test.clj
index 9e95787fc78d9e7c6b72d21575dde3f08b17c320..52086a920eda4aa379cd31a604608502563bdf84 100644
--- a/test/metabase/api/database_test.clj
+++ b/test/metabase/api/database_test.clj
@@ -73,7 +73,7 @@
              :id         $
              :details    $
              :updated_at $
-             :features   (mapv name (driver/features (driver/engine->driver (:engine db))))}))))
+             :features   (map name (driver/features (driver/engine->driver (:engine db))))}))))
 
 
 ;; # DB LIFECYCLE ENDPOINTS
@@ -156,16 +156,23 @@
 
 
 ;; TODO - this is a test code smell, each test should clean up after itself and this step shouldn't be neccessary. One day we should be able to remove this!
-;; If you're writing a test that needs this, fix your brain and your test
+;; If you're writing a NEW test that needs this, fix your brain and your test!
+;; To reïterate, this is BAD BAD BAD BAD BAD BAD! It will break tests if you use it! Don't use it!
 (defn- ^:deprecated delete-randomly-created-databases!
   "Delete all the randomly created Databases we've made so far. Optionally specify one or more IDs to SKIP."
   [& {:keys [skip]}]
-  (db/delete! Database :id [:not-in (into (set skip)
-                                          (for [engine datasets/all-valid-engines
-                                                :let   [id (datasets/when-testing-engine engine
-                                                             (:id (get-or-create-test-data-db! (driver/engine->driver engine))))]
-                                                :when  id]
-                                            id))]))
+  (let [ids-to-skip (into (set skip)
+                          (for [engine datasets/all-valid-engines
+                                :let   [id (datasets/when-testing-engine engine
+                                             (:id (get-or-create-test-data-db! (driver/engine->driver engine))))]
+                                :when  id]
+                            id))]
+    (when-let [dbs (seq (db/select [Database :name :engine :id] :id [:not-in ids-to-skip]))]
+      (println (u/format-color 'red (str "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+                                         "WARNING: deleting randomly created databases:\n%s\n"
+                                         "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n")
+                 (u/pprint-to-str dbs))))
+    (db/delete! Database :id [:not-in ids-to-skip])))
 
 
 ;; ## GET /api/database
@@ -238,6 +245,19 @@
    :preview_display    true
    :parent_id          nil})
 
+(defn- field-details [field]
+  (merge
+   default-field-details
+   (match-$ (hydrate/hydrate field :values)
+     {:updated_at         $
+      :id                 $
+      :raw_column_id      $
+      :created_at         $
+      :last_analyzed      $
+      :fingerprint        $
+      :fk_target_field_id $
+      :values             $})))
+
 ;; ## GET /api/meta/table/:id/query_metadata
 ;; TODO - add in example with Field :values
 (expect
@@ -254,36 +274,20 @@
                                   {:schema       "PUBLIC"
                                    :name         "CATEGORIES"
                                    :display_name "Categories"
-                                   :fields       [(merge default-field-details
-                                                         (match-$ (hydrate/hydrate (Field (id :categories :id)) :values)
-                                                           {:table_id           (id :categories)
-                                                            :special_type       "type/PK"
-                                                            :name               "ID"
-                                                            :display_name       "ID"
-                                                            :updated_at         $
-                                                            :id                 $
-                                                            :raw_column_id      $
-                                                            :created_at         $
-                                                            :last_analyzed      $
-                                                            :base_type          "type/BigInteger"
-                                                            :visibility_type    "normal"
-                                                            :fk_target_field_id $
-                                                            :values             $}))
-                                                  (merge default-field-details
-                                                         (match-$ (hydrate/hydrate (Field (id :categories :name)) :values)
-                                                           {:table_id           (id :categories)
-                                                            :special_type       "type/Name"
-                                                            :name               "NAME"
-                                                            :display_name       "Name"
-                                                            :updated_at         $
-                                                            :id                 $
-                                                            :raw_column_id      $
-                                                            :created_at         $
-                                                            :last_analyzed      $
-                                                            :base_type          "type/Text"
-                                                            :visibility_type    "normal"
-                                                            :fk_target_field_id $
-                                                            :values             $}))]
+                                   :fields       [(assoc (field-details (Field (id :categories :id)))
+                                                    :table_id        (id :categories)
+                                                    :special_type    "type/PK"
+                                                    :name            "ID"
+                                                    :display_name    "ID"
+                                                    :base_type       "type/BigInteger"
+                                                    :visibility_type "normal")
+                                                  (assoc (field-details (Field (id :categories :name)))
+                                                    :table_id           (id :categories)
+                                                    :special_type       "type/Name"
+                                                    :name               "NAME"
+                                                    :display_name       "Name"
+                                                    :base_type          "type/Text"
+                                                    :visibility_type    "normal")]
                                    :segments     []
                                    :metrics      []
                                    :rows         75
@@ -343,7 +347,7 @@
   (merge {:id           (format "card__%d" (u/get-id card))
           :db_id        database/virtual-id
           :display_name (:name card)
-          :schema       "All questions"
+          :schema       "Everything else"
           :description  nil}
          kvs))
 
@@ -356,6 +360,20 @@
     ;; Now fetch the database list. The 'Saved Questions' DB should be last on the list
     (last ((user->client :crowberto) :get 200 "database" :include_cards true))))
 
+;; Make sure saved questions are NOT included if the setting is disabled
+(expect
+  nil
+  (tt/with-temp Card [card (card-with-native-query "Kanye West Quote Views Per Month")]
+    (tu/with-temporary-setting-values [enable-nested-queries false]
+      ;; run the Card which will populate its result_metadata column
+      ((user->client :crowberto) :post 200 (format "card/%d/query" (u/get-id card)))
+      ;; Now fetch the database list. The 'Saved Questions' DB should NOT be in the list
+      (some (fn [database]
+              (when (= (u/get-id database) database/virtual-id)
+                database))
+            ((user->client :crowberto) :get 200 "database" :include_cards true)))))
+
+
 ;; make sure that GET /api/database?include_cards=true groups pretends COLLECTIONS are SCHEMAS
 (tt/expect-with-temp [Collection [stamp-collection {:name "Stamps"}]
                       Collection [coin-collection  {:name "Coins"}]
diff --git a/test/metabase/api/dataset_test.clj b/test/metabase/api/dataset_test.clj
index ca0ab8aef463964486c23b0662740c1253c9808a..f30842851a5199037df2fe8c77146c4048af17ab 100644
--- a/test/metabase/api/dataset_test.clj
+++ b/test/metabase/api/dataset_test.clj
@@ -1,10 +1,12 @@
 (ns metabase.api.dataset-test
   "Unit tests for /api/dataset endpoints."
-  (:require [expectations :refer :all]
+  (:require [cheshire.generate :as generate]
+            [dk.ative.docjure.spreadsheet :as spreadsheet]
+            [expectations :refer :all]
             [medley.core :as m]
             [metabase.api.dataset :refer [default-query-constraints]]
             [metabase.models.query-execution :refer [QueryExecution]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test
              [data :refer :all]
              [util :as tu]]
@@ -54,7 +56,7 @@
    {:data                   {:rows    [[1000]]
                              :columns ["count"]
                              :cols    [{:base_type "type/Integer", :special_type "type/Number", :name "count", :display_name "count", :id nil, :table_id nil,
-                                        :description nil, :target nil, :extra_info {}, :source "aggregation"}]
+                                        :description nil, :target nil, :extra_info {}, :source "aggregation", :remapped_from nil, :remapped_to nil}]
                              :native_form true}
     :row_count              1
     :status                 "completed"
@@ -128,3 +130,26 @@
                                                                         :native   {:query "foobar"}})]
     [(check-error-message (format-response result))
      (check-error-message (format-response (most-recent-query-execution)))]))
+
+
+;;; Make sure that we're piggybacking off of the JSON encoding logic when encoding strange values in XLSX (#5145, #5220, #5459)
+(defrecord ^:private SampleNastyClass [^String v])
+
+(generate/add-encoder
+ SampleNastyClass
+ (fn [obj, ^com.fasterxml.jackson.core.JsonGenerator json-generator]
+   (.writeString json-generator (:v obj))))
+
+(defrecord ^:private AnotherNastyClass [^String v])
+
+(expect
+  [{"Values" "values"}
+   {"Values" "Hello XLSX World!"}   ; should use the JSON encoding implementation for object
+   {"Values" "{:v \"No Encoder\"}"} ; fall back to the implementation of `str` for an object if no JSON encoder exists rather than barfing
+   {"Values" "ABC"}]
+  (->> (spreadsheet/create-workbook "Results" [["values"]
+                                               [(SampleNastyClass. "Hello XLSX World!")]
+                                               [(AnotherNastyClass. "No Encoder")]
+                                               ["ABC"]])
+       (spreadsheet/select-sheet "Results")
+       (spreadsheet/select-columns {:A "Values"})))
diff --git a/test/metabase/api/embed_test.clj b/test/metabase/api/embed_test.clj
index c8f87209c8f825dd5782b8b84c7d1730eb5a61e5..4be9dd90f5aa193645130ecb4002a7402aece2fc 100644
--- a/test/metabase/api/embed_test.clj
+++ b/test/metabase/api/embed_test.clj
@@ -57,7 +57,8 @@
   ([]
    {:data       {:columns ["count"]
                  :cols    [{:description nil, :table_id nil, :special_type "type/Number", :name "count", :source "aggregation",
-                            :extra_info {}, :id nil, :target nil, :display_name "count", :base_type "type/Integer"}]
+                            :extra_info {}, :id nil, :target nil, :display_name "count", :base_type "type/Integer"
+                            :remapped_from nil, :remapped_to nil}]
                  :rows    [[100]]}
     :json_query {:parameters []}
     :status     "completed"})
diff --git a/test/metabase/api/field_test.clj b/test/metabase/api/field_test.clj
index 1d43f3cd99f1db4baa170efe84dfdf7a67d7f79c..6a2b2b7c19d104a3bc4a170de4b3e7ab65aac24e 100644
--- a/test/metabase/api/field_test.clj
+++ b/test/metabase/api/field_test.clj
@@ -10,11 +10,16 @@
              [util :as tu]]
             [metabase.test.data.users :refer :all]
             [ring.util.codec :as codec]
-            [toucan.db :as db]
+            [toucan
+             [db :as db]
+             [hydrate :refer [hydrate]]]
             [toucan.util.test :as tt]))
 
 ;; Helper Fns
 
+(def ^:private default-field-values
+  {:id true, :created_at true, :updated_at true, :field_id true})
+
 (defn- db-details []
   (tu/match-$ (db)
     {:created_at         $
@@ -36,6 +41,7 @@
     {:description        nil
      :table_id           (id :users)
      :raw_column_id      $
+     :fingerprint        $
      :table              (tu/match-$ (Table (id :users))
                            {:description             nil
                             :entity_type             nil
@@ -83,25 +89,28 @@
 ;; ## PUT /api/field/:id
 
 (defn- simple-field-details [field]
-  (select-keys field [:name :display_name :description :visibility_type :special_type]))
+  (select-keys field [:name :display_name :description :visibility_type :special_type :fk_target_field_id]))
 
 ;; test that we can do basic field update work, including unsetting some fields such as special-type
 (expect
-  [{:name            "Field Test"
-    :display_name    "Field Test"
-    :description     nil
-    :special_type    nil
-    :visibility_type :normal}
-   {:name            "Field Test"
-    :display_name    "yay"
-    :description     "foobar"
-    :special_type    :type/Name
-    :visibility_type :sensitive}
-   {:name            "Field Test"
-    :display_name    "yay"
-    :description     nil
-    :special_type    nil
-    :visibility_type :sensitive}]
+  [{:name               "Field Test"
+    :display_name       "Field Test"
+    :description        nil
+    :special_type       nil
+    :visibility_type    :normal
+    :fk_target_field_id nil}
+   {:name               "Field Test"
+    :display_name       "yay"
+    :description        "foobar"
+    :special_type       :type/Name
+    :visibility_type    :sensitive
+    :fk_target_field_id nil}
+   {:name               "Field Test"
+    :display_name       "yay"
+    :description        nil
+    :special_type       nil
+    :visibility_type    :sensitive
+    :fk_target_field_id nil}]
   (tt/with-temp* [Field [{field-id :id} {:name "Field Test"}]]
     (let [original-val (simple-field-details (Field field-id))]
       ;; set it
@@ -150,72 +159,381 @@
 ;; ## GET /api/field/:id/values
 ;; Should return something useful for a field that has special_type :type/Category
 (expect
-  {:field_id              (id :venues :price)
-   :human_readable_values {}
-   :values                [1 2 3 4]
-   :id                    (field-values-id :venues :price)}
+  (merge default-field-values {:values (mapv vector [1 2 3 4])})
   (do
     ;; clear out existing human_readable_values in case they're set
     (db/update! FieldValues (field-values-id :venues :price)
       :human_readable_values nil)
     ;; now update the values via the API
-    (-> ((user->client :rasta) :get 200 (format "field/%d/values" (id :venues :price)))
-        (dissoc :created_at :updated_at))))
+    (tu/boolean-ids-and-timestamps ((user->client :rasta) :get 200 (format "field/%d/values" (id :venues :price))))))
 
 ;; Should return nothing for a field whose special_type is *not* :type/Category
 (expect
-  {:values                {}
-   :human_readable_values {}}
+  {:values []}
   ((user->client :rasta) :get 200 (format "field/%d/values" (id :venues :id))))
 
+;; Sensitive fields do not have field values and should return empty
+(expect
+  {:values []}
+  ((user->client :rasta) :get 200 (format "field/%d/values" (id :users :password))))
+
+(defn- num->$ [num-seq]
+  (mapv (fn [idx]
+          (vector idx (apply str (repeat idx \$))))
+        num-seq))
+
+(def category-field {:name "Field Test" :base_type :type/Integer :special_type :type/Category})
+
+;; ## POST /api/field/:id/values
+
+;; Human readable values are optional
+(expect
+  [(merge default-field-values {:values (map vector (range 5 10))})
+   {:status "success"}
+   (merge default-field-values {:values (map vector (range 1 5))})]
+  (tt/with-temp* [Field [{field-id :id} category-field]
+                  FieldValues [{field-value-id :id} {:values (range 5 10), :field_id field-id}]]
+    (mapv tu/boolean-ids-and-timestamps
+          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+           ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
+            {:values (map vector (range 1 5))})
+           ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
+
+;; Existing field values can be updated (with their human readable values)
+(expect
+  [(merge default-field-values {:values (map vector (range 1 5))})
+   {:status "success"}
+   (merge default-field-values {:values (num->$ (range 1 5))})]
+  (tt/with-temp* [Field [{field-id :id} category-field]
+                  FieldValues [{field-value-id :id} {:values (range 1 5), :field_id field-id}]]
+    (mapv tu/boolean-ids-and-timestamps
+          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+           ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
+            {:values (num->$ (range 1 5))})
+           ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
+
+;; Field values are created when not present
+(expect
+  [(merge default-field-values {:values []})
+   {:status "success"}
+   (merge default-field-values {:values (num->$ (range 1 5))})]
+  (tt/with-temp* [Field [{field-id :id} category-field]]
+    (mapv tu/boolean-ids-and-timestamps
+          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+           ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
+            {:values (num->$ (range 1 5))})
+           ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
+
+;; Can unset values
+(expect
+  [(merge default-field-values {:values (mapv vector (range 1 5))})
+   {:status "success"}
+   (merge default-field-values {:values []})]
+  (tt/with-temp* [Field [{field-id :id} category-field]
+                  FieldValues [{field-value-id :id} {:values (range 1 5), :field_id field-id}]]
+    (mapv tu/boolean-ids-and-timestamps
+          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+           ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
+            {:values []})
+           ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
+
+;; Can unset just human readable values
+(expect
+  [(merge default-field-values {:values (num->$ (range 1 5))})
+   {:status "success"}
+   (merge default-field-values {:values (mapv vector (range 1 5))})]
+  (tt/with-temp* [Field [{field-id :id} category-field]
+                  FieldValues [{field-value-id :id} {:values (range 1 5), :field_id field-id
+                                                     :human_readable_values ["$" "$$" "$$$" "$$$$"]}]]
+    (mapv tu/boolean-ids-and-timestamps
+          [((user->client :crowberto) :get 200 (format "field/%d/values" field-id))
+           ((user->client :crowberto) :post 200 (format "field/%d/values" field-id)
+            {:values (mapv vector (range 1 5))})
+           ((user->client :crowberto) :get 200 (format "field/%d/values" field-id))])))
+
+;; Should throw when human readable values are present but not for every value
+(expect
+  "If remapped values are specified, they must be specified for all field values"
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test" :base_type :type/Integer :special_type :type/Category}]]
+    ((user->client :crowberto) :post 400 (format "field/%d/values" field-id)
+     {:values [[1 "$"] [2 "$$"] [3] [4]]})))
+
+;; ## PUT /api/field/:id/dimension
+
+(defn- dimension-for-field [field-id]
+  (-> (Field :id field-id)
+      (hydrate :dimensions)
+      :dimensions))
+
+(defn dimension-post [field-id map-to-post]
+  ((user->client :crowberto) :post 200 (format "field/%d/dimension" field-id) map-to-post))
+
+;; test that we can create an internal dimension for a field, and update it in place
+(expect
+  [[]
+   {:id true
+    :created_at true
+    :updated_at true
+    :type :internal
+    :name "some dimension name"
+    :human_readable_field_id false
+    :field_id true}
+   {:id true
+    :created_at true
+    :updated_at true
+    :type :internal
+    :name "different dimension name"
+    :human_readable_field_id false
+    :field_id true}
+   true]
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test"}]]
+    (let [before-creation (dimension-for-field field-id)
+          _               (dimension-post field-id {:name "some dimension name", :type "internal"})
+          new-dim         (dimension-for-field field-id)
+          _               (dimension-post field-id {:name "different dimension name", :type "internal"})
+          updated-dim     (dimension-for-field field-id)]
+      [before-creation
+       (tu/boolean-ids-and-timestamps new-dim)
+       (tu/boolean-ids-and-timestamps updated-dim)
+       (= (:id new-dim) (:id updated-dim))])))
 
 ;; Check that trying to get values for a 'virtual' field just returns a blank values map
 (expect
-  {:values                {}
-   :human_readable_values {}}
+  {:values []}
   ((user->client :rasta) :get 200 (format "field/%s/values" (codec/url-encode "field-literal,created_at,type/Datetime"))))
 
+;; test that we can create an external dimension for a field, mapped to a human-readable field
+(expect
+  [[]
+   {:id true
+    :created_at true
+    :updated_at true
+    :type :external
+    :name "some dimension name"
+    :human_readable_field_id true
+    :field_id true}]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]
+                  Field [{field-id-2 :id} {:name "Field Test 2"}]]
+    (let [before-creation (dimension-for-field field-id-1)
+          _               (dimension-post field-id-1 {:name "some dimension name", :type "external" :human_readable_field_id field-id-2})
+          new-dim         (dimension-for-field field-id-1)]
+      [before-creation
+       (tu/boolean-ids-and-timestamps new-dim)])))
+
+;; External remappings require a human readable field id
+(expect
+  clojure.lang.ExceptionInfo
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]]
+    (dimension-post field-id-1 {:name "some dimension name", :type "external"})))
+
+;; Non-admin users can't update dimensions
+(expect
+  "You don't have permissions to do that."
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test 1"}]]
+    ((user->client :rasta) :post 403 (format "field/%d/dimension" field-id) {:name "some dimension name", :type "external"})))
+
+;; Ensure we can delete a dimension
+(expect
+  [{:id true
+    :created_at true
+    :updated_at true
+    :type :internal
+    :name "some dimension name"
+    :human_readable_field_id false
+    :field_id true}
+   []]
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test"}]]
+
+    (dimension-post field-id {:name "some dimension name", :type "internal"})
+
+    (let [new-dim (dimension-for-field field-id)]
+      ((user->client :crowberto) :delete 204 (format "field/%d/dimension" field-id))
+      [(tu/boolean-ids-and-timestamps new-dim)
+       (dimension-for-field field-id)])))
+
+;; Non-admin users can't delete a dimension
+(expect
+  "You don't have permissions to do that."
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test 1"}]]
+    ((user->client :rasta) :delete 403 (format "field/%d/dimension" field-id))))
+
+;; When an FK field gets its special_type removed, we should clear the external dimension
+(expect
+  [{:id true
+    :created_at true
+    :updated_at true
+    :type :external
+    :name "fk-remove-dimension"
+    :human_readable_field_id true
+    :field_id true}
+   []]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"
+                                           :special_type :type/FK}]
+                  Field [{field-id-2 :id} {:name "Field Test 2"}]]
+
+    (dimension-post field-id-1 {:name "fk-remove-dimension", :type "external" :human_readable_field_id field-id-2})
+
+    (let [new-dim          (dimension-for-field field-id-1)
+          _                ((user->client :crowberto) :put 200 (format "field/%d" field-id-1) {:special_type nil})
+          dim-after-update (dimension-for-field field-id-1)]
+      [(tu/boolean-ids-and-timestamps new-dim)
+       (tu/boolean-ids-and-timestamps dim-after-update)])))
+
+;; The dimension should stay as long as the FK didn't change
+(expect
+  (repeat 2 {:id true
+             :created_at true
+             :updated_at true
+             :type :external
+             :name "fk-remove-dimension"
+             :human_readable_field_id true
+             :field_id true})
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"
+                                           :special_type :type/FK}]
+                  Field [{field-id-2 :id} {:name "Field Test 2"}]]
+
+    (dimension-post field-id-1 {:name "fk-remove-dimension", :type "external" :human_readable_field_id field-id-2})
+
+    (let [new-dim          (dimension-for-field field-id-1)
+          _                ((user->client :crowberto) :put 200 (format "field/%d" field-id-1) {:description "something diffrent"})
+          dim-after-update (dimension-for-field field-id-1)]
+      [(tu/boolean-ids-and-timestamps new-dim)
+       (tu/boolean-ids-and-timestamps dim-after-update)])))
 
-;; ## POST /api/field/:id/value_map_update
-
-;; Check that we can set values
-(expect
-  [{:status "success"}
-   {:field_id              (id :venues :price)
-    :human_readable_values {:1 "$"
-                            :2 "$$"
-                            :3 "$$$"
-                            :4 "$$$$"}
-    :values                [1 2 3 4]
-    :id                    (field-values-id :venues :price)}]
-  [((user->client :crowberto) :post 200 (format "field/%d/value_map_update" (id :venues :price)) {:values_map {:1 "$"
-                                                                                                               :2 "$$"
-                                                                                                               :3 "$$$"
-                                                                                                               :4 "$$$$"}})
-   (-> ((user->client :rasta) :get 200 (format "field/%d/values" (id :venues :price)))
-       (dissoc :created_at :updated_at))])
-
-;; Check that we can unset values
-(expect
-  [{:status "success"}
-   (tu/match-$ (FieldValues :field_id (id :venues :price))
-     {:field_id              (id :venues :price)
-      :human_readable_values {}
-      :values                [1 2 3 4]
-      :id                    (field-values-id :venues :price)})]
-  [(do (db/update! FieldValues (:id (field->field-values :venues :price))
-         :human_readable_values {:1 "$" ; make sure they're set
-                                 :2 "$$"
-                                 :3 "$$$"
-                                 :4 "$$$$"})
-       ((user->client :crowberto) :post 200 (format "field/%d/value_map_update" (id :venues :price)) {:values_map {}}))
-   (-> ((user->client :rasta) :get 200 (format "field/%d/values" (id :venues :price)))
-       (dissoc :created_at :updated_at))])
-
-;; Check that we get an error if we call value_map_update on something that isn't a category
-(expect "You can only update the mapped values of a Field whose 'special_type' is 'category'/'city'/'state'/'country' or whose 'base_type' is 'type/Boolean'."
-  ((user->client :crowberto) :post 400 (format "field/%d/value_map_update" (id :venues :id))
-   {:values_map {:1 "$"
-                 :2 "$$"
-                 :3 "$$$"
-                 :4 "$$$$"}}))
+;; When removing the FK special type, the fk_target_field_id should be cleared as well
+(expect
+  [{:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       :type/FK,
+    :fk_target_field_id true}
+   {:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       nil,
+    :fk_target_field_id false}]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]
+                  Field [{field-id-2 :id} {:name               "Field Test 2"
+                                           :special_type       :type/FK
+                                           :fk_target_field_id field-id-1}]]
+
+    (let [before-change (simple-field-details (Field field-id-2))
+          _             ((user->client :crowberto) :put 200 (format "field/%d" field-id-2) {:special_type nil})
+          after-change  (simple-field-details (Field field-id-2))]
+      [(tu/boolean-ids-and-timestamps before-change)
+       (tu/boolean-ids-and-timestamps after-change)])))
+
+;; Checking update of the fk_target_field_id
+(expect
+  [{:name               "Field Test 3",
+    :display_name       "Field Test 3",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       :type/FK,
+    :fk_target_field_id true}
+   {:name               "Field Test 3",
+    :display_name       "Field Test 3",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       :type/FK,
+    :fk_target_field_id true}
+   true]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]
+                  Field [{field-id-2 :id} {:name "Field Test 2"}]
+                  Field [{field-id-3 :id} {:name               "Field Test 3"
+                                           :special_type       :type/FK
+                                           :fk_target_field_id field-id-1}]]
+
+    (let [before-change (simple-field-details (Field field-id-3))
+          _             ((user->client :crowberto) :put 200 (format "field/%d" field-id-3) {:fk_target_field_id field-id-2})
+          after-change  (simple-field-details (Field field-id-3))]
+      [(tu/boolean-ids-and-timestamps before-change)
+       (tu/boolean-ids-and-timestamps after-change)
+       (not= (:fk_target_field_id before-change)
+             (:fk_target_field_id after-change))])))
+
+;; Checking update of the fk_target_field_id along with an FK change
+(expect
+  [{:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       nil
+    :fk_target_field_id false}
+   {:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       :type/FK,
+    :fk_target_field_id true}]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]
+                  Field [{field-id-2 :id} {:name "Field Test 2"}]]
+
+    (let [before-change (simple-field-details (Field field-id-2))
+          _             ((user->client :crowberto) :put 200 (format "field/%d" field-id-2) {:special_type :type/FK
+                                                                                            :fk_target_field_id field-id-1})
+          after-change  (simple-field-details (Field field-id-2))]
+      [(tu/boolean-ids-and-timestamps before-change)
+       (tu/boolean-ids-and-timestamps after-change)])))
+
+;; Checking update of the fk_target_field_id and FK remain unchanged on updates of other fields
+(expect
+  [{:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        nil,
+    :visibility_type    :normal,
+    :special_type       :type/FK
+    :fk_target_field_id true}
+   {:name               "Field Test 2",
+    :display_name       "Field Test 2",
+    :description        "foo",
+    :visibility_type    :normal,
+    :special_type       :type/FK,
+    :fk_target_field_id true}]
+  (tt/with-temp* [Field [{field-id-1 :id} {:name "Field Test 1"}]
+                  Field [{field-id-2 :id} {:name               "Field Test 2"
+                                           :special_type       :type/FK
+                                           :fk_target_field_id field-id-1}]]
+
+    (let [before-change (simple-field-details (Field field-id-2))
+          _             ((user->client :crowberto) :put 200 (format "field/%d" field-id-2) {:description "foo"})
+          after-change  (simple-field-details (Field field-id-2))]
+      [(tu/boolean-ids-and-timestamps before-change)
+       (tu/boolean-ids-and-timestamps after-change)])))
+
+;; Changing a remapped field's type to something that can't be remapped will clear the dimension
+(expect
+  [{:id true
+    :created_at true
+    :updated_at true
+    :type :internal
+    :name "some dimension name"
+    :human_readable_field_id false
+    :field_id true}
+   []]
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test"
+                                         :base_type "type/Integer"}]]
+    (dimension-post field-id {:name "some dimension name", :type "internal"})
+    (let [new-dim (dimension-for-field field-id)]
+      ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:special_type "type/Text"})
+      [(tu/boolean-ids-and-timestamps new-dim)
+       (dimension-for-field field-id)])))
+
+;; Change from supported type to supported type will leave the dimension
+(expect
+  (repeat 2 {:id true
+             :created_at true
+             :updated_at true
+             :type :internal
+             :name "some dimension name"
+             :human_readable_field_id false
+             :field_id true})
+  (tt/with-temp* [Field [{field-id :id} {:name "Field Test"
+                                         :base_type "type/Integer"}]]
+    (dimension-post field-id {:name "some dimension name", :type "internal"})
+    (let [new-dim (dimension-for-field field-id)]
+      ((user->client :crowberto) :put 200 (format "field/%d" field-id) {:special_type "type/Category"})
+      [(tu/boolean-ids-and-timestamps new-dim)
+       (tu/boolean-ids-and-timestamps (dimension-for-field field-id))])))
diff --git a/test/metabase/api/setting_test.clj b/test/metabase/api/setting_test.clj
index ad56acf33647c2b3ba161f27be16bc706d3da665..1608d91bf8026ebf17e170ff925674834bdf9687 100644
--- a/test/metabase/api/setting_test.clj
+++ b/test/metabase/api/setting_test.clj
@@ -15,8 +15,8 @@
 ;; ## GET /api/setting
 ;; Check that we can fetch all Settings for Org
 (expect
- [{:key "test-setting-1", :value nil,     :description "Test setting - this only shows up in dev (1)", :default "Using $MB_TEST_SETTING_1"}
-  {:key "test-setting-2", :value "FANCY", :description "Test setting - this only shows up in dev (2)", :default "[Default Value]"}]
+ [{:key "test-setting-1", :value nil,     :is_env_setting true,  :env_name "MB_TEST_SETTING_1", :description "Test setting - this only shows up in dev (1)", :default "Using $MB_TEST_SETTING_1"}
+  {:key "test-setting-2", :value "FANCY", :is_env_setting false, :env_name "MB_TEST_SETTING_2", :description "Test setting - this only shows up in dev (2)", :default "[Default Value]"}]
  (do (set-settings! nil "FANCY")
      (fetch-test-settings)))
 
diff --git a/test/metabase/api/table_test.clj b/test/metabase/api/table_test.clj
index 15655caf7e2a6d7eb195ed6312fb6439844608aa..62c60bf1444a9b02f40b42825bb32db5933dd978 100644
--- a/test/metabase/api/table_test.clj
+++ b/test/metabase/api/table_test.clj
@@ -1,11 +1,13 @@
 (ns metabase.api.table-test
   "Tests for /api/table endpoints."
-  (:require [expectations :refer :all]
+  (:require [clojure.walk :as walk]
+            [expectations :refer :all]
+            [medley.core :as m]
             [metabase
              [driver :as driver]
              [http-client :as http]
              [middleware :as middleware]
-             [sync-database :as sync-database]
+             [sync :as sync]
              [util :as u]]
             [metabase.models
              [card :refer [Card]]
@@ -15,15 +17,20 @@
              [permissions-group :as perms-group]
              [table :refer [Table]]]
             [metabase.test
-             [data :as data :refer :all]
+             [data :as data]
              [util :as tu :refer [match-$ resolve-private-vars]]]
             [metabase.test.data
              [dataset-definitions :as defs]
-             [users :refer :all]]
-            [toucan.hydrate :as hydrate]
+             [users :refer [user->client]]]
+            [toucan
+             [db :as db]
+             [hydrate :as hydrate]]
             [toucan.util.test :as tt]))
 
 (resolve-private-vars metabase.models.table pk-field-id)
+(resolve-private-vars metabase.api.table
+  dimension-options-for-response datetime-dimension-indexes numeric-dimension-indexes
+  numeric-default-index date-default-index coordinate-default-index)
 
 
 ;; ## /api/org/* AUTHENTICATION Tests
@@ -31,13 +38,13 @@
 ;; authentication test on every single individual endpoint
 
 (expect (get middleware/response-unauthentic :body) (http/client :get 401 "table"))
-(expect (get middleware/response-unauthentic :body) (http/client :get 401 (format "table/%d" (id :users))))
+(expect (get middleware/response-unauthentic :body) (http/client :get 401 (format "table/%d" (data/id :users))))
 
 
 ;; Helper Fns
 
 (defn- db-details []
-  (match-$ (db)
+  (match-$ (data/db)
     {:created_at         $
      :engine             "h2"
      :id                 $
@@ -58,46 +65,66 @@
    :entity_type             nil
    :visibility_type         nil
    :db                      (db-details)
-   :field_values            {}
    :entity_name             nil
    :active                  true
-   :db_id                   (id)
+   :db_id                   (data/id)
    :segments                []
    :metrics                 []})
 
-(def ^:private ^:const field-defaults
-  {:description        nil
-   :active             true
-   :position           0
-   :target             nil
-   :preview_display    true
-   :visibility_type    "normal"
-   :caveats            nil
-   :points_of_interest nil
-   :parent_id          nil})
+(def ^:private field-defaults
+  {:description              nil
+   :active                   true
+   :position                 0
+   :target                   nil
+   :preview_display          true
+   :visibility_type          "normal"
+   :caveats                  nil
+   :points_of_interest       nil
+   :special_type             nil
+   :parent_id                nil
+   :dimensions               []
+   :values                   []
+   :dimension_options        []
+   :default_dimension_option nil})
+
+(defn- field-details [field]
+  (merge
+   field-defaults
+   (match-$ field
+     {:updated_at         $
+      :id                 $
+      :created_at         $
+      :fk_target_field_id $
+      :raw_column_id      $
+      :last_analyzed      $
+      :fingerprint        $})))
+
+(defn- fk-field-details [field]
+  (-> (field-details field)
+      (dissoc :dimension_options :default_dimension_option)))
 
 
 ;; ## GET /api/table
 ;; These should come back in alphabetical order and include relevant metadata
 (expect
-  #{{:name         (format-name "categories")
+  #{{:name         (data/format-name "categories")
      :display_name "Categories"
      :rows         75
-     :id           (id :categories)}
-    {:name         (format-name "checkins")
+     :id           (data/id :categories)}
+    {:name         (data/format-name "checkins")
      :display_name "Checkins"
      :rows         1000
-     :id           (id :checkins)}
-    {:name         (format-name "users")
+     :id           (data/id :checkins)}
+    {:name         (data/format-name "users")
      :display_name "Users"
      :rows         15
-     :id           (id :users)}
-    {:name         (format-name "venues")
+     :id           (data/id :users)}
+    {:name         (data/format-name "venues")
      :display_name "Venues"
      :rows         100
-     :id           (id :venues)}}
+     :id           (data/id :venues)}}
   (->> ((user->client :rasta) :get 200 "table")
-       (filter #(= (:db_id %) (id))) ; prevent stray tables from affecting unit test results
+       (filter #(= (:db_id %) (data/id))) ; prevent stray tables from affecting unit test results
        (map #(dissoc %
                      :raw_table_id :db :created_at :updated_at :schema :entity_name :description :entity_type :visibility_type
                      :caveats :points_of_interest :show_in_getting_started :db_id :active))
@@ -107,18 +134,18 @@
 ;; ## GET /api/table/:id
 (expect
   (merge (dissoc (table-defaults) :segments :field_values :metrics)
-         (match-$ (Table (id :venues))
+         (match-$ (Table (data/id :venues))
            {:schema       "PUBLIC"
             :name         "VENUES"
             :display_name "Venues"
             :rows         100
             :updated_at   $
             :pk_field     (pk-field-id $$)
-            :id           (id :venues)
-            :db_id        (id)
+            :id           (data/id :venues)
+            :db_id        (data/id)
             :raw_table_id $
             :created_at   $}))
-  ((user->client :rasta) :get 200 (format "table/%d" (id :venues))))
+  ((user->client :rasta) :get 200 (format "table/%d" (data/id :venues))))
 
 ;; GET /api/table/:id should return a 403 for a user that doesn't have read permissions for the table
 (tt/expect-with-temp [Database [{database-id :id}]
@@ -128,46 +155,40 @@
     (perms/delete-related-permissions! (perms-group/all-users) (perms/object-path database-id))
     ((user->client :rasta) :get 403 (str "table/" table-id))))
 
+(defn- query-metadata-defaults []
+  (->> dimension-options-for-response
+       var-get
+       walk/keywordize-keys
+       (assoc (table-defaults) :dimension_options)))
 
 ;; ## GET /api/table/:id/query_metadata
 (expect
-  (merge (table-defaults)
-         (match-$ (hydrate/hydrate (Table (id :categories)) :field_values)
+  (merge (query-metadata-defaults)
+         (match-$ (hydrate/hydrate (Table (data/id :categories)) :field_values)
            {:schema       "PUBLIC"
             :name         "CATEGORIES"
             :display_name "Categories"
-            :fields       (let [defaults (assoc field-defaults :table_id (id :categories))]
-                            [(merge defaults (match-$ (Field (id :categories :id))
-                                               {:special_type       "type/PK"
-                                                :name               "ID"
-                                                :display_name       "ID"
-                                                :updated_at         $
-                                                :id                 $
-                                                :position           0
-                                                :created_at         $
-                                                :base_type          "type/BigInteger"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field (id :categories :name))
-                                               {:special_type       "type/Name"
-                                                :name               "NAME"
-                                                :display_name       "Name"
-                                                :updated_at         $
-                                                :id                 $
-                                                :position           0
-                                                :created_at         $
-                                                :base_type          "type/Text"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))])
+            :fields       [(assoc (field-details (Field (data/id :categories :id)))
+                             :table_id     (data/id :categories)
+                             :special_type "type/PK"
+                             :name         "ID"
+                             :display_name "ID"
+                             :base_type    "type/BigInteger")
+                           (assoc (field-details (Field (data/id :categories :name)))
+                             :table_id     (data/id :categories)
+                             :special_type "type/Name"
+                             :name         "NAME"
+                             :display_name "Name"
+                             :base_type    "type/Text"
+                             :values       data/venue-categories
+                             :dimension_options []
+                             :default_dimension_option nil)]
             :rows         75
             :updated_at   $
-            :id           (id :categories)
+            :id           (data/id :categories)
             :raw_table_id $
-            :created_at   $
-            :field_values (tu/obj->json->obj (:field_values $$))}))
-  ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :categories))))
+            :created_at   $}))
+  ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :categories))))
 
 
 (def ^:private user-last-login-date-strs
@@ -178,160 +199,111 @@
                               (+ (.getYear inst) 1900)
                               (+ (.getMonth inst) 1)
                               (.getDate inst)))]
-    (->> defs/test-data
-         :table-definitions
-         first
-         :rows
-         (map second)
+    (->> (defs/field-values defs/test-data-map "users" "last_login")
          (map format-inst)
          set
          sort
          vec)))
 
+(def ^:private user-full-names
+  (defs/field-values defs/test-data-map "users" "name"))
+
 ;;; GET api/table/:id/query_metadata?include_sensitive_fields
 ;;; Make sure that getting the User table *does* include info about the password field, but not actual values themselves
 (expect
-  (merge (table-defaults)
-         (match-$ (Table (id :users))
+  (merge (query-metadata-defaults)
+         (match-$ (Table (data/id :users))
            {:schema       "PUBLIC"
             :name         "USERS"
             :display_name "Users"
-            :fields       (let [defaults (assoc field-defaults :table_id (id :users))]
-                            [(merge defaults (match-$ (Field (id :users :id))
-                                               {:special_type       "type/PK"
-                                                :name               "ID"
-                                                :display_name       "ID"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/BigInteger"
-                                                :visibility_type    "normal"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field (id :users :last_login))
-                                               {:special_type       nil
-                                                :name               "LAST_LOGIN"
-                                                :display_name       "Last Login"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/DateTime"
-                                                :visibility_type    "normal"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field (id :users :name))
-                                               {:special_type       "type/Name"
-                                                :name               "NAME"
-                                                :display_name       "Name"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/Text"
-                                                :visibility_type    "normal"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field :table_id (id :users), :name "PASSWORD")
-                                               {:special_type       "type/Category"
-                                                :name               "PASSWORD"
-                                                :display_name       "Password"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/Text"
-                                                :visibility_type    "sensitive"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))])
+            :fields       [(assoc (field-details (Field (data/id :users :id)))
+                             :special_type    "type/PK"
+                             :table_id        (data/id :users)
+                             :name            "ID"
+                             :display_name    "ID"
+                             :base_type       "type/BigInteger"
+                             :visibility_type "normal")
+                           (assoc (field-details (Field (data/id :users :last_login)))
+                             :table_id        (data/id :users)
+                             :name            "LAST_LOGIN"
+                             :display_name    "Last Login"
+                             :base_type       "type/DateTime"
+                             :visibility_type "normal"
+                             :dimension_options        (var-get datetime-dimension-indexes)
+                             :default_dimension_option (var-get date-default-index)
+                             )
+                           (assoc (field-details (Field (data/id :users :name)))
+                             :special_type    "type/Name"
+                             :table_id        (data/id :users)
+                             :name            "NAME"
+                             :display_name    "Name"
+                             :base_type       "type/Text"
+                             :visibility_type "normal"
+                             :values          (map vector (sort user-full-names))
+                             :dimension_options []
+                             :default_dimension_option nil)
+                           (assoc (field-details (Field :table_id (data/id :users), :name "PASSWORD"))
+                             :special_type    "type/Category"
+                             :table_id        (data/id :users)
+                             :name            "PASSWORD"
+                             :display_name    "Password"
+                             :base_type       "type/Text"
+                             :visibility_type "sensitive")]
             :rows         15
             :updated_at   $
-            :id           (id :users)
+            :id           (data/id :users)
             :raw_table_id $
-            :field_values {(keyword (str (id :users :name)))
-                           ["Broen Olujimi"
-                            "Conchúr Tihomir"
-                            "Dwight Gresham"
-                            "Felipinho Asklepios"
-                            "Frans Hevel"
-                            "Kaneonuskatew Eiran"
-                            "Kfir Caj"
-                            "Nils Gotam"
-                            "Plato Yeshua"
-                            "Quentin Sören"
-                            "Rüstem Hebel"
-                            "Shad Ferdynand"
-                            "Simcha Yan"
-                            "Spiros Teofil"
-                            "Szymon Theutrich"]}
             :created_at   $}))
-  ((user->client :rasta) :get 200 (format "table/%d/query_metadata?include_sensitive_fields=true" (id :users))))
+  ((user->client :rasta) :get 200 (format "table/%d/query_metadata?include_sensitive_fields=true" (data/id :users))))
 
 ;;; GET api/table/:id/query_metadata
 ;;; Make sure that getting the User table does *not* include password info
 (expect
-  (merge (table-defaults)
-         (match-$ (Table (id :users))
+  (merge (query-metadata-defaults)
+         (match-$ (Table (data/id :users))
            {:schema       "PUBLIC"
             :name         "USERS"
             :display_name "Users"
-            :fields       (let [defaults (assoc field-defaults :table_id (id :users))]
-                            [(merge defaults (match-$ (Field (id :users :id))
-                                               {:special_type       "type/PK"
-                                                :name               "ID"
-                                                :display_name       "ID"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/BigInteger"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field (id :users :last_login))
-                                               {:special_type       nil
-                                                :name               "LAST_LOGIN"
-                                                :display_name       "Last Login"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/DateTime"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))
-                             (merge defaults (match-$ (Field (id :users :name))
-                                               {:special_type       "type/Name"
-                                                :name               "NAME"
-                                                :display_name       "Name"
-                                                :updated_at         $
-                                                :id                 $
-                                                :created_at         $
-                                                :base_type          "type/Text"
-                                                :fk_target_field_id $
-                                                :raw_column_id      $
-                                                :last_analyzed      $}))])
+            :fields       [(assoc (field-details (Field (data/id :users :id)))
+                             :table_id     (data/id :users)
+                             :special_type "type/PK"
+                             :name         "ID"
+                             :display_name "ID"
+                             :base_type    "type/BigInteger")
+                           (assoc (field-details (Field (data/id :users :last_login)))
+                             :table_id                 (data/id :users)
+                             :name                     "LAST_LOGIN"
+                             :display_name             "Last Login"
+                             :base_type                "type/DateTime"
+                             :dimension_options        (var-get datetime-dimension-indexes)
+                             :default_dimension_option (var-get date-default-index))
+                           (assoc (field-details (Field (data/id :users :name)))
+                             :table_id     (data/id :users)
+                             :special_type "type/Name"
+                             :name         "NAME"
+                             :display_name "Name"
+                             :base_type    "type/Text"
+                             :values       [["Broen Olujimi"]
+                                            ["Conchúr Tihomir"]
+                                            ["Dwight Gresham"]
+                                            ["Felipinho Asklepios"]
+                                            ["Frans Hevel"]
+                                            ["Kaneonuskatew Eiran"]
+                                            ["Kfir Caj"]
+                                            ["Nils Gotam"]
+                                            ["Plato Yeshua"]
+                                            ["Quentin Sören"]
+                                            ["Rüstem Hebel"]
+                                            ["Shad Ferdynand"]
+                                            ["Simcha Yan"]
+                                            ["Spiros Teofil"]
+                                            ["Szymon Theutrich"]])]
             :rows         15
             :updated_at   $
-            :id           (id :users)
+            :id           (data/id :users)
             :raw_table_id $
-            :field_values {(keyword (str (id :users :name)))
-                           ["Broen Olujimi"
-                            "Conchúr Tihomir"
-                            "Dwight Gresham"
-                            "Felipinho Asklepios"
-                            "Frans Hevel"
-                            "Kaneonuskatew Eiran"
-                            "Kfir Caj"
-                            "Nils Gotam"
-                            "Plato Yeshua"
-                            "Quentin Sören"
-                            "Rüstem Hebel"
-                            "Shad Ferdynand"
-                            "Simcha Yan"
-                            "Spiros Teofil"
-                            "Szymon Theutrich"]}
             :created_at   $}))
-  ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (id :users))))
+  ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :users))))
 
 ;; Check that FK fields belonging to Tables we don't have permissions for don't come back as hydrated `:target`(#3867)
 (expect
@@ -379,11 +351,11 @@
 
 (tt/expect-with-temp [Table [table {:rows 15}]]
   2
-  (let [original-sync-table! sync-database/sync-table!
+  (let [original-sync-table! sync/sync-table!
         called (atom 0)
         test-fun (fn [state]
-                   (with-redefs [sync-database/sync-table! (fn [& args] (swap! called inc)
-                                                             (apply original-sync-table! args))]
+                   (with-redefs [sync/sync-table! (fn [& args] (swap! called inc)
+                                                    (apply original-sync-table! args))]
                      ((user->client :crowberto) :put 200 (format "table/%d" (:id table)) {:display_name    "Userz"
                                                                                           :entity_type     "person"
                                                                                           :visibility_type state
@@ -397,67 +369,50 @@
         (test-fun "technical")
         @called)))
 
-
 ;; ## GET /api/table/:id/fks
 ;; We expect a single FK from CHECKINS.USER_ID -> USERS.ID
 (expect
-  (let [checkins-user-field (Field (id :checkins :user_id))
-        users-id-field      (Field (id :users :id))]
+  (let [checkins-user-field (Field (data/id :checkins :user_id))
+        users-id-field      (Field (data/id :users :id))
+        fk-field-defaults   (dissoc field-defaults :target :dimension_options :default_dimension_option)]
     [{:origin_id      (:id checkins-user-field)
       :destination_id (:id users-id-field)
       :relationship   "Mt1"
-      :origin         (merge (dissoc field-defaults :target)
-                             (match-$ checkins-user-field
-                               {:id                 $
-                                :table_id           $
-                                :raw_column_id      $
-                                :name               "USER_ID"
-                                :display_name       "User ID"
-                                :base_type          "type/Integer"
-                                :preview_display    $
-                                :position           $
-                                :special_type       "type/FK"
-                                :fk_target_field_id $
-                                :created_at         $
-                                :updated_at         $
-                                :last_analyzed      $
-                                :table              (merge (dissoc (table-defaults) :segments :field_values :metrics)
-                                                           (match-$ (Table (id :checkins))
-                                                             {:schema       "PUBLIC"
-                                                              :name         "CHECKINS"
-                                                              :display_name "Checkins"
-                                                              :rows         1000
-                                                              :updated_at   $
-                                                              :id           $
-                                                              :raw_table_id $
-                                                              :created_at   $}))}))
-      :destination    (merge (dissoc field-defaults :target)
-                             (match-$ users-id-field
-                               {:id                 $
-                                :table_id           $
-                                :raw_column_id      $
-                                :name               "ID"
-                                :display_name       "ID"
-                                :base_type          "type/BigInteger"
-                                :preview_display    $
-                                :position           $
-                                :special_type       "type/PK"
-                                :fk_target_field_id $
-                                :created_at         $
-                                :updated_at         $
-                                :last_analyzed      $
-                                :table              (merge (dissoc (table-defaults) :db :segments :field_values :metrics)
-                                                           (match-$ (Table (id :users))
-                                                             {:schema       "PUBLIC"
-                                                              :name         "USERS"
-                                                              :display_name "Users"
-                                                              :rows         15
-                                                              :updated_at   $
-                                                              :id           $
-                                                              :raw_table_id $
-                                                              :created_at   $}))}))}])
-  ((user->client :rasta) :get 200 (format "table/%d/fks" (id :users))))
-
+      :origin         (-> (fk-field-details checkins-user-field)
+                          (dissoc :target :dimensions :values)
+                          (assoc :table_id     (data/id :checkins)
+                                 :name         "USER_ID"
+                                 :display_name "User ID"
+                                 :base_type    "type/Integer"
+                                 :special_type "type/FK"
+                                 :table        (merge (dissoc (table-defaults) :segments :field_values :metrics)
+                                                      (match-$ (Table (data/id :checkins))
+                                                        {:schema       "PUBLIC"
+                                                         :name         "CHECKINS"
+                                                         :display_name "Checkins"
+                                                         :rows         1000
+                                                         :updated_at   $
+                                                         :id           $
+                                                         :raw_table_id $
+                                                         :created_at   $}))))
+      :destination    (-> (fk-field-details users-id-field)
+                          (dissoc :target :dimensions :values)
+                          (assoc :table_id     (data/id :users)
+                                 :name         "ID"
+                                 :display_name "ID"
+                                 :base_type    "type/BigInteger"
+                                 :special_type "type/PK"
+                                 :table        (merge (dissoc (table-defaults) :db :segments :field_values :metrics)
+                                                      (match-$ (Table (data/id :users))
+                                                        {:schema       "PUBLIC"
+                                                         :name         "USERS"
+                                                         :display_name "Users"
+                                                         :rows         15
+                                                         :updated_at   $
+                                                         :id           $
+                                                         :raw_table_id $
+                                                         :created_at   $}))))}])
+  ((user->client :rasta) :get 200 (format "table/%d/fks" (data/id :users))))
 
 ;; Make sure metadata for 'virtual' tables comes back as expected from GET /api/table/:id/query_metadata
 (tt/expect-with-temp [Card [card {:name          "Go Dubs!"
@@ -467,7 +422,7 @@
                                                   :native   {:query (format "SELECT NAME, ID, PRICE, LATITUDE FROM VENUES")}}}]]
   (let [card-virtual-table-id (str "card__" (u/get-id card))]
     {:display_name "Go Dubs!"
-     :schema       "All questions"
+     :schema       "Everything else"
      :db_id        database/virtual-id
      :id           card-virtual-table-id
      :description  nil
@@ -492,3 +447,148 @@
 (expect
   []
   ((user->client :crowberto) :get 200 "table/card__1000/fks"))
+
+(defn- narrow-fields [category-names api-response]
+  (for [field (:fields api-response)
+        :when (contains? (set category-names) (:name field))]
+    (-> field
+        (select-keys [:id :table_id :name :values :dimensions])
+        (update :dimensions (fn [dim]
+                              (if (map? dim)
+                                (dissoc dim :id :created_at :updated_at)
+                                dim))))))
+
+(defn- category-id-special-type
+  "Field values will only be returned when the field's special type is
+  set to type/Category. This function will change that for
+  category_id, then invoke `F` and roll it back afterwards"
+  [special-type f]
+  (let [original-special-type (:special_type (Field (data/id :venues :category_id)))]
+    (try
+      (db/update! Field (data/id :venues :category_id) {:special_type special-type})
+      (f)
+      (finally
+        (db/update! Field (data/id :venues :category_id) {:special_type original-special-type})))))
+
+;; ## GET /api/table/:id/query_metadata
+;; Ensure internal remapped dimensions and human_readable_values are returned
+(expect
+  [{:table_id   (data/id :venues)
+    :id         (data/id :venues :category_id)
+    :name       "CATEGORY_ID"
+    :values     (map-indexed (fn [idx [category]] [idx category]) data/venue-categories)
+    :dimensions {:name "Foo", :field_id (data/id :venues :category_id), :human_readable_field_id nil, :type "internal"}}
+   {:id         (data/id :venues :price)
+    :table_id   (data/id :venues)
+    :name       "PRICE"
+    :values     [[1] [2] [3] [4]]
+    :dimensions []}]
+  (data/with-data
+    (data/create-venue-category-remapping "Foo")
+    (category-id-special-type
+     :type/Category
+     (fn []
+       (narrow-fields ["PRICE" "CATEGORY_ID"]
+                      ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :venues))))))))
+
+;; ## GET /api/table/:id/query_metadata
+;; Ensure internal remapped dimensions and human_readable_values are returned when type is enum
+(expect
+  [{:table_id   (data/id :venues)
+    :id         (data/id :venues :category_id)
+    :name       "CATEGORY_ID"
+    :values     (map-indexed (fn [idx [category]] [idx category]) data/venue-categories)
+    :dimensions {:name "Foo", :field_id (data/id :venues :category_id), :human_readable_field_id nil, :type "internal"}}
+   {:id         (data/id :venues :price)
+    :table_id   (data/id :venues)
+    :name       "PRICE"
+    :values     [[1] [2] [3] [4]]
+    :dimensions []}]
+  (data/with-data
+    (data/create-venue-category-remapping "Foo")
+    (category-id-special-type
+     :type/Enum
+     (fn []
+       (narrow-fields ["PRICE" "CATEGORY_ID"]
+                      ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :venues))))))))
+
+;; ## GET /api/table/:id/query_metadata
+;; Ensure FK remappings are returned
+(expect
+  [{:table_id   (data/id :venues)
+    :id         (data/id :venues :category_id)
+    :name       "CATEGORY_ID"
+    :values     []
+    :dimensions {:name "Foo", :field_id (data/id :venues :category_id), :human_readable_field_id (data/id :categories :name), :type "external"}}
+   {:id         (data/id :venues :price)
+    :table_id   (data/id :venues)
+    :name       "PRICE"
+    :values     [[1] [2] [3] [4]]
+    :dimensions []}]
+  (data/with-data
+    (data/create-venue-category-fk-remapping "Foo")
+    (category-id-special-type
+     :type/Category
+     (fn []
+       (narrow-fields ["PRICE" "CATEGORY_ID"]
+                      ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :venues))))))))
+
+;; Ensure dimensions options are sorted numerically, but returned as strings
+(expect
+  (map str (sort (map #(Long/parseLong %) (var-get datetime-dimension-indexes))))
+  (var-get datetime-dimension-indexes))
+
+(expect
+  (map str (sort (map #(Long/parseLong %) (var-get numeric-dimension-indexes))))
+  (var-get numeric-dimension-indexes))
+
+;; Numeric fields without min/max values should not have binning strategies
+(expect
+  []
+  (let [lat-field-id (data/id :venues :latitude)
+        fingerprint  (:fingerprint (Field lat-field-id))]
+    (try
+      (db/update! Field (data/id :venues :latitude) :fingerprint (-> fingerprint
+                                                                     (assoc-in [:type :type/Number :max] nil)
+                                                                     (assoc-in [:type :type/Number :min] nil)))
+      (-> ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :categories)))
+          (get-in [:fields])
+          first
+          :dimension_options)
+      (finally
+        (db/update! Field lat-field-id :fingerprint fingerprint)))))
+
+(defn- extract-dimension-options
+  "For the given `FIELD-NAME` find it's dimension_options following
+  the indexes given in the field"
+  [response field-name]
+  (set
+   (for [dim-index (->> response
+                        :fields
+                        (m/find-first #(= field-name (:name %)))
+                        :dimension_options)
+         :let [{[_ _ strategy _] :mbql} (get-in response [:dimension_options (keyword dim-index)])]]
+     strategy)))
+
+;; Lat/Long fields should use bin-width rather than num-bins
+(expect
+  (if (data/binning-supported?)
+    #{nil "bin-width" "default"}
+    #{})
+  (let [response ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :venues)))]
+    (extract-dimension-options response "LATITUDE")))
+
+;; Number columns without a special type should use "num-bins"
+(expect
+  (if (data/binning-supported?)
+    #{nil "num-bins" "default"}
+    #{})
+  (let [{:keys [special_type]} (Field (data/id :venues :price))]
+    (try
+      (db/update! Field (data/id :venues :price) :special_type nil)
+
+      (let [response ((user->client :rasta) :get 200 (format "table/%d/query_metadata" (data/id :venues)))]
+        (extract-dimension-options response "PRICE"))
+
+      (finally
+        (db/update! Field (data/id :venues :price) :special_type special_type)))))
diff --git a/test/metabase/api/tiles_test.clj b/test/metabase/api/tiles_test.clj
index f9a3756720b12428f67ce5b9f07b1f6bcbb3cc58..059d24d22f7712c375465a60d2b1903c25ec3c98 100644
--- a/test/metabase/api/tiles_test.clj
+++ b/test/metabase/api/tiles_test.clj
@@ -2,7 +2,7 @@
   "Tests for `/api/tiles` endpoints."
   (:require [cheshire.core :as json]
             [expectations :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :refer :all]
             [metabase.test.data.users :refer :all]))
 
diff --git a/test/metabase/driver/bigquery_test.clj b/test/metabase/driver/bigquery_test.clj
index 571c0160be03c443fffa9e179e0caef036f06b1f..85252d0c9c36c1fed7d033c6b499bbb59add842f 100644
--- a/test/metabase/driver/bigquery_test.clj
+++ b/test/metabase/driver/bigquery_test.clj
@@ -3,12 +3,15 @@
             [metabase
              [query-processor :as qp]
              [query-processor-test :as qptest]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test
              [data :as data]
              [util :as tu]]
             [metabase.test.data.datasets :refer [expect-with-engine]]))
 
+(def ^:private col-defaults
+  {:remapped_to nil, :remapped_from nil})
+
 ;; Test native queries
 (expect-with-engine :bigquery
   [[100]
@@ -21,10 +24,12 @@
 
 ;; make sure that BigQuery native queries maintain the column ordering specified in the SQL -- post-processing ordering shouldn't apply (Issue #2821)
 (expect-with-engine :bigquery
-  {:cols    [{:name "venue_id",    :display_name "Venue ID",    :base_type :type/Integer}
-             {:name "user_id",     :display_name  "User ID"      :base_type :type/Integer}
-             {:name "checkins_id", :display_name "Checkins ID"  :base_type :type/Integer}],
-   :columns ["venue_id" "user_id" "checkins_id"]}
+  {:columns ["venue_id" "user_id" "checkins_id"],
+   :cols    (mapv #(merge col-defaults %)
+                  [{:name "venue_id",    :display_name "Venue ID",    :base_type :type/Integer}
+                   {:name "user_id",     :display_name  "User ID",    :base_type :type/Integer}
+                   {:name "checkins_id", :display_name "Checkins ID", :base_type :type/Integer}])}
+
   (select-keys (:data (qp/process-query {:native   {:query "SELECT [test_data.checkins.venue_id] AS [venue_id], [test_data.checkins.user_id] AS [user_id], [test_data.checkins.id] AS [checkins_id]
                                                             FROM [test_data.checkins]
                                                             LIMIT 2"}
diff --git a/test/metabase/driver/druid_test.clj b/test/metabase/driver/druid_test.clj
index 436ba38d1f8217607468d1eb6e4febb2bb518a8c..029fc169691b3c504396ad62813dbce60172058b 100644
--- a/test/metabase/driver/druid_test.clj
+++ b/test/metabase/driver/druid_test.clj
@@ -9,7 +9,7 @@
              [timeseries-query-processor-test :as timeseries-qp-test]
              [util :as u]]
             [metabase.models.metric :refer [Metric]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets :refer [expect-with-engine]]
             [toucan.util.test :as tt]))
@@ -35,6 +35,9 @@
                              :database (data/id)})
           (m/dissoc-in [:data :results_metadata])))))
 
+(def ^:private col-defaults
+  {:base_type :type/Text, :remapped_from nil, :remapped_to nil})
+
 ;; test druid native queries
 (expect-with-engine :druid
   {:row_count 2
@@ -42,12 +45,13 @@
    :data      {:columns     ["timestamp" "id" "user_name" "venue_price" "venue_name" "count"]
                :rows        [["2013-01-03T08:00:00.000Z" "931" "Simcha Yan" "1" "Kinaree Thai Bistro"       1]
                              ["2013-01-10T08:00:00.000Z" "285" "Kfir Caj"   "2" "Ruen Pair Thai Restaurant" 1]]
-               :cols        [{:name "timestamp",   :display_name "Timestamp",   :base_type :type/Text}
-                             {:name "id",          :display_name "ID",          :base_type :type/Text}
-                             {:name "user_name",   :display_name "User Name",   :base_type :type/Text}
-                             {:name "venue_price", :display_name "Venue Price", :base_type :type/Text}
-                             {:name "venue_name",  :display_name "Venue Name",  :base_type :type/Text}
-                             {:name "count",       :display_name "Count",       :base_type :type/Integer}]
+               :cols        (mapv #(merge col-defaults %)
+                                  [{:name "timestamp",   :display_name "Timestamp"}
+                                   {:name "id",          :display_name "ID"}
+                                   {:name "user_name",   :display_name "User Name"}
+                                   {:name "venue_price", :display_name "Venue Price"}
+                                   {:name "venue_name",  :display_name "Venue Name"}
+                                   {:name "count",       :display_name "Count", :base_type :type/Integer}])
                :native_form {:query native-query-1}}}
   (process-native-query native-query-1))
 
diff --git a/test/metabase/driver/generic_sql/native_test.clj b/test/metabase/driver/generic_sql/native_test.clj
index f466b820b2fd3cdc8544ecd2f306c47b607a9d7c..f91553a304fc1e651397778140147d27d6ae393c 100644
--- a/test/metabase/driver/generic_sql/native_test.clj
+++ b/test/metabase/driver/generic_sql/native_test.clj
@@ -6,6 +6,9 @@
             [metabase.test.data :refer :all]
             [toucan.db :as db]))
 
+(def ^:private col-defaults
+  {:remapped_from nil, :remapped_to nil})
+
 ;; Just check that a basic query works
 (expect
   {:status    :completed
@@ -13,7 +16,7 @@
    :data      {:rows        [[100]
                              [99]]
                :columns     ["ID"]
-               :cols        [{:name "ID", :display_name "ID", :base_type :type/Integer}]
+               :cols        [(merge col-defaults {:name "ID", :display_name "ID", :base_type :type/Integer})]
                :native_form {:query "SELECT ID FROM VENUES ORDER BY ID DESC LIMIT 2;"}}}
   (-> (qp/process-query {:native   {:query "SELECT ID FROM VENUES ORDER BY ID DESC LIMIT 2;"}
                          :type     :native
@@ -27,9 +30,10 @@
    :data      {:rows        [[100 "Mohawk Bend" 46]
                              [99 "Golden Road Brewing" 10]]
                :columns     ["ID" "NAME" "CATEGORY_ID"]
-               :cols        [{:name "ID",          :display_name "ID",          :base_type :type/Integer}
-                             {:name "NAME",        :display_name "Name",        :base_type :type/Text}
-                             {:name "CATEGORY_ID", :display_name "Category ID", :base_type :type/Integer}]
+               :cols        (mapv #(merge col-defaults %)
+                                  [{:name "ID",          :display_name "ID",          :base_type :type/Integer}
+                                   {:name "NAME",        :display_name "Name",        :base_type :type/Text}
+                                   {:name "CATEGORY_ID", :display_name "Category ID", :base_type :type/Integer}])
                :native_form {:query "SELECT ID, NAME, CATEGORY_ID FROM VENUES ORDER BY ID DESC LIMIT 2;"}}}
   (-> (qp/process-query {:native   {:query "SELECT ID, NAME, CATEGORY_ID FROM VENUES ORDER BY ID DESC LIMIT 2;"}
                          :type     :native
diff --git a/test/metabase/driver/generic_sql_test.clj b/test/metabase/driver/generic_sql_test.clj
index b78b80cc98f1e3c4dafc2cc35e4503bd776d0af7..75c4ed7337106e6a30af5abebd108d40a9eb4b84 100644
--- a/test/metabase/driver/generic_sql_test.clj
+++ b/test/metabase/driver/generic_sql_test.clj
@@ -1,13 +1,11 @@
 (ns metabase.driver.generic-sql-test
   (:require [expectations :refer :all]
             [metabase.driver :as driver]
-            [metabase.driver.generic-sql :refer :all]
+            [metabase.driver.generic-sql :as sql :refer :all]
             [metabase.models
              [field :refer [Field]]
              [table :as table :refer [Table]]]
-            [metabase.test
-             [data :refer :all]
-             [util :refer [resolve-private-vars]]]
+            [metabase.test.data :refer :all]
             [metabase.test.data.datasets :as datasets]
             [toucan.db :as db])
   (:import metabase.driver.h2.H2Driver))
@@ -66,29 +64,6 @@
      :dest-column-name "ID"}}
   (driver/describe-table-fks (H2Driver.) (db) @venues-table))
 
-
-;; ANALYZE-TABLE
-
-(expect
-  {:row_count 100,
-   :fields    [{:id (id :venues :category_id)}
-               {:id (id :venues :id)}
-               {:id (id :venues :latitude)}
-               {:id (id :venues :longitude)}
-               {:id (id :venues :name), :values (db/select-one-field :values 'FieldValues, :field_id (id :venues :name))}
-               {:id (id :venues :price), :values [1 2 3 4]}]}
-  (driver/analyze-table (H2Driver.) @venues-table (set (mapv :id (table/fields @venues-table)))))
-
-(resolve-private-vars metabase.driver.generic-sql field-avg-length field-values-lazy-seq table-rows-seq)
-
-;;; FIELD-AVG-LENGTH
-(datasets/expect-with-engines @generic-sql-engines
-  ;; Not sure why some databases give different values for this but they're close enough that I'll allow them
-  (if (contains? #{:redshift :sqlserver} datasets/*engine*)
-    15
-    16)
-  (field-avg-length datasets/*driver* (db/select-one 'Field :id (id :venues :name))))
-
 ;;; FIELD-VALUES-LAZY-SEQ
 (datasets/expect-with-engines @generic-sql-engines
   ["Red Medicine"
@@ -96,7 +71,7 @@
    "The Apple Pan"
    "Wurstküche"
    "Brite Spot Family Restaurant"]
-  (take 5 (field-values-lazy-seq datasets/*driver* (db/select-one 'Field :id (id :venues :name)))))
+  (take 5 (#'sql/field-values-lazy-seq datasets/*driver* (db/select-one 'Field :id (id :venues :name)))))
 
 
 ;;; TABLE-ROWS-SEQ
@@ -106,23 +81,15 @@
    {:name "The Apple Pan",                :price 2, :category_id 11, :id 3}
    {:name "Wurstküche",                   :price 2, :category_id 29, :id 4}
    {:name "Brite Spot Family Restaurant", :price 2, :category_id 20, :id 5}]
-  (for [row (take 5 (sort-by :id (table-rows-seq datasets/*driver*
-                                                 (db/select-one 'Database :id (id))
-                                                 (db/select-one 'RawTable :id (db/select-one-field :raw_table_id 'Table, :id (id :venues))))))]
+  (for [row (take 5 (sort-by :id (#'sql/table-rows-seq datasets/*driver*
+                                   (db/select-one 'Database :id (id))
+                                   (db/select-one 'Table :id (id :venues)))))]
     ;; different DBs use different precisions for these
     (-> (dissoc row :latitude :longitude)
         (update :price int)
         (update :category_id int)
         (update :id int))))
 
-;;; FIELD-PERCENT-URLS
-(datasets/expect-with-engines @generic-sql-engines
-  (if (= datasets/*engine* :oracle)
-    ;; Oracle considers empty strings to be NULL strings; thus in this particular test `percent-valid-urls` gives us 4/7 valid valid where other DBs give us 4/8
-    0.5714285714285714
-    0.5)
-  (dataset half-valid-urls
-    (field-percent-urls datasets/*driver* (db/select-one 'Field :id (id :urls :url)))))
 
 ;;; Make sure invalid ssh credentials are detected if a direct connection is possible
 (expect
diff --git a/test/metabase/driver/mongo_test.clj b/test/metabase/driver/mongo_test.clj
index edbce9b4d472c202a26e1a9f123b477a35f8e6b1..132c3384f4ac7b8aecdb1ee3632ee0791ee26c35 100644
--- a/test/metabase/driver/mongo_test.clj
+++ b/test/metabase/driver/mongo_test.clj
@@ -10,7 +10,7 @@
              [field :refer [Field]]
              [field-values :refer [FieldValues]]
              [table :as table :refer [Table]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -78,7 +78,8 @@
    :row_count 1
    :data      {:rows        [[1]]
                :columns     ["count"]
-               :cols        [{:name "count", :display_name "Count", :base_type :type/Integer}]
+               :cols        [{:name "count", :display_name "Count", :base_type :type/Integer
+                              :remapped_to nil, :remapped_from nil}]
                :native_form {:collection "venues"
                              :query      native-query}}}
   (-> (qp/process-query {:native   {:query      native-query
@@ -116,15 +117,6 @@
               :pk? true}}}
   (driver/describe-table (MongoDriver.) (data/db) (Table (data/id :venues))))
 
-;; ANALYZE-TABLE
-(datasets/expect-with-engine :mongo
-  {:row_count 100
-   :fields    [{:id (data/id :venues :category_id) :values [2 3 4 5 6 7 10 11 12 13 14 15 18 19 20 29 40 43 44 46 48 49 50 58 64 67 71 74]}
-               {:id (data/id :venues :name),       :values (db/select-one-field :values FieldValues, :field_id (data/id :venues :name))}
-               {:id (data/id :venues :price),      :values [1 2 3 4]}]}
-  (let [venues-table (Table (data/id :venues))]
-    (driver/analyze-table (MongoDriver.) venues-table (set (mapv :id (table/fields venues-table))))))
-
 ;; ## Big-picture tests for the way data should look post-sync
 
 ;; Test that Tables got synced correctly, and row counts are correct
diff --git a/test/metabase/driver/mysql_test.clj b/test/metabase/driver/mysql_test.clj
index 8758296e5643a400e00546f22fc8e5ce15c4e7bc..688eb6eeb1964bf0ed0166221af46bcb44292161 100644
--- a/test/metabase/driver/mysql_test.clj
+++ b/test/metabase/driver/mysql_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.driver.mysql-test
   (:require [expectations :refer :all]
             [metabase
-             [sync-database :as sync-db]
+             [sync :as sync]
              [util :as u]]
             [metabase.driver.generic-sql :as sql]
             [metabase.models.database :refer [Database]]
@@ -67,5 +67,5 @@
     (tt/with-temp Database [db {:engine "mysql"
                                 :details (assoc (:details db)
                                            :additional-options "tinyInt1isBit=false")}]
-      (sync-db/sync-database! db)
+      (sync/sync-database! db)
       (db->fields db))))
diff --git a/test/metabase/driver/postgres_test.clj b/test/metabase/driver/postgres_test.clj
index ffd9889adbe6b663b560aa3f8de1ca30ed6a65eb..7a9c434707cd1823661d17a94aacd0e9e88e7d1e 100644
--- a/test/metabase/driver/postgres_test.clj
+++ b/test/metabase/driver/postgres_test.clj
@@ -5,7 +5,7 @@
             [metabase
              [driver :as driver]
              [query-processor-test :refer [rows]]
-             [sync-database :as sync-db]
+             [sync :as sync]
              [util :as u]]
             [metabase.driver
              [generic-sql :as sql]
@@ -14,7 +14,7 @@
              [database :refer [Database]]
              [field :refer [Field]]
              [table :refer [Table]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -221,7 +221,7 @@
     (drop-if-exists-and-create-db! "dropped_views_test")
     ;; create the DB object
     (tt/with-temp Database [database {:engine :postgres, :details (assoc details :dbname "dropped_views_test")}]
-      (let [sync! #(sync-db/sync-database! database, :full-sync? true)]
+      (let [sync! #(sync/sync-database! database {:full-sync? true})]
         ;; populate the DB and create a view
         (exec! ["CREATE table birds (name VARCHAR UNIQUE NOT NULL);"
                 "INSERT INTO birds (name) VALUES ('Rasta'), ('Lucky'), ('Kanye Nest');"
diff --git a/test/metabase/driver/presto_test.clj b/test/metabase/driver/presto_test.clj
index 2b88139e253263261824350361b3fff086253c16..476d70ab0adc50e7f0a8db93ddfa596dce941724 100644
--- a/test/metabase/driver/presto_test.clj
+++ b/test/metabase/driver/presto_test.clj
@@ -96,18 +96,6 @@
               :base-type :type/Integer}}}
   (driver/describe-table (PrestoDriver.) (data/db) (db/select-one 'Table :id (data/id :venues))))
 
-;;; ANALYZE-TABLE
-(datasets/expect-with-engine :presto
-  {:row_count 100
-   :fields    [{:id (data/id :venues :category_id), :values [2 3 4 5 6 7 10 11 12 13 14 15 18 19 20 29 40 43 44 46 48 49 50 58 64 67 71 74]}
-               {:id (data/id :venues :id)}
-               {:id (data/id :venues :latitude)}
-               {:id (data/id :venues :longitude)}
-               {:id (data/id :venues :name), :values (db/select-one-field :values 'FieldValues, :field_id (data/id :venues :name))}
-               {:id (data/id :venues :price), :values [1 2 3 4]}]}
-  (let [venues-table (db/select-one 'Table :id (data/id :venues))]
-    (driver/analyze-table (PrestoDriver.) venues-table (set (mapv :id (table/fields venues-table))))))
-
 ;;; FIELD-VALUES-LAZY-SEQ
 (datasets/expect-with-engine :presto
   ["Red Medicine"
@@ -126,18 +114,12 @@
    {:name "Brite Spot Family Restaurant", :price 2, :category_id 20, :id 5}]
   (for [row (take 5 (sort-by :id (driver/table-rows-seq (PrestoDriver.)
                                                         (db/select-one 'Database :id (data/id))
-                                                        (db/select-one 'RawTable :id (db/select-one-field :raw_table_id 'Table, :id (data/id :venues))))))]
+                                                        (db/select-one 'Table :id (data/id :venues)))))]
     (-> (dissoc row :latitude :longitude)
         (update :price int)
         (update :category_id int)
         (update :id int))))
 
-;;; FIELD-PERCENT-URLS
-(datasets/expect-with-engine :presto
-  0.5
-  (data/dataset half-valid-urls
-    (sql/field-percent-urls (PrestoDriver.) (db/select-one 'Field :id (data/id :urls :url)))))
-
 ;;; APPLY-PAGE
 (expect
   {:select ["name" "id"]
diff --git a/test/metabase/fingerprinting_test.clj b/test/metabase/fingerprinting_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..98deffe4ebdc75889a41d3629e2caadbbcf94c57
--- /dev/null
+++ b/test/metabase/fingerprinting_test.clj
@@ -0,0 +1,154 @@
+(ns metabase.fingerprinting-test
+  (:require [clj-time.coerce :as t.coerce]
+            [clj-time.core :as t]
+            [expectations :refer :all]
+            [metabase.fingerprinting
+             [core :as f.core]
+             [costs :refer :all]
+             [fingerprinters :as f :refer :all]
+             [histogram :as h :refer :all]]
+            [redux.core :as redux]))
+
+(def ^:private numbers [0.1 0.4 0.2 nil 0.5 0.3 0.51 0.55 0.22])
+(def ^:private datetimes ["2015-06-01" nil "2015-06-11" "2015-01-01"
+                          "2016-06-31" "2017-09-01" "2016-04-15" "2017-11-02"])
+(def ^:private categories [:foo :baz :bar :bar nil :foo])
+
+(def ^:private hist (transduce identity histogram (take 100 (cycle numbers))))
+(def ^:private hist-c (transduce identity histogram-categorical
+                                 (take 100 (cycle categories))))
+
+(expect
+  [2
+   (/ 4)
+   nil
+   nil]
+  [(safe-divide 4 2)
+   (safe-divide 4)
+   (safe-divide 0)
+   (safe-divide 4 0)])
+
+(expect
+  [(/ 23 100)
+   0.5
+   -1.0
+   -5.0
+   1.2]
+  [(growth 123 100)
+   (growth -0.1 -0.2)
+   (growth -0.4 -0.2)
+   (growth -0.4 0.1)
+   (growth 0.1 -0.5)])
+
+(expect
+  [100.0
+   11]
+  [(total-count hist)
+   (nil-count hist)])
+
+(expect
+  [-0.0
+   true]
+  (let [all-ones (entropy (transduce identity histogram (repeat 10 1)))]
+    [all-ones
+     (> (entropy hist) (entropy hist-c) all-ones)]))
+
+(expect
+  [{:foo 2
+    :bar 10}
+   {}]
+  [(transduce identity (rollup (redux/pre-step + :y) :x)
+              [{:x :foo :y 1}
+               {:x :foo :y 1}
+               {:x :bar :y 5}
+               {:x :bar :y 3}
+               {:x :bar :y 2}])
+   (transduce identity (rollup (redux/pre-step + :y) :x) [])])
+
+(expect
+  [1
+   1
+   2
+   4]
+  [(#'f/quarter (t/date-time 2017 1))
+   (#'f/quarter (t/date-time 2017 3))
+   (#'f/quarter (t/date-time 2017 5))
+   (#'f/quarter (t/date-time 2017 12))])
+
+(expect
+  {:limit (var-get #'f.core/max-sample-size)}
+  (#'f.core/extract-query-opts {:max-cost {:query :sample}}))
+
+(defn- make-timestamp
+  [y m]
+  (-> (t/date-time y m)
+      ((var f/to-double))))
+
+(expect
+  [[(make-timestamp 2016 1) 12]
+   [(make-timestamp 2016 2) 0]
+   [(make-timestamp 2016 3) 4]
+   [(make-timestamp 2016 4) 0]
+   [(make-timestamp 2016 5) 0]
+   [(make-timestamp 2016 6) 0]
+   [(make-timestamp 2016 7) 0]
+   [(make-timestamp 2016 8) 0]
+   [(make-timestamp 2016 9) 0]
+   [(make-timestamp 2016 10) 0]
+   [(make-timestamp 2016 11) 0]
+   [(make-timestamp 2016 12) 0]
+   [(make-timestamp 2017 1) 25]]
+  (#'f/fill-timeseries (t/months 1) [[(make-timestamp 2016 1) 12]
+                                     [(make-timestamp 2016 3) 4]
+                                     [(make-timestamp 2017 1) 25]]))
+
+;; Also serves as a low-key test that fingerprinters can survive nil values.
+(expect
+  [(var-get #'f/Num)
+   (var-get #'f/DateTime)
+   (var-get #'f/Category)
+   (var-get #'f/Text)
+   [nil [:type/NeverBeforeSeen :type/*]]]
+  [(-> (#'f.core/fingerprint-field {} {:base_type :type/Number} numbers) :type)
+   (-> (#'f.core/fingerprint-field {} {:base_type :type/DateTime} datetimes)
+       :type)
+   (-> (#'f.core/fingerprint-field {} {:base_type :type/Text
+                                  :special_type :type/Category}
+                              categories)
+       :type)
+   (->> categories
+        (map str)
+        (#'f.core/fingerprint-field {} {:base_type :type/Text})
+        :type)
+   (-> (#'f.core/fingerprint-field {} {:base_type :type/NeverBeforeSeen} numbers)
+       :type)])
+
+(expect
+  [true
+   true
+   true
+   true
+   false
+   false
+   true
+   true
+   true
+   true
+   true
+   true
+   false
+   false]
+  [(-> {:computation :linear} linear-computation? boolean)
+   (-> {:computation :unbounded} unbounded-computation? boolean)
+   (-> {:computation :yolo} unbounded-computation? boolean)
+   (-> {:computation :yolo} yolo-computation? boolean)
+   (-> {:computation :unbounded} linear-computation? boolean)
+   (-> {:computation :unbounded} yolo-computation? boolean)
+   (-> {:query :cache} cache-only? boolean)
+   (-> {:query :sample} sample-only? boolean)
+   (-> {:query :full-scan} full-scan? boolean)
+   (-> {:query :joins} full-scan? boolean)
+   (-> {:query :joins} alow-joins? boolean)
+   (-> nil full-scan? boolean)
+   (-> nil alow-joins? boolean)
+   (-> {:query :sample} full-scan? boolean)])
diff --git a/test/metabase/models/card_test.clj b/test/metabase/models/card_test.clj
index 9dde412f80c1a58365eac44e3a000ba5b47441b3..40e9c4ecdab5517a228e01741be8e84d8287eb6d 100644
--- a/test/metabase/models/card_test.clj
+++ b/test/metabase/models/card_test.clj
@@ -8,7 +8,7 @@
              [database :as database]
              [interface :as mi]
              [permissions :as perms]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test
              [data :as data]
              [util :as tu]]
diff --git a/test/metabase/models/field_test.clj b/test/metabase/models/field_test.clj
index 0288dc81ddc14b5b209deb85fe7e1cc827e0d82c..ba98f7218c68c440bb6594e6ca3ee6bbf067ef91 100644
--- a/test/metabase/models/field_test.clj
+++ b/test/metabase/models/field_test.clj
@@ -1,9 +1,7 @@
 (ns metabase.models.field-test
   (:require [expectations :refer :all]
-            [metabase.models
-             [field :refer :all]
-             [field-values :refer :all]]
-            [metabase.test.util :as tu]))
+            [metabase.models.field-values :refer :all]
+            [metabase.sync.analyze.classifiers.name :as name]))
 
 ;; field-should-have-field-values?
 
@@ -71,15 +69,9 @@
 
 
 ;;; infer-field-special-type
-
-(tu/resolve-private-vars metabase.models.field infer-field-special-type)
-
-(expect nil            (infer-field-special-type nil       nil))
-(expect nil            (infer-field-special-type "id"      nil))
-(expect nil            (infer-field-special-type nil       :type/Integer))
-(expect :type/PK       (infer-field-special-type "id"      :type/Integer))
+(expect :type/PK       (#'name/special-type-for-name-and-base-type "id"      :type/Integer))
 ;; other pattern matches based on type/regex (remember, base_type matters in matching!)
-(expect :type/Category (infer-field-special-type "rating"  :type/Integer))
-(expect nil            (infer-field-special-type "rating"  :type/Boolean))
-(expect :type/Country  (infer-field-special-type "country" :type/Text))
-(expect nil            (infer-field-special-type "country" :type/Integer))
+(expect :type/Category (#'name/special-type-for-name-and-base-type "rating"  :type/Integer))
+(expect nil            (#'name/special-type-for-name-and-base-type "rating"  :type/Boolean))
+(expect :type/Country  (#'name/special-type-for-name-and-base-type "country" :type/Text))
+(expect nil            (#'name/special-type-for-name-and-base-type "country" :type/Integer))
diff --git a/test/metabase/models/setting_test.clj b/test/metabase/models/setting_test.clj
index 87461daa2ef65e30ac8fc1a8a2252745bfd41c0d..734c406db05b0cdcb8c799109861b081d0b5d74d 100644
--- a/test/metabase/models/setting_test.clj
+++ b/test/metabase/models/setting_test.clj
@@ -125,47 +125,47 @@
 
 ;; user-facing-info w/ no db value, no env var value, no default value
 (expect
-  {:value nil, :default nil}
+  {:value nil, :is_env_setting false, :env_name "MB_TEST_SETTING_1", :default nil}
   (user-facing-info-with-db-and-env-var-values :test-setting-1 nil nil))
 
 ;; user-facing-info w/ no db value, no env var value, default value
 (expect
-  {:value nil, :default "[Default Value]"}
+  {:value nil, :is_env_setting false, :env_name "MB_TEST_SETTING_2", :default "[Default Value]"}
   (user-facing-info-with-db-and-env-var-values :test-setting-2 nil nil))
 
 ;; user-facing-info w/ no db value, env var value, no default value -- shouldn't leak env var value
 (expect
-  {:value nil, :default "Using $MB_TEST_SETTING_1"}
+  {:value nil, :is_env_setting true, :env_name "MB_TEST_SETTING_1", :default "Using $MB_TEST_SETTING_1"}
   (user-facing-info-with-db-and-env-var-values :test-setting-1 nil "TOUCANS"))
 
 ;; user-facing-info w/ no db value, env var value, default value
 (expect
-  {:value nil, :default "Using $MB_TEST_SETTING_2"}
+  {:value nil,  :is_env_setting true, :env_name "MB_TEST_SETTING_2", :default "Using $MB_TEST_SETTING_2"}
   (user-facing-info-with-db-and-env-var-values :test-setting-2 nil "TOUCANS"))
 
 ;; user-facing-info w/ db value, no env var value, no default value
 (expect
-  {:value "WOW", :default nil}
+  {:value "WOW", :is_env_setting false, :env_name "MB_TEST_SETTING_1", :default nil}
   (user-facing-info-with-db-and-env-var-values :test-setting-1 "WOW" nil))
 
 ;; user-facing-info w/ db value, no env var value, default value
 (expect
-  {:value "WOW", :default "[Default Value]"}
+  {:value "WOW", :is_env_setting false, :env_name "MB_TEST_SETTING_2", :default "[Default Value]"}
   (user-facing-info-with-db-and-env-var-values :test-setting-2 "WOW" nil))
 
 ;; user-facing-info w/ db value, env var value, no default value -- the DB value should take precedence over the env var
 (expect
-  {:value "WOW", :default "Using $MB_TEST_SETTING_1"}
+  {:value "WOW", :is_env_setting true, :env_name "MB_TEST_SETTING_1", :default "Using $MB_TEST_SETTING_1"}
   (user-facing-info-with-db-and-env-var-values :test-setting-1 "WOW" "ENV VAR"))
 
 ;; user-facing-info w/ db value, env var value, default value -- env var should take precedence over default value
 (expect
-  {:value "WOW", :default "Using $MB_TEST_SETTING_2"}
+  {:value "WOW", :is_env_setting true, :env_name "MB_TEST_SETTING_2", :default "Using $MB_TEST_SETTING_2"}
   (user-facing-info-with-db-and-env-var-values :test-setting-2 "WOW" "ENV VAR"))
 
 ;; all
 (expect
-  {:key :test-setting-2, :value "TOUCANS", :description "Test setting - this only shows up in dev (2)", :default "[Default Value]"}
+  {:key :test-setting-2, :value "TOUCANS", :description "Test setting - this only shows up in dev (2)",  :is_env_setting false, :env_name "MB_TEST_SETTING_2", :default "[Default Value]"}
   (do (set-settings! nil "TOUCANS")
       (some (fn [setting]
               (when (re-find #"^test-setting-2$" (name (:key setting)))
@@ -174,8 +174,8 @@
 
 ;; all
 (expect
-  [{:key :test-setting-1, :value nil,  :description "Test setting - this only shows up in dev (1)", :default "Using $MB_TEST_SETTING_1"}
-   {:key :test-setting-2, :value "S2", :description "Test setting - this only shows up in dev (2)", :default "[Default Value]"}]
+  [{:key :test-setting-1, :value nil, :is_env_setting true, :env_name "MB_TEST_SETTING_1", :description "Test setting - this only shows up in dev (1)", :default "Using $MB_TEST_SETTING_1"}
+   {:key :test-setting-2, :value "S2", :is_env_setting false, :env_name "MB_TEST_SETTING_2",  :description "Test setting - this only shows up in dev (2)", :default "[Default Value]"}]
   (do (set-settings! nil "S2")
       (for [setting (setting/all)
             :when   (re-find #"^test-setting-\d$" (name (:key setting)))]
@@ -185,12 +185,12 @@
 ;;; ------------------------------------------------------------ BOOLEAN SETTINGS ------------------------------------------------------------
 
 (expect
-  {:value nil, :default nil}
+  {:value nil, :is_env_setting false, :env_name "MB_TEST_BOOLEAN_SETTING", :default nil}
   (user-facing-info-with-db-and-env-var-values :test-boolean-setting nil nil))
 
 ;; boolean settings shouldn't be obfuscated when set by env var
 (expect
-  {:value true, :default "Using $MB_TEST_BOOLEAN_SETTING"}
+  {:value true, :is_env_setting true, :env_name "MB_TEST_BOOLEAN_SETTING", :default "Using $MB_TEST_BOOLEAN_SETTING"}
   (user-facing-info-with-db-and-env-var-values :test-boolean-setting nil "true"))
 
 ;; env var values should be case-insensitive
diff --git a/test/metabase/permissions_test.clj b/test/metabase/permissions_test.clj
index 5171091191b20c4c4f8b67275217dc3a378c1c1d..dfab5d9df3196e4eadaee57cbdfa7f3892654ea8 100644
--- a/test/metabase/permissions_test.clj
+++ b/test/metabase/permissions_test.clj
@@ -18,7 +18,7 @@
              [pulse-channel-recipient :refer [PulseChannelRecipient]]
              [segment :refer [Segment]]
              [table :refer [Table]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.users :as test-users]
             [metabase.util :as u]
diff --git a/test/metabase/query_processor/expand_resolve_test.clj b/test/metabase/query_processor/expand_resolve_test.clj
index cb381f32c1628d74451e7952b93522aee0c199ef..4cbdb6cbd83f1c31de4efe9079bef256ac7f5933 100644
--- a/test/metabase/query_processor/expand_resolve_test.clj
+++ b/test/metabase/query_processor/expand_resolve_test.clj
@@ -1,12 +1,16 @@
 (ns metabase.query-processor.expand-resolve-test
   "Tests query expansion/resolution"
   (:require [expectations :refer :all]
-            (metabase.query-processor [expand :as ql]
-                                      [resolve :as resolve])
-            [metabase.test.data :refer :all]
+            [metabase.query-processor.middleware
+             [expand :as ql]
+             [resolve :as resolve]
+             [source-table :as st]]
+            [metabase.test
+             [data :refer :all]
+             [util :as tu]]
+            [metabase.test.data.dataset-definitions :as defs]
             [metabase.util :as u]))
 
-
 ;; this is here because expectations has issues comparing and object w/ a map and most of the output
 ;; below has objects for the various place holders in the expanded/resolved query
 (defn- obj->map [o]
@@ -19,6 +23,44 @@
                                {k (obj->map v)}))
     :else           o))
 
+(def ^:private resolve'
+  "Testing the resolve middleware requires that the source table be
+  resolved before calling the resolve function. In the query pipeline
+  this is two separate steps. This function combines the function for
+  resolving the source table and the middleware that resolves the rest
+  of the expanded query into a single function to make tests more
+  concise."
+  (comp resolve/resolve (st/resolve-source-table-middleware identity)))
+
+(def ^:private field-ph-defaults
+  {:fk-field-id        nil
+   :datetime-unit      nil
+   :remapped-from      nil
+   :remapped-to        nil
+   :field-display-name nil
+   :binning-strategy   nil
+   :binning-param      nil})
+
+(def ^:private field-defaults
+  {:fk-field-id     nil
+   :visibility-type :normal
+   :position        nil
+   :description     nil
+   :parent-id       nil
+   :parent          nil
+   :schema-name     nil
+   :remapped-from   nil
+   :remapped-to     nil
+   :dimensions      []
+   :values          []})
+
+(def ^:private price-field-values
+  {:field-value-id true
+   :created-at true
+   :updated-at true
+   :values [1 2 3 4]
+   :human-readable-values {}
+   :field-id true})
 
 ;; basic rows query w/ filter
 (expect
@@ -27,57 +69,59 @@
     :type     :query
     :query    {:source-table (id :venues)
                :filter       {:filter-type :>
-                              :field       {:field-id      (id :venues :price)
-                                            :fk-field-id   nil
-                                            :datetime-unit nil}
-                              :value       {:field-placeholder {:field-id      (id :venues :price)
-                                                                :fk-field-id   nil
-                                                                :datetime-unit nil}
+                              :field       (merge field-ph-defaults
+                                                  {:field-id true})
+                              :value       {:field-placeholder (merge field-ph-defaults
+                                                                      {:field-id true})
                                             :value             1}}}}
    ;; resolved form
-   {:database     (id)
-    :type         :query
-    :query        {:source-table {:schema "PUBLIC"
-                                  :name   "VENUES"
-                                  :id     (id :venues)}
-                   :filter       {:filter-type :>
-                                  :field       {:field-id           (id :venues :price)
-                                                :fk-field-id        nil
-                                                :field-name         "PRICE"
-                                                :field-display-name "Price"
-                                                :base-type          :type/Integer
-                                                :special-type       :type/Category
-                                                :visibility-type    :normal
-                                                :table-id           (id :venues)
-                                                :schema-name        "PUBLIC"
-                                                :table-name         "VENUES"
-                                                :position           nil
-                                                :description        nil
-                                                :parent-id          nil
-                                                :parent             nil}
-                                  :value       {:value 1
-                                                :field {:field-id           (id :venues :price)
-                                                        :fk-field-id        nil
-                                                        :field-name         "PRICE"
-                                                        :field-display-name "Price"
-                                                        :base-type          :type/Integer
-                                                        :special-type       :type/Category
-                                                        :visibility-type    :normal
-                                                        :table-id           (id :venues)
-                                                        :schema-name        "PUBLIC"
-                                                        :table-name         "VENUES"
-                                                        :position           nil
-                                                        :description        nil
-                                                        :parent-id          nil
-                                                        :parent             nil}}}
-                   :join-tables  nil}
+   {:database (id)
+    :type     :query
+    :query    {:source-table {:schema "PUBLIC"
+                              :name   "VENUES"
+                              :id     true}
+               :filter       {:filter-type :>
+                              :field       (merge field-defaults
+                                                  {:field-id           true
+                                                   :field-name         "PRICE"
+                                                   :field-display-name "Price"
+                                                   :base-type          :type/Integer
+                                                   :special-type       :type/Category
+                                                   :table-id           (id :venues)
+                                                   :schema-name        "PUBLIC"
+                                                   :table-name         "VENUES"
+                                                   :values             price-field-values
+                                                   :fingerprint        {:global {:distinct-count 4}, :type {:type/Number {:min 1, :max 4, :avg 2.03}}}})
+                              :value       {:value 1
+                                            :field (merge field-defaults
+                                                          {:field-id           true
+                                                           :field-name         "PRICE"
+                                                           :field-display-name "Price"
+                                                           :base-type          :type/Integer
+                                                           :special-type       :type/Category
+                                                           :table-id           (id :venues)
+                                                           :schema-name        "PUBLIC"
+                                                           :table-name         "VENUES"
+                                                           :values             price-field-values
+                                                           :fingerprint        {:global {:distinct-count 4}, :type {:type/Number {:min 1, :max 4, :avg 2.03}}}})}}
+
+
+               :join-tables nil}
     :fk-field-ids #{}
     :table-ids    #{(id :venues)}}]
   (let [expanded-form (ql/expand (wrap-inner-query (query venues
-                                                        (ql/filter (ql/and (ql/> $price 1))))))]
-    (mapv obj->map [expanded-form
-                    (resolve/resolve expanded-form)])))
+                                                     (ql/filter (ql/and (ql/> $price 1))))))]
+    (tu/boolean-ids-and-timestamps
+     (mapv obj->map [expanded-form
+                     (resolve' expanded-form)]))))
 
+(def category-field-values
+  {:values                (defs/field-values defs/test-data-map "categories" "name")
+   :human-readable-values {}
+   :field-value-id        true
+   :field-id              true
+   :created-at            true
+   :updated-at            true})
 
 ;; basic rows query w/ FK filter
 (expect
@@ -86,52 +130,46 @@
     :type     :query
     :query    {:source-table (id :venues)
                :filter       {:filter-type :=
-                              :field       {:field-id      (id :categories :name)
-                                            :fk-field-id   (id :venues :category_id)
-                                            :datetime-unit nil}
-                              :value       {:field-placeholder {:field-id      (id :categories :name)
-                                                                :fk-field-id   (id :venues :category_id)
-                                                                :datetime-unit nil}
+                              :field       (merge field-ph-defaults
+                                                  {:field-id    true
+                                                   :fk-field-id (id :venues :category_id)})
+                              :value       {:field-placeholder (merge field-ph-defaults
+                                                                      {:field-id    true
+                                                                       :fk-field-id (id :venues :category_id)})
                                             :value             "abc"}}}}
    ;; resolved form
    {:database     (id)
     :type         :query
     :query        {:source-table {:schema "PUBLIC"
                                   :name   "VENUES"
-                                  :id     (id :venues)}
+                                  :id     true}
                    :filter       {:filter-type :=
-                                  :field       {:field-id           (id :categories :name)
-                                                :fk-field-id        (id :venues :category_id)
-                                                :field-name         "NAME"
-                                                :field-display-name "Name"
-                                                :base-type          :type/Text
-                                                :special-type       :type/Name
-                                                :visibility-type    :normal
-                                                :table-id           (id :categories)
-                                                :schema-name        nil
-                                                :table-name         "CATEGORIES__via__CATEGORY_ID"
-                                                :position           nil
-                                                :description        nil
-                                                :parent-id          nil
-                                                :parent             nil}
+                                  :field       (merge field-defaults
+                                                      {:field-id           true
+                                                       :fk-field-id        (id :venues :category_id)
+                                                       :field-name         "NAME"
+                                                       :field-display-name "Name"
+                                                       :base-type          :type/Text
+                                                       :special-type       :type/Name
+                                                       :table-id           (id :categories)
+                                                       :table-name         "CATEGORIES__via__CATEGORY_ID"
+                                                       :values             category-field-values
+                                                       :fingerprint        {:global {:distinct-count 75}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 8.333333333333334}}}})
                                   :value       {:value "abc"
-                                                :field {:field-id           (id :categories :name)
-                                                        :fk-field-id        (id :venues :category_id)
-                                                        :field-name         "NAME"
-                                                        :field-display-name "Name"
-                                                        :base-type          :type/Text
-                                                        :special-type       :type/Name
-                                                        :visibility-type    :normal
-                                                        :table-id           (id :categories)
-                                                        :schema-name        nil
-                                                        :table-name         "CATEGORIES__via__CATEGORY_ID"
-                                                        :position           nil
-                                                        :description        nil
-                                                        :parent-id          nil
-                                                        :parent             nil}}}
-                   :join-tables  [{:source-field {:field-id   (id :venues :category_id)
+                                                :field (merge field-defaults
+                                                              {:field-id           true
+                                                               :fk-field-id        (id :venues :category_id)
+                                                               :field-name         "NAME"
+                                                               :field-display-name "Name"
+                                                               :base-type          :type/Text
+                                                               :special-type       :type/Name
+                                                               :table-id           (id :categories)
+                                                               :table-name         "CATEGORIES__via__CATEGORY_ID"
+                                                               :values             category-field-values
+                                                               :fingerprint        {:global {:distinct-count 75}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 8.333333333333334}}}})}}
+                   :join-tables  [{:source-field {:field-id   true
                                                   :field-name "CATEGORY_ID"}
-                                   :pk-field     {:field-id   (id :categories :id)
+                                   :pk-field     {:field-id   true
                                                   :field-name "ID"}
                                    :table-id     (id :categories)
                                    :table-name   "CATEGORIES"
@@ -139,11 +177,12 @@
                                    :join-alias   "CATEGORIES__via__CATEGORY_ID"}]}
     :fk-field-ids #{(id :venues :category_id)}
     :table-ids    #{(id :categories)}}]
-  (let [expanded-form (ql/expand (wrap-inner-query (query venues
-                                                        (ql/filter (ql/= $category_id->categories.name
-                                                                         "abc")))))]
-    (mapv obj->map [expanded-form
-                    (resolve/resolve expanded-form)])))
+  (tu/boolean-ids-and-timestamps
+   (let [expanded-form (ql/expand (wrap-inner-query (query venues
+                                                      (ql/filter (ql/= $category_id->categories.name
+                                                                       "abc")))))]
+     (mapv obj->map [expanded-form
+                     (resolve' expanded-form)]))))
 
 
 ;; basic rows query w/ FK filter on datetime
@@ -153,12 +192,14 @@
     :type     :query
     :query    {:source-table (id :checkins)
                :filter       {:filter-type :>
-                              :field       {:field-id      (id :users :last_login)
-                                            :fk-field-id   (id :checkins :user_id)
-                                            :datetime-unit :year}
-                              :value       {:field-placeholder {:field-id      (id :users :last_login)
-                                                                :fk-field-id   (id :checkins :user_id)
-                                                                :datetime-unit :year}
+                              :field       (merge field-ph-defaults
+                                                  {:field-id      (id :users :last_login)
+                                                   :fk-field-id   (id :checkins :user_id)
+                                                   :datetime-unit :year})
+                              :value       {:field-placeholder (merge field-ph-defaults
+                                                                      {:field-id      (id :users :last_login)
+                                                                       :fk-field-id   (id :checkins :user_id)
+                                                                       :datetime-unit :year})
                                             :value             "1980-01-01"}}}}
    ;; resolved form
    {:database     (id)
@@ -167,23 +208,21 @@
                                   :name   "CHECKINS"
                                   :id     (id :checkins)}
                    :filter       {:filter-type :>
-                                  :field       {:field {:field-id           (id :users :last_login)
-                                                        :fk-field-id        (id :checkins :user_id)
-                                                        :field-name         "LAST_LOGIN"
-                                                        :field-display-name "Last Login"
-                                                        :base-type          :type/DateTime
-                                                        :special-type       nil
-                                                        :visibility-type    :normal
-                                                        :table-id           (id :users)
-                                                        :schema-name        nil
-                                                        :table-name         "USERS__via__USER_ID"
-                                                        :position           nil
-                                                        :description        nil
-                                                        :parent-id          nil
-                                                        :parent             nil}
+                                  :field       {:field (merge field-defaults
+                                                              {:field-id           (id :users :last_login)
+                                                               :fk-field-id        (id :checkins :user_id)
+                                                               :field-name         "LAST_LOGIN"
+                                                               :field-display-name "Last Login"
+                                                               :base-type          :type/DateTime
+                                                               :special-type       nil
+                                                               :table-id           (id :users)
+                                                               :table-name         "USERS__via__USER_ID"
+                                                               :fingerprint        {:global {:distinct-count 15}}})
                                                 :unit  :year}
                                   :value       {:value (u/->Timestamp "1980-01-01")
-                                                :field {:field {:field-id           (id :users :last_login)
+                                                :field {:field
+                                                        (merge field-defaults
+                                                               {:field-id           (id :users :last_login)
                                                                 :fk-field-id        (id :checkins :user_id)
                                                                 :field-name         "LAST_LOGIN"
                                                                 :field-display-name "Last Login"
@@ -191,13 +230,9 @@
                                                                 :special-type       nil
                                                                 :visibility-type    :normal
                                                                 :table-id           (id :users)
-                                                                :schema-name        nil
                                                                 :table-name         "USERS__via__USER_ID"
-                                                                :position           nil
-                                                                :description        nil
-                                                                :parent-id          nil
-                                                                :parent             nil}
-                                                        :unit  :year}}}
+                                                                :fingerprint        {:global {:distinct-count 15}}})
+                                                        :unit :year}}}
                    :join-tables  [{:source-field {:field-id   (id :checkins :user_id)
                                                   :field-name "USER_ID"}
                                    :pk-field     {:field-id   (id :users :id)
@@ -209,10 +244,10 @@
     :fk-field-ids #{(id :checkins :user_id)}
     :table-ids    #{(id :users)}}]
   (let [expanded-form (ql/expand (wrap-inner-query (query checkins
-                                                        (ql/filter (ql/> (ql/datetime-field $user_id->users.last_login :year)
-                                                                         "1980-01-01")))))]
+                                                     (ql/filter (ql/> (ql/datetime-field $user_id->users.last_login :year)
+                                                                      "1980-01-01")))))]
     (mapv obj->map [expanded-form
-                    (resolve/resolve expanded-form)])))
+                    (resolve' expanded-form)])))
 
 
 ;; sum aggregation w/ datetime breakout
@@ -223,52 +258,45 @@
     :query    {:source-table (id :checkins)
                :aggregation  [{:aggregation-type :sum
                                :custom-name      nil
-                               :field            {:field-id      (id :venues :price)
-                                                  :fk-field-id   (id :checkins :venue_id)
-                                                  :datetime-unit nil}}]
-               :breakout     [{:field-id      (id :checkins :date)
-                               :fk-field-id   nil
-                               :datetime-unit :day-of-week}]}}
+                               :field            (merge field-ph-defaults
+                                                        {:field-id           true
+                                                         :fk-field-id        (id :checkins :venue_id)})}]
+               :breakout     [(merge field-ph-defaults
+                                     {:field-id           true
+                                      :datetime-unit      :day-of-week})]}}
    ;; resolved form
    {:database     (id)
     :type         :query
     :query        {:source-table {:schema "PUBLIC"
                                   :name   "CHECKINS"
-                                  :id     (id :checkins)}
+                                  :id     true}
                    :aggregation  [{:aggregation-type :sum
                                    :custom-name      nil
-                                   :field            {:description        nil
-                                                      :base-type          :type/Integer
-                                                      :parent             nil
-                                                      :table-id           (id :venues)
-                                                      :special-type       :type/Category
-                                                      :field-name         "PRICE"
-                                                      :field-display-name "Price"
-                                                      :parent-id          nil
-                                                      :visibility-type    :normal
-                                                      :position           nil
-                                                      :field-id           (id :venues :price)
-                                                      :fk-field-id        (id :checkins :venue_id)
-                                                      :table-name         "VENUES__via__VENUE_ID"
-                                                      :schema-name        nil}}]
-                   :breakout     [{:field {:description        nil
-                                           :base-type          :type/Date
-                                           :parent             nil
-                                           :table-id           (id :checkins)
-                                           :special-type       nil
-                                           :field-name         "DATE"
-                                           :field-display-name "Date"
-                                           :parent-id          nil
-                                           :visibility-type    :normal
-                                           :position           nil
-                                           :field-id           (id :checkins :date)
-                                           :fk-field-id        nil
-                                           :table-name         "CHECKINS"
-                                           :schema-name        "PUBLIC"}
+                                   :field            (merge field-defaults
+                                                            {:base-type          :type/Integer
+                                                             :table-id           (id :venues)
+                                                             :special-type       :type/Category
+                                                             :field-name         "PRICE"
+                                                             :field-display-name "Price"
+                                                             :field-id           true
+                                                             :fk-field-id        (id :checkins :venue_id)
+                                                             :table-name         "VENUES__via__VENUE_ID"
+                                                             :values             price-field-values
+                                                             :fingerprint {:global {:distinct-count 4}, :type {:type/Number {:min 1, :max 4, :avg 2.03}}}})}]
+                   :breakout     [{:field (merge field-defaults
+                                                 {:base-type          :type/Date
+                                                  :table-id           (id :checkins)
+                                                  :special-type       nil
+                                                  :field-name         "DATE"
+                                                  :field-display-name "Date"
+                                                  :field-id           true
+                                                  :table-name         "CHECKINS"
+                                                  :schema-name        "PUBLIC"
+                                                  :fingerprint        {:global {:distinct-count 618}}})
                                    :unit  :day-of-week}]
-                   :join-tables  [{:source-field {:field-id   (id :checkins :venue_id)
+                   :join-tables  [{:source-field {:field-id   true
                                                   :field-name "VENUE_ID"}
-                                   :pk-field     {:field-id   (id :venues :id)
+                                   :pk-field     {:field-id   true
                                                   :field-name "ID"}
                                    :table-id     (id :venues)
                                    :table-name   "VENUES"
@@ -279,5 +307,6 @@
   (let [expanded-form (ql/expand (wrap-inner-query (query checkins
                                                      (ql/aggregation (ql/sum $venue_id->venues.price))
                                                      (ql/breakout (ql/datetime-field $checkins.date :day-of-week)))))]
-    (mapv obj->map [expanded-form
-                    (resolve/resolve expanded-form)])))
+    (tu/boolean-ids-and-timestamps
+     (mapv obj->map [expanded-form
+                     (resolve' expanded-form)]))))
diff --git a/test/metabase/query_processor/middleware/add_dimension_projections_test.clj b/test/metabase/query_processor/middleware/add_dimension_projections_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..9444115b60d15057a64168763fc3cd56e0b033d4
--- /dev/null
+++ b/test/metabase/query_processor/middleware/add_dimension_projections_test.clj
@@ -0,0 +1,204 @@
+(ns metabase.query-processor.middleware.add-dimension-projections-test
+  "Tests for the Query Processor cache."
+  (:require [expectations :refer :all]
+            [metabase.query-processor.middleware.add-dimension-projections :refer :all]
+            [metabase.query-processor.interface :as i]
+            [metabase.test.util :as tu]
+            [toucan.db :as db]
+            [metabase.query-processor.middleware.expand :as ql]))
+
+(tu/resolve-private-vars metabase.query-processor.middleware.add-dimension-projections remap-results add-fk-remaps)
+
+(def ^:private col-defaults
+  {:description nil
+     :source :fields,
+     :extra_info {},
+     :fk_field_id nil,
+     :values [],
+     :dimensions [],
+     :visibility_type :normal,
+     :target nil,
+     :remapped_from nil,
+     :remapped_to nil})
+
+(def ^:private example-resultset
+  {:rows
+   [[1 "Red Medicine" 4 3]
+    [2 "Stout Burgers & Beers" 11 2]
+    [3 "The Apple Pan" 11 2]
+    [4 "Wurstküche" 29 2]
+    [5 "Brite Spot Family Restaurant" 20 2]],
+   :columns ["ID" "NAME" "CATEGORY_ID" "PRICE"],
+   :cols
+   (mapv #(merge col-defaults %)
+         [
+          ;; 0
+          {:table_id 4,
+           :schema_name "PUBLIC",
+           :special_type :type/PK,
+           :name "ID",
+           :id 12,
+           :display_name "ID",
+           :base_type :type/BigInteger}
+          ;; 1
+          {:table_id 4,
+           :schema_name "PUBLIC",
+           :special_type :type/Name,
+           :name "NAME",
+           :id 15,
+           :display_name "Name",
+           :base_type :type/Text}
+          ;; 2
+          {:table_id 4,
+           :schema_name "PUBLIC",
+           :special_type :type/FK,
+           :name "CATEGORY_ID",
+           :extra_info {:target_table_id 1},
+           :id 11,
+           :values {:field-value-id 1, :human-readable-values ["Foo" "Bar" "Baz" "Qux"],
+                    :values [4 11 29 20], :field-id 33}
+           :dimensions {:dimension-id 1 :dimension-type :internal :dimension-name "Foo" :field-id 10}
+           :display_name "Category ID",
+           :base_type :type/Integer}
+          ;; 3
+          {:table_id 4,
+           :schema_name "PUBLIC",
+           :special_type :type/Category,
+           :name "PRICE",
+           :id 16,
+           :display_name "Price",
+           :base_type :type/Integer}])})
+
+(expect
+  (-> example-resultset
+      (assoc :rows [[1 "Red Medicine" 4 3 "Foo"]
+                    [2 "Stout Burgers & Beers" 11 2 "Bar"]
+                    [3 "The Apple Pan" 11 2 "Bar"]
+                    [4 "Wurstküche" 29 2 "Baz"]
+                    [5 "Brite Spot Family Restaurant" 20 2 "Qux"]])
+      (update :columns conj "Foo")
+      (update :cols (fn [cols]
+                      (conj
+                       (mapv (fn [col]
+                               (let [new-col (dissoc col :dimensions :values)]
+                                 (if (= "CATEGORY_ID" (:name new-col))
+                                   (assoc new-col
+                                     :remapped_to "Foo"
+                                     :remapped_from nil)
+                                   new-col)))
+                             cols)
+                       {:description nil,
+                        :id nil,
+                        :table_id nil,
+                        :expression-name "Foo",
+                        :source :fields,
+                        :name "Foo",
+                        :display_name "Foo",
+                        :target nil,
+                        :extra_info {}
+                        :remapped_from "CATEGORY_ID"
+                        :remapped_to nil}))))
+  (remap-results example-resultset))
+
+(def ^:private field-defaults
+  {:dimensions [],
+   :values [],
+   :visibility-type :normal})
+
+(def ^:private example-query
+  {:query
+   {:order-by nil
+    :fields
+    (mapv #(merge field-defaults %)
+          [{:description "A unique internal identifier for the review. Should not be used externally.",
+            :base-type :type/BigInteger,
+            :table-id 4,
+            :special-type :type/PK,
+            :field-name "ID",
+            :field-display-name "ID",
+            :position 0,
+            :field-id 31,
+            :table-name "REVIEWS",
+            :schema-name "PUBLIC"}
+           {:description "The review the user left. Limited to 2000 characters.",
+            :base-type :type/Text,
+            :table-id 4,
+            :special-type :type/Description,
+            :field-name "BODY",
+            :field-display-name "Body",
+            :position 0,
+            :field-id 29,
+            :table-name "REVIEWS",
+            :schema-name "PUBLIC"}
+           {:field-id 32,
+            :field-name "PRODUCT_ID",
+            :field-display-name "Product ID",
+            :base-type :type/Integer,
+            :special-type :type/FK,
+            :table-id 4,
+            :schema-name "PUBLIC",
+            :table-name "REVIEWS",
+            :position 0,
+            :fk-field-id nil,
+            :description "The product the review was for",
+            :parent-id nil,
+            :parent nil,
+            :remapped-from nil,
+            :remapped-to nil,
+            :dimensions {:dimension-id 2, :dimension-name "Product", :field-id 32, :human-readable-field-id 27, :dimension-type :external}}])}})
+
+(expect
+  (update-in example-query [:query :fields]
+             conj (i/map->FieldPlaceholder {:fk-field-id 32
+                                            :field-id 27
+                                            :remapped-from "PRODUCT_ID"
+                                            :remapped-to nil
+                                            :field-display-name "Product"}))
+  (add-fk-remaps example-query))
+
+(expect
+  (-> example-query
+      (assoc-in [:query :order-by] [{:direction :ascending
+                                     :field (i/map->FieldPlaceholder {:fk-field-id 32
+                                                                      :field-id 27
+                                                                      :remapped-from "PRODUCT_ID"
+                                                                      :remapped-to nil
+                                                                      :field-display-name "Product"})}])
+      (update-in [:query :fields]
+                 conj (i/map->FieldPlaceholder {:fk-field-id 32
+                                                :field-id 27
+                                                :remapped-from "PRODUCT_ID"
+                                                :remapped-to nil
+                                                :field-display-name "Product"})))
+  (-> example-query
+      (assoc-in [:query :order-by] [{:direction :ascending :field {:field-id 32}}])
+      add-fk-remaps))
+
+(def ^:private external-remapped-result
+  (-> example-resultset
+      (update :cols conj {:description "The name of the product as it should be displayed to customers.",
+                          :table_id 3,
+                          :schema_name nil,
+                          :special_type :type/Category,
+                          :name "CATEGORY",
+                          :source :fields,
+                          :remapped_from "CATEGORY_ID",
+                          :extra_info {},
+                          :fk_field_id 32,
+                          :remapped_to nil,
+                          :id 27,
+                          :visibility_type :normal,
+                          :target nil,
+                          :display_name "Category",
+                          :base_type :type/Text})
+      (update-in [:cols 2]
+                 (fn [col]
+                   (-> col
+                       (update :values merge {:human-readable-values []})
+                       (update :dimensions merge {:dimension-type :external :human-readable_field-id 27}))))))
+
+(expect
+  (-> external-remapped-result
+      (update :cols (fn [col] (mapv #(dissoc % :dimensions :values) col)))
+      (update-in [:cols 2] assoc :remapped_to "CATEGORY"))
+  (remap-results external-remapped-result))
diff --git a/test/metabase/query_processor/middleware/binning_test.clj b/test/metabase/query_processor/middleware/binning_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..4756b58c9d3b110961c773e2b958825b285aa9d8
--- /dev/null
+++ b/test/metabase/query_processor/middleware/binning_test.clj
@@ -0,0 +1,91 @@
+(ns metabase.query-processor.middleware.binning-test
+  (:require [expectations :refer [expect]]
+            [metabase.query-processor.middleware
+             [binning :refer :all]
+             [expand :as ql]]
+            [metabase.test.util :as tu]))
+
+(tu/resolve-private-vars metabase.query-processor.middleware.binning filter->field-map extract-bounds ceil-to floor-to nicer-bin-width nicer-bounds nicer-breakout)
+
+(expect
+  {}
+  (filter->field-map (ql/and
+                      (ql/= (ql/field-id 1) 10)
+                      (ql/= (ql/field-id 2) 10))))
+
+(expect
+  {1 [(ql/< (ql/field-id 1) 10) (ql/> (ql/field-id 1) 1)]
+   2 [(ql/> (ql/field-id 2) 20) (ql/< (ql/field-id 2) 10)]
+   3 [(ql/between (ql/field-id 3) 5 10)]}
+  (filter->field-map (ql/and
+                      (ql/< (ql/field-id 1) 10)
+                      (ql/> (ql/field-id 1) 1)
+                      (ql/> (ql/field-id 2) 20)
+                      (ql/< (ql/field-id 2) 10)
+                      (ql/between (ql/field-id 3) 5 10))))
+
+(expect
+  [[1.0 1.0 1.0]
+   [1.0 2.0 2.0]
+   [15.0 15.0 30.0]]
+  [(mapv (partial floor-to 1.0) [1 1.1 1.8])
+   (mapv (partial ceil-to 1.0) [1 1.1 1.8])
+   (mapv (partial ceil-to 15.0) [1.0 15.0 16.0])])
+
+(expect
+  [20 2000]
+  [(nicer-bin-width 27 135 8)
+   (nicer-bin-width -0.0002 10000.34 8)])
+
+(def ^:private test-min-max-field
+  {:field-id 1 :fingerprint {:type {:type/Number {:min 100 :max 1000}}}})
+
+(expect
+  [1 10]
+  (extract-bounds test-min-max-field
+                  {1 [(ql/> (ql/field-id 1) 1) (ql/< (ql/field-id 1) 10)]}))
+
+(expect
+  [1 10]
+  (extract-bounds test-min-max-field
+                  {1 [(ql/between (ql/field-id 1) 1 10)]}))
+
+(expect
+  [100 1000]
+  (extract-bounds test-min-max-field
+                  {}))
+
+(expect
+  [500 1000]
+  (extract-bounds test-min-max-field
+                  {1 [(ql/> (ql/field-id 1) 500)]}))
+
+(expect
+  [100 500]
+  (extract-bounds test-min-max-field
+                  {1 [(ql/< (ql/field-id 1) 500)]}))
+
+(expect
+  [600 700]
+  (extract-bounds test-min-max-field
+                  {1 [(ql/> (ql/field-id 1) 200)
+                      (ql/< (ql/field-id 1) 800)
+                      (ql/between (ql/field-id 1) 600 700)]}))
+
+(expect
+  [[0.0 1000.0 125.0 8]
+   [200N 1600N 200 8]
+   [0.0 1200.0 200 8]
+   [0.0 1005.0 15.0 67]]
+  [((juxt :min-value :max-value :bin-width :num-bins)
+         (nicer-breakout {:field-id 1 :min-value 100 :max-value 1000
+                          :strategy :num-bins :num-bins 8}))
+   ((juxt :min-value :max-value :bin-width :num-bins)
+         (nicer-breakout {:field-id 1 :min-value 200 :max-value 1600
+                          :strategy :num-bins :num-bins 8}))
+   ((juxt :min-value :max-value :bin-width :num-bins)
+         (nicer-breakout {:field-id 1 :min-value 9 :max-value 1002
+                          :strategy :num-bins :num-bins 8}))
+   ((juxt :min-value :max-value :bin-width :num-bins)
+         (nicer-breakout {:field-id 1 :min-value 9 :max-value 1002
+                          :strategy :bin-width :bin-width 15.0}))])
diff --git a/test/metabase/query_processor/middleware/expand_macros_test.clj b/test/metabase/query_processor/middleware/expand_macros_test.clj
index c98831e5a0f682cc3a879922a1c1b84b4dba56cb..5eb01954583674549d15b19b8575ca9840fbaf51 100644
--- a/test/metabase/query_processor/middleware/expand_macros_test.clj
+++ b/test/metabase/query_processor/middleware/expand_macros_test.clj
@@ -9,7 +9,7 @@
              [metric :refer [Metric]]
              [segment :refer [Segment]]
              [table :refer [Table]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.query-processor.middleware.expand-macros :refer :all]
             [metabase.test
              [data :as data]
diff --git a/test/metabase/query_processor/middleware/parameters/mbql_test.clj b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
index ef6cb90b51e480f279c1da508c06e28a972d6c02..c250c40a636e2189e4df849932bb8ddfadc13f9d 100644
--- a/test/metabase/query_processor/middleware/parameters/mbql_test.clj
+++ b/test/metabase/query_processor/middleware/parameters/mbql_test.clj
@@ -4,7 +4,7 @@
             [metabase
              [query-processor :as qp]
              [query-processor-test :refer [first-row format-rows-by non-timeseries-engines]]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.query-processor.middleware.parameters.mbql :refer :all]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets]))
diff --git a/test/metabase/query_processor_test.clj b/test/metabase/query_processor_test.clj
index f2e0f3f162ff749d26a91b8527f6f1b8768fd32e..8158d920f2f75de3ed9c1be9fbc1987e24aee791 100644
--- a/test/metabase/query_processor_test.clj
+++ b/test/metabase/query_processor_test.clj
@@ -4,12 +4,12 @@
    Event-based DBs such as Druid are tested in `metabase.driver.event-query-processor-test`."
   (:require [clojure.set :as set]
             [clojure.tools.logging :as log]
+            [medley.core :as m]
             [metabase
              [driver :as driver]
              [util :as u]]
             [metabase.test.data :as data]
-            [metabase.test.data.datasets :as datasets]
-            [medley.core :as m]))
+            [metabase.test.data.datasets :as datasets]))
 
 ;; make sure all the driver test extension namespaces are loaded <3
 ;; if this isn't done some things will get loaded at the wrong time which can end up causing test databases to be created more than once, which fails
@@ -56,20 +56,20 @@
 
 (defmacro qp-expect-with-all-engines
   {:style/indent 0}
-  [data q-form & post-process-fns]
+  [data query-form & post-process-fns]
   `(expect-with-non-timeseries-dbs
      {:status    :completed
       :row_count ~(count (:rows data))
       :data      ~data}
-     (-> ~q-form
+     (-> ~query-form
          ~@post-process-fns)))
 
-(defmacro qp-expect-with-engines [datasets data q-form]
+(defmacro qp-expect-with-engines [datasets data query-form]
   `(datasets/expect-with-engines ~datasets
      {:status    :completed
       :row_count ~(count (:rows data))
       :data      ~data}
-     ~q-form))
+     ~query-form))
 
 
 (defn ->columns
@@ -91,11 +91,13 @@
    :visibility_type :normal
    :schema_name     (data/default-schema)
    :source          :fields
-   :fk_field_id     nil})
+   :fk_field_id     nil
+   :remapped_from   nil
+   :remapped_to     nil})
 
 (defn- target-field [field]
   (when (data/fks-supported?)
-    (dissoc field :target :extra_info :schema_name :source :fk_field_id)))
+    (dissoc field :target :extra_info :schema_name :source :fk_field_id :remapped_from :remapped_to :fingerprint)))
 
 (defn categories-col
   "Return column information for the `categories` column named by keyword COL."
@@ -112,7 +114,8 @@
      :name {:special_type :type/Name
             :base_type    (data/expected-base-type->actual :type/Text)
             :name         (data/format-name "name")
-            :display_name "Name"})))
+            :display_name "Name"
+            :fingerprint  {:global {:distinct-count 75}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 8.333}}}})))
 
 ;; #### users
 (defn users-col
@@ -126,16 +129,19 @@
      :id         {:special_type :type/PK
                   :base_type    (data/id-field-type)
                   :name         (data/format-name "id")
-                  :display_name "ID"}
+                  :display_name "ID"
+                  :fingerprint  {:global {:distinct-count 15}, :type {:type/Number {:min 1, :max 15, :avg 8.0}}}}
      :name       {:special_type :type/Name
                   :base_type    (data/expected-base-type->actual :type/Text)
                   :name         (data/format-name "name")
-                  :display_name "Name"}
+                  :display_name "Name"
+                  :fingerprint  {:global {:distinct-count 15}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 13.267}}}}
      :last_login {:special_type nil
                   :base_type    (data/expected-base-type->actual :type/DateTime)
                   :name         (data/format-name "last_login")
                   :display_name "Last Login"
-                  :unit         :default})))
+                  :unit         :default
+                  :fingerprint  {:global {:distinct-count 15}}})))
 
 ;; #### venues
 (defn venues-columns
@@ -154,7 +160,8 @@
      :id          {:special_type :type/PK
                    :base_type    (data/id-field-type)
                    :name         (data/format-name "id")
-                   :display_name "ID"}
+                   :display_name "ID"
+                   :fingerprint  {:global {:distinct-count 100}, :type {:type/Number {:min 1, :max 100, :avg 50.5}}}}
      :category_id {:extra_info   (if (data/fks-supported?)
                                    {:target_table_id (data/id :categories)}
                                    {})
@@ -164,23 +171,28 @@
                                    :type/Category)
                    :base_type    (data/expected-base-type->actual :type/Integer)
                    :name         (data/format-name "category_id")
-                   :display_name "Category ID"}
+                   :display_name "Category ID"
+                   :fingerprint  {:global {:distinct-count 28}, :type {:type/Number {:min 2, :max 74, :avg 29.98}}}}
      :price       {:special_type :type/Category
                    :base_type    (data/expected-base-type->actual :type/Integer)
                    :name         (data/format-name "price")
-                   :display_name "Price"}
+                   :display_name "Price"
+                   :fingerprint  {:global {:distinct-count 4}, :type {:type/Number {:min 1, :max 4, :avg 2.03}}}}
      :longitude   {:special_type :type/Longitude
                    :base_type    (data/expected-base-type->actual :type/Float)
                    :name         (data/format-name "longitude")
+                   :fingerprint  {:global {:distinct-count 84}, :type {:type/Number {:min -165.374, :max -73.953, :avg -115.998}}}
                    :display_name "Longitude"}
      :latitude    {:special_type :type/Latitude
                    :base_type    (data/expected-base-type->actual :type/Float)
                    :name         (data/format-name "latitude")
-                   :display_name "Latitude"}
+                   :display_name "Latitude"
+                   :fingerprint  {:global {:distinct-count 94}, :type {:type/Number {:min 10.065, :max 40.779, :avg 35.506}}}}
      :name        {:special_type :type/Name
                    :base_type    (data/expected-base-type->actual :type/Text)
                    :name         (data/format-name "name")
-                   :display_name "Name"})))
+                   :display_name "Name"
+                   :fingerprint  {:global {:distinct-count 100}, :type {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 15.63}}}})))
 
 (defn venues-cols
   "`cols` information for all the columns in `venues`."
@@ -209,7 +221,8 @@
                                 :type/Category)
                 :base_type    (data/expected-base-type->actual :type/Integer)
                 :name         (data/format-name "venue_id")
-                :display_name "Venue ID"}
+                :display_name "Venue ID"
+                :fingerprint  {:global {:distinct-count 100}, :type {:type/Number {:min 1, :max 100, :avg 51.965}}}}
      :user_id  {:extra_info   (if (data/fks-supported?) {:target_table_id (data/id :users)}
                                   {})
                 :target       (target-field (users-col :id))
@@ -218,7 +231,8 @@
                                 :type/Category)
                 :base_type    (data/expected-base-type->actual :type/Integer)
                 :name         (data/format-name "user_id")
-                :display_name "User ID"})))
+                :display_name "User ID"
+                :fingerprint  {:global {:distinct-count 15}, :type {:type/Number {:min 1, :max 15, :avg 7.929}}}})))
 
 
 ;;; #### aggregate columns
@@ -231,28 +245,32 @@
   {:arglists '([ag-col-kw] [ag-col-kw field])}
   ([ag-col-kw]
    (case ag-col-kw
-     :count  {:base_type    :type/Integer
-              :special_type :type/Number
-              :name         "count"
-              :display_name "count"
-              :id           nil
-              :table_id     nil
-              :description  nil
-              :source       :aggregation
-              :extra_info   {}
-              :target       nil}))
+     :count  {:base_type       :type/Integer
+              :special_type    :type/Number
+              :name            "count"
+              :display_name    "count"
+              :id              nil
+              :table_id        nil
+              :description     nil
+              :source          :aggregation
+              :extra_info      {}
+              :target          nil
+              :remapped_from   nil
+              :remapped_to     nil}))
   ([ag-col-kw {:keys [base_type special_type]}]
    {:pre [base_type special_type]}
    {:base_type    base_type
-    :special_type special_type
-    :id           nil
-    :table_id     nil
-    :description  nil
-    :source       :aggregation
-    :extra_info   {}
-    :target       nil
-    :name         (name ag-col-kw)
-    :display_name (name ag-col-kw)}))
+    :special_type  special_type
+    :id            nil
+    :table_id      nil
+    :description   nil
+    :source        :aggregation
+    :extra_info    {}
+    :target        nil
+    :name          (name ag-col-kw)
+    :display_name  (name ag-col-kw)
+    :remapped_from nil
+    :remapped_to   nil}))
 
 (defn breakout-col [column]
   (assoc column :source :breakout))
diff --git a/test/metabase/query_processor_test/aggregation_test.clj b/test/metabase/query_processor_test/aggregation_test.clj
index 79933406eff8f67ad700803d57d3a3dc17cd913a..a32bef8a47249b04bd742acd897f8111a87ee5dd 100644
--- a/test/metabase/query_processor_test/aggregation_test.clj
+++ b/test/metabase/query_processor_test/aggregation_test.clj
@@ -3,9 +3,10 @@
   (:require [metabase
              [query-processor-test :refer :all]
              [util :as u]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
-            [metabase.test.data.datasets :as datasets]))
+            [metabase.test.data.datasets :as datasets]
+            [metabase.test.util :as tu]))
 
 ;;; ------------------------------------------------------------ "COUNT" AGGREGATION ------------------------------------------------------------
 
@@ -72,11 +73,12 @@
      :columns     (venues-columns)
      :cols        (venues-cols)
      :native_form true}
-    (->> (data/run-query venues
+    (-> (data/run-query venues
            (ql/limit 10)
            (ql/order-by (ql/asc $id)))
-         booleanize-native-form
-         formatted-venues-rows))
+        booleanize-native-form
+        formatted-venues-rows
+        tu/round-fingerprint-cols))
 
 
 ;;; ------------------------------------------------------------ STDDEV AGGREGATION ------------------------------------------------------------
@@ -230,7 +232,8 @@
          (ql/aggregation (ql/cum-sum $id))
          (ql/breakout $name))
        booleanize-native-form
-       (format-rows-by [str int])))
+       (format-rows-by [str int])
+       tu/round-fingerprint-cols))
 
 
 ;;; Cumulative sum w/ a different breakout field that requires grouping
@@ -295,7 +298,8 @@
          (ql/aggregation (ql/cum-count))
          (ql/breakout $name))
        booleanize-native-form
-       (format-rows-by [str int])))
+       (format-rows-by [str int])
+       tu/round-fingerprint-cols))
 
 
 ;;; Cumulative count w/ a different breakout field that requires grouping
diff --git a/test/metabase/query_processor_test/breakout_test.clj b/test/metabase/query_processor_test/breakout_test.clj
index 2d8ecbf89ceebf6be02d1828c73ce7fb828c7ec1..6b498d543d4840f00d9642c9ca2ea8c910a086eb 100644
--- a/test/metabase/query_processor_test/breakout_test.clj
+++ b/test/metabase/query_processor_test/breakout_test.clj
@@ -1,8 +1,23 @@
 (ns metabase.query-processor-test.breakout-test
   "Tests for the `:breakout` clause."
-  (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
-            [metabase.test.data :as data]))
+  (:require [cheshire.core :as json]
+            [metabase
+             [query-processor-test :refer :all]
+             [util :as u]]
+            [metabase.models
+             [dimension :refer [Dimension]]
+             [field :refer [Field]]
+             [field-values :refer [FieldValues]]]
+            [metabase.query-processor.middleware.expand :as ql]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
+            [metabase.test.data
+             [dataset-definitions :as defs]
+             [datasets :as datasets]]
+            [toucan.db :as db]))
+
+(tu/resolve-private-vars metabase.query-processor.middleware.add-dimension-projections create-remapped-col)
 
 ;;; single column
 (qp-expect-with-all-engines
@@ -69,3 +84,170 @@
          (ql/limit 10))
        booleanize-native-form
        (format-rows-by [int int int])))
+
+(qp-expect-with-all-engines
+  {:rows  [[2 8 "Artisan"]
+           [3 2 "Asian"]
+           [4 2 "BBQ"]
+           [5 7 "Bakery"]
+           [6 2 "Bar"]]
+   :columns [(data/format-name "category_id")
+             "count"
+             "Foo"]
+   :cols    [(assoc (breakout-col (venues-col :category_id))
+               :remapped_to "Foo")
+             (aggregate-col :count)
+             (create-remapped-col "Foo" (data/format-name "category_id"))]
+   :native_form true}
+  (data/with-data
+    (fn []
+      (let [venue-names (defs/field-values defs/test-data-map "categories" "name")]
+        [(db/insert! Dimension {:field_id (data/id :venues :category_id)
+                                :name "Foo"
+                                :type :internal})
+         (db/insert! FieldValues {:field_id (data/id :venues :category_id)
+                                  :values (json/generate-string (range 0 (count venue-names)))
+                                  :human_readable_values (json/generate-string venue-names)})]))
+    (->> (data/run-query venues
+           (ql/aggregation (ql/count))
+           (ql/breakout $category_id)
+           (ql/limit 5))
+         booleanize-native-form
+         (format-rows-by [int int str]))))
+
+(datasets/expect-with-engines (engines-that-support :foreign-keys)
+  [["Wine Bar" "Thai" "Thai" "Thai" "Thai" "Steakhouse" "Steakhouse" "Steakhouse" "Steakhouse" "Southern"]
+   ["American" "American" "American" "American" "American" "American" "American" "American" "Artisan" "Artisan"]]
+  (data/with-data
+    (fn []
+      [(db/insert! Dimension {:field_id (data/id :venues :category_id)
+                              :name "Foo"
+                              :type :external
+                              :human_readable_field_id (data/id :categories :name)})])
+    [(->> (data/run-query venues
+             (ql/order-by (ql/desc $category_id))
+             (ql/limit 10))
+           rows
+           (map last))
+     (->> (data/run-query venues
+             (ql/order-by (ql/asc $category_id))
+             (ql/limit 10))
+           rows
+           (map last))]))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[10.0 1] [32.0 4] [34.0 57] [36.0 29] [40.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :num-bins 20))))))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+ [[0.0 1] [20.0 90] [40.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :num-bins 3))))))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+   [[10.0 -170.0 1] [32.0 -120.0 4] [34.0 -120.0 57] [36.0 -125.0 29] [40.0 -75.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) (partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :num-bins 20)
+                         (ql/binning-strategy $longitude :num-bins 20))))))
+
+;; Currently defaults to 8 bins when the number of bins isn't
+;; specified
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[10.0 1] [30.0 90] [40.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :default))))))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[10.0 1] [30.0 61] [35.0 29] [40.0 9]]
+  (tu/with-temporary-setting-values [breakout-bin-width 5.0]
+    (format-rows-by [(partial u/round-to-decimals 1) int]
+      (rows (data/run-query venues
+              (ql/aggregation (ql/count))
+              (ql/breakout (ql/binning-strategy $latitude :default)))))))
+
+;; Testing bin-width
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[10.0 1] [33.0 4] [34.0 57] [37.0 29] [40.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :bin-width 1))))))
+
+;; Testing bin-width using a float
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[10.0 1] [32.5 61] [37.5 29] [40.0 9]]
+  (format-rows-by [(partial u/round-to-decimals 1) int]
+    (rows (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :bin-width 2.5))))))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+  [[33.0 4] [34.0 57]]
+  (tu/with-temporary-setting-values [breakout-bin-width 1.0]
+    (format-rows-by [(partial u/round-to-decimals 1) int]
+      (rows (data/run-query venues
+              (ql/aggregation (ql/count))
+              (ql/filter (ql/and (ql/< $latitude 35)
+                                 (ql/> $latitude 20)))
+              (ql/breakout (ql/binning-strategy $latitude :default)))))))
+
+(defn- round-binning-decimals [result]
+  (let [round-to-decimal #(u/round-to-decimals 4 %)]
+    (-> result
+        (update :min_value round-to-decimal)
+        (update :max_value round-to-decimal)
+        (update-in [:binning_info :min_value] round-to-decimal)
+        (update-in [:binning_info :max_value] round-to-decimal))))
+
+;; Validate binning info is returned with the binning-strategy
+(datasets/expect-with-engines (engines-that-support :binning)
+  (assoc (breakout-col (venues-col :latitude))
+         :binning_info {:binning_strategy :bin-width, :bin_width 10.0,
+                        :num_bins         4,          :min_value 10.0
+                        :max_value        50.0})
+  (-> (data/run-query venues
+        (ql/aggregation (ql/count))
+        (ql/breakout (ql/binning-strategy $latitude :default)))
+      tu/round-fingerprint-cols
+      (get-in [:data :cols])
+      first))
+
+(datasets/expect-with-engines (engines-that-support :binning)
+  (assoc (breakout-col (venues-col :latitude))
+         :binning_info {:binning_strategy :num-bins, :bin_width 7.5,
+                        :num_bins         5,         :min_value 7.5,
+                        :max_value        45.0})
+  (-> (data/run-query venues
+                      (ql/aggregation (ql/count))
+                      (ql/breakout (ql/binning-strategy $latitude :num-bins 5)))
+      tu/round-fingerprint-cols
+      (get-in [:data :cols])
+      first))
+
+;; Validate that an error is returned when trying to bin a field whose fingerprint lacks min/max values
+(datasets/expect-with-engines (engines-that-support :binning)
+  {:status :failed
+   :class Exception
+   :error (format "Unable to bin field '%s' with id '%s' without a min/max value"
+                  (:name (Field (data/id :venues :latitude)))
+                  (data/id :venues :latitude))}
+  (let [fingerprint (-> (data/id :venues :latitude)
+                        Field
+                        :fingerprint)]
+    (try
+      (db/update! Field (data/id :venues :latitude) :fingerprint {:type {:type/Number {:min nil :max nil}}})
+      (-> (data/run-query venues
+            (ql/aggregation (ql/count))
+            (ql/breakout (ql/binning-strategy $latitude :default)))
+          (select-keys [:status :class :error]))
+      (finally
+        (db/update! Field (data/id :venues :latitude) :fingerprint fingerprint)))))
diff --git a/test/metabase/query_processor_test/date_bucketing_test.clj b/test/metabase/query_processor_test/date_bucketing_test.clj
index 02f65fa184a3aaadab637b41d689f18f0e3885eb..8cb9370e6f29043daa6098018ee3f30d9d2b72de 100644
--- a/test/metabase/query_processor_test/date_bucketing_test.clj
+++ b/test/metabase/query_processor_test/date_bucketing_test.clj
@@ -4,7 +4,7 @@
              [driver :as driver]
              [query-processor-test :refer :all]
              [util :as u]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data
              [dataset-definitions :as defs]
diff --git a/test/metabase/query_processor_test/expression_aggregations_test.clj b/test/metabase/query_processor_test/expression_aggregations_test.clj
index 82fdb86b0915e16c5af646d0ae3b05488810a393..a743eaea2587e23f69700da5c6a0242501f23599 100644
--- a/test/metabase/query_processor_test/expression_aggregations_test.clj
+++ b/test/metabase/query_processor_test/expression_aggregations_test.clj
@@ -6,7 +6,7 @@
              [query-processor-test :refer :all]
              [util :as u]]
             [metabase.models.metric :refer [Metric]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets :refer [*driver* *engine*]]
             [toucan.util.test :as tt]))
diff --git a/test/metabase/query_processor_test/expressions_test.clj b/test/metabase/query_processor_test/expressions_test.clj
index 4f9c6cc7acba696b2dd23089bbdbd81889410123..d39a64a1742165bce60e0a7951ef4cf1c862c1e1 100644
--- a/test/metabase/query_processor_test/expressions_test.clj
+++ b/test/metabase/query_processor_test/expressions_test.clj
@@ -4,17 +4,20 @@
             [metabase
              [query-processor-test :refer :all]
              [util :as u]]
-            [metabase.query-processor
-             [expand :as ql]
-             [interface :as qpi]]
+            [metabase.query-processor.interface :as qpi]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets]))
 
 ;; Test the expansion of the expressions clause
 (expect
   {:expressions {:my-cool-new-field (qpi/map->Expression {:operator :*
-                                                          :args [{:field-id 10, :fk-field-id nil, :datetime-unit nil}
-                                                                 20.0]})}}                                            ; 20 should be converted to a FLOAT
+                                                          :args     [{:field-id         10,  :fk-field-id        nil,
+                                                                      :datetime-unit    nil, :remapped-from      nil,
+                                                                      :remapped-to      nil, :field-display-name nil
+                                                                      :binning-strategy nil, :binning-param      nil}
+                                                                     20.0]})}}; 20 should be converted to a FLOAT
+
   (ql/expressions {} {:my-cool-new-field (ql/* (ql/field-id 10) 20)}))
 
 
diff --git a/test/metabase/query_processor_test/field_visibility_test.clj b/test/metabase/query_processor_test/field_visibility_test.clj
index e012dd68df107392c47f632fb664a15da4881d9e..fe7a78423b57c56f4d09ae937b9c0b7043ff0ba4 100644
--- a/test/metabase/query_processor_test/field_visibility_test.clj
+++ b/test/metabase/query_processor_test/field_visibility_test.clj
@@ -1,9 +1,13 @@
 (ns metabase.query-processor-test.field-visibility-test
   "Tests for behavior of fields with different visibility settings."
-  (:require [metabase.models.field :refer [Field]]
-            [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
-            [metabase.test.data :as data]
+  (:require [metabase
+             [query-processor-test :refer :all]
+             [util :as u]]
+            [metabase.models.field :refer [Field]]
+            [metabase.query-processor.middleware.expand :as ql]
+            [metabase.test
+             [data :as data]
+             [util :as tu]]
             [toucan.db :as db]))
 
 ;;; ------------------------------------------------------------ :details-only fields  ------------------------------------------------------------
@@ -12,16 +16,18 @@
   (-> (data/run-query venues
         (ql/order-by (ql/asc $id))
         (ql/limit 1))
-      :data :cols set))
+      tu/round-fingerprint-cols
+      :data
+      :cols
+      set))
 
 (expect-with-non-timeseries-dbs
   [(set (venues-cols))
-   #{(venues-col :category_id)
-     (venues-col :name)
-     (venues-col :latitude)
-     (venues-col :id)
-     (venues-col :longitude)
-     (assoc (venues-col :price) :visibility_type :details-only)}
+   (set (map (fn [col]
+               (if (= (data/id :venues :price) (u/get-id col))
+                 (assoc col :visibility_type :details-only)
+                 col))
+             (venues-cols)))
    (set (venues-cols))]
   [(get-col-names)
    (do (db/update! Field (data/id :venues :price), :visibility_type :details-only)
@@ -57,5 +63,6 @@
   (-> (data/run-query users
         (ql/order-by (ql/asc $id)))
       booleanize-native-form
+      tu/round-fingerprint-cols
       (update-in [:data :rows] (partial mapv (fn [[id name last-login]]
                                                [(int id) name])))))
diff --git a/test/metabase/query_processor_test/fields_test.clj b/test/metabase/query_processor_test/fields_test.clj
index 0994dac31e96d6e355b452f468a3da16387026c4..49846c308f078cf4b1fb6b0a101e8c6a9e8c0c91 100644
--- a/test/metabase/query_processor_test/fields_test.clj
+++ b/test/metabase/query_processor_test/fields_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.fields-test
   "Tests for the `:fields` clause."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]))
 
 ;; Test that we can restrict the Fields that get returned to the ones specified, and that results come back in the order of the IDs in the `fields` clause
diff --git a/test/metabase/query_processor_test/filter_test.clj b/test/metabase/query_processor_test/filter_test.clj
index 9cda4adc30fcea94e58eae4d562906779cf5c58d..95e5bdb58c4722c7c6d2c27dc981a86e78054407 100644
--- a/test/metabase/query_processor_test/filter_test.clj
+++ b/test/metabase/query_processor_test/filter_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.filter-test
   "Tests for the `:filter` clause."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]))
 
 ;;; ------------------------------------------------------------ "FILTER" CLAUSE ------------------------------------------------------------
diff --git a/test/metabase/query_processor_test/joins_test.clj b/test/metabase/query_processor_test/joins_test.clj
index f79c151f05a0e27a62bb6520c00ebdb372c29f6d..1ce40cf4c5c874cf84fb9ed833de3d614378da2a 100644
--- a/test/metabase/query_processor_test/joins_test.clj
+++ b/test/metabase/query_processor_test/joins_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.joins-test
   "Test for JOIN behavior."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets]))
 
diff --git a/test/metabase/query_processor_test/nested_field_test.clj b/test/metabase/query_processor_test/nested_field_test.clj
index d6ba58e109631639aea0b3996fde4d1dc735106e..a77550db9401619b302f2187805b89d179cf28ad 100644
--- a/test/metabase/query_processor_test/nested_field_test.clj
+++ b/test/metabase/query_processor_test/nested_field_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.nested-field-test
   "Tests for nested field access."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets]))
 
diff --git a/test/metabase/query_processor_test/order_by_test.clj b/test/metabase/query_processor_test/order_by_test.clj
index b73d08497a5f6e2058159b09cc8f374469522749..1271d122a88536b9c0334993b4608d1bd110a0bd 100644
--- a/test/metabase/query_processor_test/order_by_test.clj
+++ b/test/metabase/query_processor_test/order_by_test.clj
@@ -2,7 +2,7 @@
   "Tests for the `:order-by` clause."
   (:require [clojure.math.numeric-tower :as math]
             [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data.datasets :as datasets :refer [*engine*]]))
 
diff --git a/test/metabase/query_processor_test/page_test.clj b/test/metabase/query_processor_test/page_test.clj
index 4a6e3c44ee6eaf40a39e3a2a83c959b8aa5afe2e..014e755f939157bb752df45d1a818c80390a70f0 100644
--- a/test/metabase/query_processor_test/page_test.clj
+++ b/test/metabase/query_processor_test/page_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.page-test
   "Tests for the `:page` clause."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]))
 
 ;; Test that we can get "pages" of results.
diff --git a/test/metabase/query_processor_test/parameters_test.clj b/test/metabase/query_processor_test/parameters_test.clj
index d4ac9682b63eb4e7a96fc0f275b97eeac9ed6950..7ef8c17820a1e80403acc525b00e3626138bf0cb 100644
--- a/test/metabase/query_processor_test/parameters_test.clj
+++ b/test/metabase/query_processor_test/parameters_test.clj
@@ -1,9 +1,10 @@
 (ns metabase.query-processor-test.parameters_test
   "Tests for query parameters."
-  (:require [metabase
+  (:require [expectations :refer [expect]]
+            [metabase
              [query-processor :as qp]
              [query-processor-test :refer :all]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]))
 
 (expect-with-non-timeseries-dbs
@@ -24,3 +25,18 @@
           outer-query (data/wrap-inner-query inner-query)
           outer-query (assoc outer-query :parameters [{:name "price", :type "category", :target ["field-id" (data/id :venues :price)], :value 4}])]
       (rows (qp/process-query outer-query)))))
+
+;; Make sure using commas in numeric params treats them as separate IDs (#5457)
+(expect
+  "SELECT * FROM USERS where id IN (1, 2, 3)"
+  (-> (qp/process-query
+        {:database   (data/id)
+         :type       "native"
+         :native     {:query         "SELECT * FROM USERS [[where id IN ({{ids_list}})]]"
+                      :template_tags {:ids_list {:name         "ids_list"
+                                                 :display_name "Ids list"
+                                                 :type         "number"}}}
+         :parameters [{:type   "category"
+                       :target ["variable" ["template-tag" "ids_list"]]
+                       :value  "1,2,3"}]})
+      :data :native_form :query))
diff --git a/test/metabase/query_processor_test/remapping_test.clj b/test/metabase/query_processor_test/remapping_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..cb1d0051c187bbcecbcc0662d70530186aec42da
--- /dev/null
+++ b/test/metabase/query_processor_test/remapping_test.clj
@@ -0,0 +1,80 @@
+(ns metabase.query-processor-test.remapping-test
+  "Tests for the remapping results"
+  (:require [metabase.query-processor.middleware.expand :as ql]
+            [metabase.query-processor-test :refer :all]
+            [metabase.test.data :as data]
+            [metabase.test.data.datasets :as datasets]
+            [metabase.test.util :as tu]))
+
+(tu/resolve-private-vars metabase.query-processor.middleware.add-dimension-projections create-remapped-col)
+
+(qp-expect-with-all-engines
+  {:rows  [["20th Century Cafe" 12 "Café Sweets"]
+           ["25°" 11 "Café"]
+           ["33 Taps" 7 "Beer Garden"]
+           ["800 Degrees Neapolitan Pizzeria" 58 "Ramen"]]
+   :columns [(data/format-name "name")
+             (data/format-name "category_id")
+             "Foo"]
+   :cols    [(venues-col :name)
+             (assoc (venues-col :category_id) :remapped_to "Foo")
+             (create-remapped-col "Foo" (data/format-name "category_id"))]
+   :native_form true}
+  (data/with-data
+    (data/create-venue-category-remapping "Foo")
+    (->> (data/run-query venues
+           (ql/fields $name $category_id)
+           (ql/order-by (ql/asc $name))
+           (ql/limit 4))
+         booleanize-native-form
+         (format-rows-by [str int str]))))
+
+(defn- select-columns
+  "Focuses the given resultset to columns that return true when passed
+  to `COLUMNS-PRED`. Typically this would be done as part of the
+  query, however there's a bug currently preventing that from working
+  when remapped. This allows the data compared to be smaller and avoid
+  that bug."
+  [columns-pred results]
+  (let [col-indexes (remove nil? (map-indexed (fn [idx col]
+                                                (when (columns-pred col)
+                                                  idx))
+                                              (get-in results [:data :columns])))]
+    (-> results
+        (update-in [:data :columns]
+                   (fn [rows]
+                     (filterv columns-pred rows)))
+        (update-in [:data :cols]
+                   (fn [rows]
+                     (filterv #(columns-pred (:name %)) rows)))
+        (update-in [:data :rows]
+                   (fn [rows]
+                     (map #(mapv % col-indexes) rows))))))
+
+(datasets/expect-with-engines (engines-that-support :foreign-keys)
+  {:rows   [["20th Century Cafe" 2 "Café"]
+            ["25°" 2 "Burger"]
+            ["33 Taps" 2 "Bar"]
+            ["800 Degrees Neapolitan Pizzeria" 2 "Pizza"]]
+   :columns [(:name (venues-col :name))
+             (:name (venues-col :price))
+             (data/format-name "name_2")]
+   :cols    [(venues-col :name)
+             (venues-col :price)
+             (assoc (categories-col :name)
+               :fk_field_id (data/id :venues :category_id)
+               :display_name "Foo"
+               :name (data/format-name "name_2")
+               :remapped_from (data/format-name "category_id")
+               :schema_name nil)]
+   :native_form true}
+  (data/with-data
+    (data/create-venue-category-fk-remapping "Foo")
+    (->> (data/run-query venues
+           (ql/order-by (ql/asc $name))
+           (ql/limit 4))
+         booleanize-native-form
+         (format-rows-by [int str int double double int str])
+         (select-columns (set (map data/format-name ["name" "price" "name_2"])))
+         tu/round-fingerprint-cols
+         :data)))
diff --git a/test/metabase/query_processor_test/unix_timestamp_test.clj b/test/metabase/query_processor_test/unix_timestamp_test.clj
index fab6284521ee61a44ee24b72c41d37071f0526f4..0c56f629b06c34440865b3252bbdfd8f30737586 100644
--- a/test/metabase/query_processor_test/unix_timestamp_test.clj
+++ b/test/metabase/query_processor_test/unix_timestamp_test.clj
@@ -1,7 +1,7 @@
 (ns metabase.query-processor-test.unix-timestamp-test
   "Tests for UNIX timestamp support."
   (:require [metabase.query-processor-test :refer :all]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data
              [datasets :as datasets :refer [*driver* *engine*]]
diff --git a/test/metabase/sync/analyze/fingerprint_test.clj b/test/metabase/sync/analyze/fingerprint_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..31fd51812e95c7adb572f8a6f3e21eea92756086
--- /dev/null
+++ b/test/metabase/sync/analyze/fingerprint_test.clj
@@ -0,0 +1,29 @@
+(ns metabase.sync.analyze.fingerprint-test
+  "Basic tests to make sure the fingerprint generation code is doing something that makes sense."
+  (:require [expectations :refer :all]
+            [metabase.models.field :refer [Field]]
+            [metabase.sync.analyze.fingerprint :as fingerprint]
+            [metabase.test.data :as data]))
+
+;; basic test for a numeric Field
+(expect
+  {:global {:distinct-count 4}
+   :type   {:type/Number {:min 1, :max 4, :avg 2.03}}}
+  (#'fingerprint/fingerprint (Field (data/id :venues :price))))
+
+;; basic test for a Text Field
+(expect
+  {:global {:distinct-count 100}
+   :type   {:type/Text {:percent-json 0.0, :percent-url 0.0, :percent-email 0.0, :average-length 15.63}}}
+  (#'fingerprint/fingerprint (Field (data/id :venues :name))))
+
+;; a non-integer numeric Field
+(expect
+  {:global {:distinct-count 94}
+   :type   {:type/Number {:min 10.0646, :max 40.7794, :avg 35.50589199999998}}}
+  (#'fingerprint/fingerprint (Field (data/id :venues :latitude))))
+
+;; a datetime field
+(expect
+  {:global {:distinct-count 618}}
+  (#'fingerprint/fingerprint (Field (data/id :checkins :date))))
diff --git a/test/metabase/sync/analyze/special_types/values_test.clj b/test/metabase/sync/analyze/special_types/values_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..6eb021652a85b4776c628c3f4d91538d67d520c9
--- /dev/null
+++ b/test/metabase/sync/analyze/special_types/values_test.clj
@@ -0,0 +1,13 @@
+(ns metabase.sync.analyze.special-types.values-test
+  (:require [metabase.models.field :refer [Field]]
+            [metabase.query-processor-test :as qp-test]
+            [metabase.sync.analyze.fingerprint :as fingerprint]
+            [metabase.test.data :as data]
+            [metabase.test.data.datasets :as datasets]))
+
+;; field-avg-length
+;; This test won't work for Druid because it doesn't have a 'venues' Table. TODO - Add a test for Druid as well
+(datasets/expect-with-engines qp-test/non-timeseries-engines
+  16
+  (Math/round (get-in (#'fingerprint/fingerprint (Field (data/id :venues :name)))
+                      [:type :type/Text :average-length])))
diff --git a/test/metabase/sync/analyze/table_row_count_test.clj b/test/metabase/sync/analyze/table_row_count_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..3c2b3decbe72750f5119dfc21ca2fb60c377831c
--- /dev/null
+++ b/test/metabase/sync/analyze/table_row_count_test.clj
@@ -0,0 +1,22 @@
+(ns metabase.sync.analyze.table-row-count-test
+  "Tests for the sync logic that updates a Table's row count."
+  (:require [metabase
+             [query-processor-test :as qp-test]
+             [util :as u]]
+            [metabase.models.table :refer [Table]]
+            [metabase.sync.analyze.table-row-count :as table-row-count]
+            [metabase.test.data :as data]
+            [toucan.db :as db]
+            [toucan.util.test :as tt]
+            [metabase.test.data.datasets :as datasets]))
+
+;; test that syncing table row counts works
+;; TODO - write a Druid version of this test. Works slightly differently since Druid doesn't have a 'venues' table
+;; TODO - not sure why this doesn't work on Oracle. Seems to be an issue with the test rather than with the Oracle driver
+(datasets/expect-with-engines (disj qp-test/non-timeseries-engines :oracle)
+  100
+  (tt/with-temp Table [venues-copy (let [venues-table (Table (data/id :venues))]
+                                     (assoc (select-keys venues-table [:schema :name :db_id])
+                                       :rows 0))]
+    (table-row-count/update-row-count! venues-copy)
+    (db/select-one-field :rows Table :id (u/get-id venues-copy))))
diff --git a/test/metabase/sync/analyze_test.clj b/test/metabase/sync/analyze_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..255b3cb61a582bd9e0af20e5fe55a3c2a0e4eb19
--- /dev/null
+++ b/test/metabase/sync/analyze_test.clj
@@ -0,0 +1,55 @@
+(ns metabase.sync.analyze-test
+  (:require [expectations :refer :all]
+            [metabase.models
+             [database :refer [Database]]
+             [field :refer [Field]]
+             [table :refer [Table]]]
+            [metabase.sync
+             [analyze :as analyze]
+             [sync-metadata :as sync-metadata]]
+            [metabase.test.data :as data]
+            [metabase.util :as u]
+            [toucan.db :as db]
+            [toucan.util.test :as tt]))
+
+(def ^:private fake-analysis-completion-date
+  (u/->Timestamp "2017-08-01"))
+
+;; Check that Fields do *not* get analyzed if they're not newly created
+(expect
+  #{{:name "LONGITUDE",   :special_type nil,      :last_analyzed fake-analysis-completion-date}
+    {:name "CATEGORY_ID", :special_type nil,      :last_analyzed fake-analysis-completion-date}
+    {:name "PRICE",       :special_type nil,      :last_analyzed fake-analysis-completion-date}
+    {:name "LATITUDE",    :special_type nil,      :last_analyzed fake-analysis-completion-date}
+    {:name "NAME",        :special_type nil,      :last_analyzed fake-analysis-completion-date}
+    {:name "ID",          :special_type :type/PK, :last_analyzed fake-analysis-completion-date}} ; PK is ok because it gets marked as part of metadata sync
+  (tt/with-temp* [Database [db    {:engine "h2", :details (:details (data/db))}]
+                  Table    [table {:name "VENUES", :db_id (u/get-id db)}]]
+    ;; sync the metadata, but DON'T do analysis yet
+    (sync-metadata/sync-table-metadata! table)
+    ;; now mark all the Tables as analyzed so they won't be subject to analysis
+    (db/update-where! Field {:table_id (u/get-id table)}
+      :last_analyzed fake-analysis-completion-date)
+    ;; ok, NOW run the analysis process
+    (analyze/analyze-table! table)
+    ;; check and make sure all the Fields don't have special types and their last_analyzed date didn't change
+    (set (for [field (db/select [Field :name :special_type :last_analyzed] :table_id (u/get-id table))]
+           (into {} field)))))
+
+;; ...but they *SHOULD* get analyzed if they ARE newly created
+(expect
+  #{{:name "LATITUDE",    :special_type :type/Latitude,  :last_analyzed true}
+    {:name "ID",          :special_type :type/PK,        :last_analyzed true}
+    {:name "PRICE",       :special_type :type/Category,  :last_analyzed true}
+    {:name "LONGITUDE",   :special_type :type/Longitude, :last_analyzed true}
+    {:name "CATEGORY_ID", :special_type :type/Category,  :last_analyzed true}
+    {:name "NAME",        :special_type :type/Name,      :last_analyzed true}}
+  (tt/with-temp* [Database [db    {:engine "h2", :details (:details (data/db))}]
+                  Table    [table {:name "VENUES", :db_id (u/get-id db)}]]
+    ;; sync the metadata, but DON'T do analysis yet
+    (sync-metadata/sync-table-metadata! table)
+    ;; ok, NOW run the analysis process
+    (analyze/analyze-table! table)
+    ;; fields *SHOULD* have special types now
+    (set (for [field (db/select [Field :name :special_type :last_analyzed] :table_id (u/get-id table))]
+           (into {} (update field :last_analyzed boolean))))))
diff --git a/test/metabase/sync/sync_metadata/metabase_metadata_test.clj b/test/metabase/sync/sync_metadata/metabase_metadata_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..033159c5d35ad86de7cd9b8cc33f05a4dcb85ded
--- /dev/null
+++ b/test/metabase/sync/sync_metadata/metabase_metadata_test.clj
@@ -0,0 +1,47 @@
+(ns metabase.sync.sync-metadata.metabase-metadata-test
+  "Tests for the logic that syncs the `_metabase_metadata` Table."
+  (:require [expectations :refer :all]
+            [metabase.models
+             [database :refer [Database]]
+             [table :refer [Table]]]
+            [metabase.sync.sync-metadata.metabase-metadata :as metabase-metadata]
+            [metabase.test.util :as tu]
+            [metabase.util :as u]
+            [toucan
+             [db :as db]
+             [hydrate :refer [hydrate]]]
+            [toucan.util.test :as tt]
+            [metabase.models.field :refer [Field]]))
+
+;; Test that the `_metabase_metadata` table can be used to populate values for things like descriptions
+(defn- get-table-and-fields-descriptions [table-or-id]
+  (-> (db/select-one [Table :id :name :description], :id (u/get-id table-or-id))
+      (hydrate :fields)
+      (update :fields #(for [field %]
+                         (select-keys field [:name :description])))
+      tu/boolean-ids-and-timestamps))
+
+(expect
+  [{:name        "movies"
+    :description nil
+    :id          true
+    :fields      [{:name "filming", :description nil}]}
+   {:name        "movies"
+    :description "A cinematic adventure."
+    :id          true
+    :fields      [{:name "filming", :description "If the movie is currently being filmed."}]}]
+  (tt/with-temp* [Database [db {:engine :moviedb}]]
+    ;; manually add in the movies table
+    (let [table (db/insert! Table
+                  :db_id  (u/get-id db)
+                  :name   "movies"
+                  :active true)]
+      (db/insert! Field
+        :base_type :type/Boolean
+        :table_id (u/get-id table)
+        :name     "filming")
+      ;; here we go
+      [(get-table-and-fields-descriptions table)
+       (do
+         (metabase-metadata/sync-metabase-metadata! db)
+         (get-table-and-fields-descriptions table))])))
diff --git a/test/metabase/sync/sync_metadata/tables_test.clj b/test/metabase/sync/sync_metadata/tables_test.clj
new file mode 100644
index 0000000000000000000000000000000000000000..51d73cd46d78ee784f199a525bd2bfeebcb3c286
--- /dev/null
+++ b/test/metabase/sync/sync_metadata/tables_test.clj
@@ -0,0 +1,29 @@
+(ns metabase.sync.sync-metadata.tables-test
+  "Test for the logic that syncs Table models with the metadata fetched from a DB."
+  (:require [expectations :refer :all]
+            [metabase.models.table :refer [Table]]
+            [metabase.test.data :as data]
+            [metabase.test.data.interface :as i]
+            [toucan.db :as db]))
+
+(i/def-database-definition ^:const ^:private db-with-some-cruft
+  ["acquired_toucans"
+   [{:field-name "species",              :base-type :type/Text}
+    {:field-name "cam_has_acquired_one", :base-type :type/Boolean}]
+   [["Toco"               false]
+    ["Chestnut-Mandibled" true]
+    ["Keel-billed"        false]
+    ["Channel-billed"     false]]]
+  ["south_migrationhistory"
+   [{:field-name "app_name",  :base-type :type/Text}
+    {:field-name "migration", :base-type :type/Text}]
+   [["main" "0001_initial"]
+    ["main" "0002_add_toucans"]]])
+
+;; south_migrationhistory, being a CRUFTY table, should still be synced, but marked as such
+(expect
+  #{{:name "SOUTH_MIGRATIONHISTORY", :visibility_type :cruft}
+    {:name "ACQUIRED_TOUCANS",       :visibility_type nil}}
+  (data/dataset metabase.sync.sync-metadata.tables-test/db-with-some-cruft
+    (set (for [table (db/select [Table :name :visibility_type], :db_id (data/id))]
+           (into {} table)))))
diff --git a/test/metabase/sync_database/analyze_test.clj b/test/metabase/sync_database/analyze_test.clj
index fe57c4d943a98aa1e62861684171ef3525b9b9fe..6017c095e7e6fd86f5e16e70310507b0d90854d0 100644
--- a/test/metabase/sync_database/analyze_test.clj
+++ b/test/metabase/sync_database/analyze_test.clj
@@ -1,4 +1,5 @@
 (ns metabase.sync-database.analyze-test
+  ;; TODO - this namespace follows the old pattern of sync namespaces. Tests should be moved to appropriate new homes at some point
   (:require [clojure.string :as str]
             [expectations :refer :all]
             [metabase
@@ -6,9 +7,13 @@
              [util :as u]]
             [metabase.db.metadata-queries :as metadata-queries]
             [metabase.models
-             [field :refer [Field]]
+             [database :refer [Database]]
+             [field :refer [Field] :as field]
+             [field-values :as field-values]
              [table :as table :refer [Table]]]
-            [metabase.sync-database.analyze :refer :all]
+            [metabase.sync.analyze :as analyze]
+            [metabase.sync.analyze.fingerprint :as fingerprint]
+            [metabase.sync.analyze.classifiers.text-fingerprint :as classify-text-fingerprint]
             [metabase.test
              [data :as data]
              [util :as tu]]
@@ -16,27 +21,26 @@
             [toucan.db :as db]
             [toucan.util.test :as tt]))
 
-;; test:cardinality-and-extract-field-values
+;; distinct-values
 ;; (#2332) check that if field values are long we skip over them
+;; TODO - the next two should probably be moved into field-values-test
 (expect
-  {:values nil}
-  (with-redefs-fn {#'metadata-queries/field-distinct-values (constantly [(str/join (repeat 50000 "A"))])}
-    #(test:cardinality-and-extract-field-values {} {})))
+  nil
+  (with-redefs [metadata-queries/field-distinct-values (constantly [(str/join (repeat 50000 "A"))])]
+    (#'field-values/distinct-values {})))
 
 (expect
-  {:values       [1 2 3 4]
-   :special-type :type/Category}
-  (with-redefs-fn {#'metadata-queries/field-distinct-values (constantly [1 2 3 4])}
-    #(test:cardinality-and-extract-field-values {} {})))
+  [1 2 3 4]
+  (with-redefs [metadata-queries/field-distinct-values (constantly [1 2 3 4])]
+    (#'field-values/distinct-values {})))
 
 
 ;;; ## mark-json-field!
 
-(tu/resolve-private-vars metabase.sync-database.analyze
-  values-are-valid-json? values-are-valid-emails?)
-
-(def ^:const ^:private fake-values-seq-json
-  "A sequence of values that should be marked is valid JSON.")
+(defn- values-are-valid-json? [values]
+  (let [field (field/map->FieldInstance {:base_type :type/Text})]
+    (= (:special_type (classify-text-fingerprint/infer-special-type field (#'fingerprint/fingerprint field values)))
+       :type/SerializedJSON)))
 
 ;; When all the values are valid JSON dicts they're valid JSON
 (expect
@@ -56,16 +60,6 @@
                            "[1, 2, 3, 4]"
                            "[1, 2, 3, 4]"]))
 
-;; If the values have some valid JSON dicts but is mostly null, it's still valid JSON
-(expect
-  (values-are-valid-json? ["{\"this\":\"is\",\"valid\":\"json\"}"
-                           nil
-                           nil]))
-
-;; If every value is nil then the values should not be considered valid JSON
-(expect false
-  (values-are-valid-json? [nil nil nil]))
-
 ;; Check that things that aren't dictionaries or arrays aren't marked as JSON
 (expect false (values-are-valid-json? ["\"A JSON string should not cause a Field to be marked as JSON\""]))
 (expect false (values-are-valid-json? ["100"]))
@@ -73,9 +67,14 @@
 (expect false (values-are-valid-json? ["false"]))
 
 ;; Check that things that are valid emails are marked as Emails
+
+(defn- values-are-valid-emails? [values]
+  (let [field (field/map->FieldInstance {:base_type :type/Text})]
+    (= (:special_type (classify-text-fingerprint/infer-special-type field (#'fingerprint/fingerprint field values)))
+       :type/Email)))
+
 (expect true (values-are-valid-emails? ["helper@metabase.com"]))
 (expect true (values-are-valid-emails? ["helper@metabase.com", "someone@here.com", "help@nope.com"]))
-(expect true (values-are-valid-emails? ["helper@metabase.com", nil, "help@nope.com"]))
 
 (expect false (values-are-valid-emails? ["helper@metabase.com", "1111IsNot!An....email", "help@nope.com"]))
 (expect false (values-are-valid-emails? ["\"A string should not cause a Field to be marked as email\""]))
@@ -83,11 +82,15 @@
 (expect false (values-are-valid-emails? ["true"]))
 (expect false (values-are-valid-emails? ["false"]))
 
-;; Tests to avoid analyzing hidden tables
+
+;;; +------------------------------------------------------------------------------------------------------------------------+
+;;; |                                         Tests to avoid analyzing hidden tables                                         |
+;;; +------------------------------------------------------------------------------------------------------------------------+
+
 (defn- unanalyzed-fields-count [table]
-  (assert (pos? ;; don't let ourselves be fooled if the test passes because the table is
-           ;; totally broken or has no fields. Make sure we actually test something
-           (db/count Field :table_id (u/get-id table))))
+  ;; don't let ourselves be fooled if the test passes because the table is
+  ;; totally broken or has no fields. Make sure we actually test something
+  (assert (pos? (db/count Field :table_id (u/get-id table))))
   (db/count Field :last_analyzed nil, :table_id (u/get-id table)))
 
 (defn- latest-sync-time [table]
@@ -96,24 +99,35 @@
     :table_id      (u/get-id table)
     {:order-by [[:last_analyzed :desc]]}))
 
-(defn- set-table-visibility-type! [table visibility-type]
+(defn- set-table-visibility-type!
+  "Change the VISIBILITY-TYPE of TABLE via an API call.
+   (This is done via the API so we can see which, if any, side effects (e.g. analysis) get triggered.)"
+  [table visibility-type]
   ((user->client :crowberto) :put 200 (format "table/%d" (:id table)) {:display_name    "hiddentable"
                                                                        :entity_type     "person"
                                                                        :visibility_type visibility-type
                                                                        :description     "What a nice table!"}))
 
-(defn- api-sync! [table]
+(defn- api-sync!
+  "Trigger a sync of TABLE via the API."
+  [table]
   ((user->client :crowberto) :post 200 (format "database/%d/sync" (:db_id table))))
 
-(defn- analyze! [table]
-  (let [db-id (:db_id table)]
-    (analyze-data-shape-for-tables! (driver/database-id->driver db-id) {:id db-id})))
+;; use these functions to create fake Tables & Fields that are actually backed by something real in the database.
+;; Otherwise when we go to resync them the logic will figure out Table/Field doesn't exist and mark it as inactive
+(defn- fake-table [& {:as additional-options}]
+  (merge {:rows 15, :db_id (data/id), :name "VENUES"}
+         additional-options))
+
+(defn- fake-field [table & {:as additional-options}]
+  (merge {:table_id (u/get-id table), :name "NAME"}
+         additional-options))
 
 ;; expect all the kinds of hidden tables to stay un-analyzed through transitions and repeated syncing
 (expect
   1
-  (tt/with-temp* [Table [table {:rows 15}]
-                  Field [field {:table_id (:id table)}]]
+  (tt/with-temp* [Table [table (fake-table)]
+                  Field [field (fake-field table)]]
     (set-table-visibility-type! table "hidden")
     (api-sync! table)
     (set-table-visibility-type! table "cruft")
@@ -127,27 +141,32 @@
     (unanalyzed-fields-count table)))
 
 ;; same test not coming through the api
+(defn- analyze-table! [table]
+  ;; we're calling `analyze-db!` instead of `analyze-table!` because the latter doesn't care if you try to sync a hidden table
+  ;; and will allow that. TODO - Does that behavior make sense?
+  (analyze/analyze-db! (Database (:db_id table))))
+
 (expect
   1
-  (tt/with-temp* [Table [table {:rows 15}]
-                  Field [field {:table_id (:id table)}]]
+  (tt/with-temp* [Table [table (fake-table)]
+                  Field [field (fake-field table)]]
     (set-table-visibility-type! table "hidden")
-    (analyze! table)
+    (analyze-table! table)
     (set-table-visibility-type! table "cruft")
     (set-table-visibility-type! table "cruft")
-    (analyze! table)
+    (analyze-table! table)
     (set-table-visibility-type! table "technical")
-    (analyze! table)
+    (analyze-table! table)
     (set-table-visibility-type! table "technical")
-    (analyze! table)
-    (analyze! table)
+    (analyze-table! table)
+    (analyze-table! table)
     (unanalyzed-fields-count table)))
 
 ;; un-hiding a table should cause it to be analyzed
 (expect
   0
-  (tt/with-temp* [Table [table {:rows 15}]
-                  Field [field {:table_id (:id table)}]]
+  (tt/with-temp* [Table [table (fake-table)]
+                  Field [field (fake-field table)]]
     (set-table-visibility-type! table "hidden")
     (set-table-visibility-type! table nil)
     (unanalyzed-fields-count table)))
@@ -155,8 +174,8 @@
 ;; re-hiding a table should not cause it to be analyzed
 (expect
   ;; create an initially hidden table
-  (tt/with-temp* [Table [table {:rows 15, :visibility_type "hidden"}]
-                  Field [field {:table_id (:id table)}]]
+  (tt/with-temp* [Table [table (fake-table :visibility_type "hidden")]
+                  Field [field (fake-field table)]]
     ;; switch the table to visible (triggering a sync) and get the last sync time
     (let [last-sync-time (do (set-table-visibility-type! table nil)
                              (latest-sync-time table))]
diff --git a/test/metabase/sync_database/introspect_test.clj b/test/metabase/sync_database/introspect_test.clj
deleted file mode 100644
index 826c63d5e67e8589365999e699689fb8a7398042..0000000000000000000000000000000000000000
--- a/test/metabase/sync_database/introspect_test.clj
+++ /dev/null
@@ -1,268 +0,0 @@
-(ns metabase.sync-database.introspect-test
-  (:require [expectations :refer :all]
-            [metabase.models
-             [database :refer [Database]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]]
-            [metabase.sync-database.introspect :as introspect]
-            [metabase.test.mock.moviedb :as moviedb]
-            [metabase.test.util :as tu]
-            [toucan
-             [db :as db]
-             [hydrate :refer [hydrate]]]
-            [toucan.util.test :as tt]))
-
-(tu/resolve-private-vars metabase.sync-database.introspect
-  save-all-table-columns! save-all-table-fks! create-raw-table! update-raw-table! disable-raw-tables!)
-
-(defn get-tables [database-id]
-  (->> (hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns)
-       (mapv tu/boolean-ids-and-timestamps)))
-
-(defn get-table [table-id]
-  (->> (hydrate (RawTable :raw_table_id table-id) :columns)
-       (mapv tu/boolean-ids-and-timestamps)))
-
-(def ^:private ^:const field-defaults
-  {:id                  true
-   :raw_table_id        true
-   :active              true
-   :column_type         nil
-   :is_pk               false
-   :fk_target_column_id false
-   :details             {}
-   :created_at          true
-   :updated_at          true})
-
-;; save-all-table-fks
-;; test case of multi schema with repeating table names
-(expect
-  [[(merge field-defaults {:name "id"})
-    (merge field-defaults {:name "user_id"})]
-   [(merge field-defaults {:name "id"})
-    (merge field-defaults {:name "user_id", :fk_target_column_id true})]
-   [(merge field-defaults {:name "id"})
-    (merge field-defaults {:name "user_id"})]
-   [(merge field-defaults {:name "id"})
-    (merge field-defaults {:name "user_id", :fk_target_column_id true})]]
-  (tt/with-temp* [Database  [{database-id :id}]
-                  RawTable  [{raw-table-id1 :id, :as table}  {:database_id database-id, :schema "customer1", :name "photos"}]
-                  RawColumn [_                               {:raw_table_id raw-table-id1, :name "id"}]
-                  RawColumn [_                               {:raw_table_id raw-table-id1, :name "user_id"}]
-                  RawTable  [{raw-table-id2 :id, :as table1} {:database_id database-id, :schema "customer2", :name "photos"}]
-                  RawColumn [_                               {:raw_table_id raw-table-id2, :name "id"}]
-                  RawColumn [_                               {:raw_table_id raw-table-id2, :name "user_id"}]
-                  RawTable  [{raw-table-id3 :id, :as table2} {:database_id database-id, :schema nil, :name "users"}]
-                  RawColumn [_                               {:raw_table_id raw-table-id3, :name "id"}]]
-    (let [get-columns #(->> (db/select RawColumn, :raw_table_id raw-table-id1, {:order-by [:id]})
-                            (mapv tu/boolean-ids-and-timestamps))]
-      ;; original list should not have any fks
-      [(get-columns)
-       ;; now add a fk
-       (do
-         (save-all-table-fks! table [{:fk-column-name   "user_id"
-                                      :dest-table       {:schema nil, :name "users"}
-                                      :dest-column-name "id"}])
-         (get-columns))
-       ;; now remove the fk
-       (do
-         (save-all-table-fks! table [])
-         (get-columns))
-       ;; now add back a different fk
-       (do
-         (save-all-table-fks! table [{:fk-column-name   "user_id"
-                                      :dest-table       {:schema "customer1", :name "photos"}
-                                      :dest-column-name "id"}])
-         (get-columns))])))
-
-;; save-all-table-columns
-(expect
-  [[]
-   [(merge field-defaults
-           {:name    "beak_size"
-            :is_pk   true
-            :details {:inches 7, :special-type "type/Category", :base-type "type/Integer"}})]
-   [(merge field-defaults
-           {:name    "beak_size"
-            :details {:inches 8, :base-type "type/Integer"}})
-    (merge field-defaults
-           {:name    "num_feathers"
-            :details {:count 10000, :base-type "type/Integer"}})]
-   [(merge field-defaults
-           {:name    "beak_size"
-            :details {:inches 8, :base-type "type/Integer"}
-            :active  false})
-    (merge field-defaults
-           {:name    "num_feathers"
-            :details {:count 12000, :base-type "type/Integer"}})]
-   [(merge field-defaults
-           {:name    "beak_size"
-            :details {:inches 8, :base-type "type/Integer"}})
-    (merge field-defaults
-           {:name    "num_feathers"
-            :details {:count 12000, :base-type "type/Integer"}})]]
-  (tt/with-temp* [Database [{database-id :id}]
-                  RawTable [{raw-table-id :id, :as table} {:database_id database-id}]]
-    (let [get-columns #(->> (db/select RawColumn, :raw_table_id raw-table-id, {:order-by [:id]})
-                            (mapv tu/boolean-ids-and-timestamps))]
-      ;; original list should be empty
-      [(get-columns)
-       ;; now add a column
-       (do
-         (save-all-table-columns! table [{:name "beak_size", :base-type :type/Integer, :details {:inches 7}, :pk? true, :special-type "type/Category"}])
-         (get-columns))
-       ;; now add another column and modify the first
-       (do
-         (save-all-table-columns! table [{:name "beak_size", :base-type :type/Integer, :details {:inches 8}}
-                                         {:name "num_feathers", :base-type :type/Integer, :details {:count 10000}}])
-         (get-columns))
-       ;; now remove the first column
-       (do
-         (save-all-table-columns! table [{:name "num_feathers", :base-type :type/Integer, :details {:count 12000}}])
-         (get-columns))
-       ;; lastly, resurrect the first column (this ensures uniqueness by name)
-       (do
-         (save-all-table-columns! table [{:name "beak_size", :base-type :type/Integer, :details {:inches 8}}
-                                         {:name "num_feathers", :base-type :type/Integer, :details {:count 12000}}])
-         (get-columns))])))
-
-;; create-raw-table
-
-(def ^:private ^:const table-defaults
-  {:id          true
-   :database_id true
-   :active      true
-   :schema      nil
-   :columns     []
-   :details     {}
-   :created_at  true
-   :updated_at  true})
-
-
-(expect
-  [[]
-   [(merge table-defaults
-           {:name    "users"
-            :details {:a "b"}})]
-   [(merge table-defaults
-           {:name    "users"
-            :details {:a "b"}})
-    (merge table-defaults
-           {:schema  "aviary"
-            :name    "toucanery"
-            :details {:owner "Cam"}
-            :columns [(merge field-defaults
-                             {:name    "beak_size"
-                              :is_pk   true
-                              :details {:inches 7, :base-type "type/Integer"}})]})]]
-  (tt/with-temp* [Database [{database-id :id, :as db}]]
-    [(get-tables database-id)
-     ;; now add a table
-     (do
-       (create-raw-table! database-id {:schema  nil
-                                       :name    "users"
-                                       :details {:a "b"}
-                                       :fields  []})
-       (get-tables database-id))
-     ;; now add another table, this time with a couple columns and some fks
-     (do
-       (create-raw-table! database-id {:schema  "aviary"
-                                       :name    "toucanery"
-                                       :details {:owner "Cam"}
-                                       :fields  [{:name      "beak_size"
-                                                  :base-type :type/Integer
-                                                  :pk?       true
-                                                  :details   {:inches 7}}]})
-       (get-tables database-id))]))
-
-
-;; update-raw-table
-(expect
-  [[(merge table-defaults
-           {:schema  "aviary"
-            :name    "toucanery"
-            :details {:owner "Cam"}})]
-   [(merge table-defaults
-           {:schema  "aviary"
-            :name    "toucanery"
-            :details {:owner "Cam", :sqft 10000}
-            :columns [(merge field-defaults
-                             {:name    "beak_size"
-                              :is_pk   true
-                              :details {:inches 7, :base-type "type/Integer"}})]})]]
-  (tt/with-temp* [Database [{database-id :id, :as db}]
-                  RawTable [table {:database_id database-id
-                                   :schema      "aviary"
-                                   :name        "toucanery"
-                                   :details     {:owner "Cam"}}]]
-    [(get-tables database-id)
-     ;; now update the table
-     (do
-       (update-raw-table! table {:schema  "aviary"
-                                 :name    "toucanery"
-                                 :details {:owner "Cam", :sqft 10000}
-                                 :fields [{:name      "beak_size"
-                                           :base-type :type/Integer
-                                           :pk?       true
-                                           :details   {:inches 7}}]})
-       (get-tables database-id))]))
-
-
-;; disable-raw-tables
-(expect
-  [[(merge table-defaults
-           {:schema  "a"
-            :name    "1"
-            :columns [(merge field-defaults {:name "size"})]})
-    (merge table-defaults
-           {:schema  "a"
-            :name    "2"
-            :columns [(merge field-defaults {:name "beak_size", :fk_target_column_id true})]})]
-   [(merge table-defaults
-           {:schema  "a"
-            :name    "1"
-            :columns [(merge field-defaults {:active false, :name "size"})]
-            :active  false})
-    (merge table-defaults
-           {:schema  "a"
-            :name    "2"
-            :columns [(merge field-defaults {:active false, :name "beak_size"})]
-            :active  false})]]
-  (tt/with-temp* [Database  [{database-id :id, :as db}]
-                  RawTable  [t1 {:database_id database-id, :schema "a", :name "1"}]
-                  RawColumn [c1 {:raw_table_id (:id t1), :name "size"}]
-                  RawTable  [t2 {:database_id database-id, :schema "a", :name "2"}]
-                  RawColumn [c2 {:raw_table_id (:id t2), :name "beak_size", :fk_target_column_id (:id c1)}]]
-    [(get-tables database-id)
-     (do
-       (disable-raw-tables! [(:id t1) (:id t2)])
-       (get-tables database-id))]))
-
-
-;;; introspect-database-and-update-raw-tables!
-(expect
-  [[]
-   moviedb/moviedb-raw-tables
-   moviedb/moviedb-raw-tables
-   (conj (vec (drop-last moviedb/moviedb-raw-tables))
-         (-> (last moviedb/moviedb-raw-tables)
-             (assoc :active false)
-             (update :columns (fn [columns]
-                                (for [column columns]
-                                  (assoc column
-                                    :active              false
-                                    :fk_target_column_id false))))))]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]]
-    [(get-tables database-id)
-     ;; first sync should add all the tables, fields, etc
-     (do
-       (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db)
-       (get-tables database-id))
-     ;; run the sync a second time to see how we respond to repeat syncing
-     (do
-       (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db)
-       (get-tables database-id))
-     ;; one more time, but this time we'll remove a table and make sure that's handled properly
-     (do
-       (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) (assoc db :exclude-tables #{"roles"}))
-       (get-tables database-id))]))
diff --git a/test/metabase/sync_database/sync_dynamic_test.clj b/test/metabase/sync_database/sync_dynamic_test.clj
index 8876096b732e6b1be70e08a65117ed808bc2d6b7..a7cfc25ec38aae5a3e3a4311b8456e472782b6fb 100644
--- a/test/metabase/sync_database/sync_dynamic_test.clj
+++ b/test/metabase/sync_database/sync_dynamic_test.clj
@@ -1,13 +1,15 @@
 (ns metabase.sync-database.sync-dynamic-test
+  "Tests for databases with a so-called 'dynamic' schema, i.e. one that is not hard-coded somewhere.
+   A Mongo database is an example of such a DB."
   (:require [expectations :refer :all]
+            [metabase
+             [sync :as sync]
+             [util :as u]]
             [metabase.models
              [database :refer [Database]]
              [field :refer [Field]]
-             [raw-table :refer [RawTable]]
              [table :refer [Table]]]
-            [metabase.sync-database
-             [introspect :as introspect]
-             [sync-dynamic :refer :all]]
+            [metabase.sync.sync-metadata :as sync-metadata]
             [metabase.test.mock.toucanery :as toucanery]
             [metabase.test.util :as tu]
             [toucan
@@ -15,190 +17,128 @@
              [hydrate :refer [hydrate]]]
             [toucan.util.test :as tt]))
 
-(tu/resolve-private-vars metabase.sync-database.sync-dynamic
-  save-table-fields!)
+(defn- remove-nonsense
+  "Remove fields that aren't really relevant in the output for TABLES and their FIELDS.
+   Done for the sake of making debugging some of the tests below easier."
+  [tables]
+  (for [table tables]
+    (-> (u/select-non-nil-keys table [:schema :name :raw_table_id :fields])
+        (update  :fields (fn [fields]
+                           (for [field fields]
+                             (u/select-non-nil-keys field [:table_id :name :fk_target_field_id :parent_id :base_type :special_type])))))))
 
-(defn- get-tables [database-id]
-  (->> (hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields)
+(defn- get-tables [database-or-id]
+  (->> (hydrate (db/select Table, :db_id (u/get-id database-or-id), {:order-by [:id]}) :fields)
        (mapv tu/boolean-ids-and-timestamps)))
 
-(def ^:private ^:const field-defaults
-  {:id                 true
-   :table_id           true
-   :raw_column_id      false
-   :description        nil
-   :caveats            nil
-   :points_of_interest nil
-   :visibility_type    :normal
-   :special_type       nil
-   :parent_id          false
-   :fk_target_field_id false
-   :last_analyzed      false
-   :created_at         true
-   :updated_at         true})
+;; basic test to make sure syncing nested fields works. This is sort of a higher-level test.
+(expect
+  (remove-nonsense toucanery/toucanery-tables-and-fields)
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    (sync/sync-database! db)
+    (remove-nonsense (get-tables db))))
+
+
+;;; ------------------------------------------------------------ Tests for sync-metadata ------------------------------------------------------------
+
+;; TODO - At some point these tests should be moved into a `sync-metadata-test` or `sync-metadata.fields-test` namespace
+
+;; make sure nested fields get resynced correctly if their parent field didn't change
+(expect
+  #{"weight" "age"}
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    ;; do the initial sync
+    (sync-metadata/sync-db-metadata! db)
+    ;; delete our entry for the `transactions.toucan.details.age` field
+    (let [transactions-table-id (u/get-id (db/select-one-id Table :db_id (u/get-id db), :name "transactions"))
+          toucan-field-id       (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan"))
+          details-field-id      (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id))
+          age-field-id          (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "age", :parent_id details-field-id))]
+      (db/delete! Field :id age-field-id)
+      ;; now sync again.
+      (sync-metadata/sync-db-metadata! db)
+      ;; field should be added back
+      (db/select-field :name Field :table_id transactions-table-id, :parent_id details-field-id, :active true))))
+
+;; Now do the exact same test where we make the Field inactive. Should get reactivated
+(expect
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    ;; do the initial sync
+    (sync-metadata/sync-db-metadata! db)
+    ;; mark our entry for the `transactions.toucan.details.age` field as inactive
+    (let [transactions-table-id (u/get-id (db/select-one-id Table :db_id (u/get-id db), :name "transactions"))
+          toucan-field-id       (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan"))
+          details-field-id      (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id))
+          age-field-id          (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "age", :parent_id details-field-id))]
+      (db/update! Field age-field-id :active false)
+      ;; now sync again.
+      (sync-metadata/sync-db-metadata! db)
+      ;; field should be reactivated
+      (db/select-field :active Field :id age-field-id))))
 
-;; save-table-fields!  (also covers save-nested-fields!)
+;; nested fields should also get reactivated if the parent field gets reactivated
 (expect
-  [[]
-   ;; initial sync
-   [(merge field-defaults {:base_type    :type/Integer
-                           :special_type :type/PK
-                           :name         "First"
-                           :display_name "First"})
-    (merge field-defaults {:base_type    :type/Text
-                           :name         "Second"
-                           :display_name "Second"})
-    (merge field-defaults {:base_type    :type/Boolean
-                           :special_type nil
-                           :name         "Third"
-                           :display_name "Third"})]
-   ;; add column, modify first column, add some nested fields
-   [(merge field-defaults {:base_type    :type/Decimal
-                           :special_type :type/PK
-                           :name         "First"
-                           :display_name "First"})
-    (merge field-defaults {:base_type    :type/Text
-                           :name         "Second"
-                           :display_name "Second"})
-    (merge field-defaults {:base_type    :type/Boolean
-                           :name         "Third"
-                           :display_name "Third"})
-    (merge field-defaults {:base_type    :type/Integer
-                           :special_type :type/Category
-                           :name         "rating"
-                           :display_name "Rating"})
-    (merge field-defaults {:base_type    :type/Text
-                           :special_type :type/City
-                           :name         "city"
-                           :display_name "City"
-                           :parent_id    true})
-    (merge field-defaults {:base_type    :type/Text
-                           :special_type :type/Category
-                           :name         "type"
-                           :display_name "Type"
-                           :parent_id    true})]
-   ;; first column retired, 3rd column now a pk, another nested field
-   [(merge field-defaults {:base_type    :type/Decimal
-                           :special_type :type/PK
-                           :name         "First"
-                           :display_name "First"})
-    (merge field-defaults {:base_type    :type/Text
-                           :name         "Second"
-                           :display_name "Second"})
-    (merge field-defaults {:base_type    :type/Boolean
-                           :special_type :type/PK
-                           :name         "Third"
-                           :display_name "Third"})
-    (merge field-defaults {:name         "rating"
-                           :display_name "Rating"
-                           :base_type    :type/Integer
-                           :special_type :type/Category})
-    (merge field-defaults {:base_type    :type/Text
-                           :special_type :type/City
-                           :name         "city"
-                           :display_name "City"
-                           :parent_id    true})
-    (merge field-defaults {:base_type    :type/Text
-                           :special_type :type/Category
-                           :name         "type"
-                           :display_name "Type"
-                           :parent_id    true})
-    (merge field-defaults {:base_type    :type/Boolean
-                           :name         "new"
-                           :display_name "New"
-                           :parent_id    true})]]
-  (tt/with-temp* [Database  [{database-id :id}]
-                  RawTable  [{raw-table-id :id}       {:database_id database-id}]
-                  Table     [{table-id :id, :as table} {:db_id database-id, :raw_table_id raw-table-id}]]
-    (let [get-fields   (fn []
-                         (for [field (db/select Field, :table_id table-id, {:order-by [:id]})]
-                           (dissoc (tu/boolean-ids-and-timestamps field)
-                                   :active :position :preview_display)))
-          save-fields! (fn [& fields]
-                         (save-table-fields! table fields)
-                         (get-fields))]
-      ;; start with no fields
-      [(get-fields)
-       ;; first sync will add all the fields
-       (save-fields! {:name "First", :base-type :type/Integer, :pk? true}
-                     {:name "Second", :base-type :type/Text}
-                     {:name "Third", :base-type :type/Boolean})
-       ;; now add another column (with nested-fields!) and modify the first
-       (save-fields! {:name "First", :base-type :type/Decimal, :pk? false}
-                     {:name "Second", :base-type :type/Text}
-                     {:name "Third", :base-type :type/Boolean}
-                     {:name "rating", :base-type :type/Integer, :nested-fields [{:name "city", :base-type :type/Text}
-                                                                                {:name "type", :base-type :type/Text}]})
-       ;; now remove the first column (should have no effect), and make tweaks to the nested columns
-       (save-fields! {:name "Second", :base-type :type/Text}
-                     {:name "Third", :base-type :type/Boolean, :pk? true}
-                     {:name "rating", :base-type :type/Integer, :nested-fields [{:name "new", :base-type :type/Boolean}]})])))
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    ;; do the initial sync
+    (sync-metadata/sync-db-metadata! db)
+    ;; mark the parent `transactions.toucan.details` field as inactive
+    (let [transactions-table-id (u/get-id (db/select-one-id Table :db_id (u/get-id db), :name "transactions"))
+          toucan-field-id       (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan"))
+          details-field-id      (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id))
+          age-field-id          (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "age", :parent_id details-field-id))]
+      (db/update! Field details-field-id :active false)
+      ;; now sync again.
+      (sync-metadata/sync-db-metadata! db)
+      ;; field should be reactivated
+      (db/select-field :active Field :id age-field-id))))
 
 
-;; scan-table-and-update-data-model!
+;; make sure nested fields can get marked inactive
 (expect
-  [[(last toucanery/toucanery-tables-and-fields)]
-   [(last toucanery/toucanery-tables-and-fields)]
-   [(assoc (last toucanery/toucanery-tables-and-fields)
-      :active false
-      :fields [])]]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :toucanery}]]
-    (let [driver (toucanery/->ToucaneryDriver)]
-      ;; do a quick introspection to add the RawTables to the db
-      (introspect/introspect-database-and-update-raw-tables! driver db)
-      ;; stub out the Table we are going to sync for real below
-      (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "transactions")
-            tbl          (db/insert! Table
-                           :db_id        database-id
-                           :raw_table_id raw-table-id
-                           :name         "transactions"
-                           :active       true)]
-        [ ;; now lets run a sync and check what we got
-         (do
-           (scan-table-and-update-data-model! driver db tbl)
-           (get-tables database-id))
-         ;; run the sync a second time to see how we respond to repeat syncing (should be same since nothing changed)
-         (do
-           (scan-table-and-update-data-model! driver db tbl)
-           (get-tables database-id))
-         ;; one more time, but lets disable the table this time and ensure that's handled properly
-         (do
-           (db/update-where! RawTable {:database_id database-id
-                                       :name        "transactions"}
-             :active false)
-           (scan-table-and-update-data-model! driver db tbl)
-           (get-tables database-id))]))))
+  false
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    ;; do the initial sync
+    (sync-metadata/sync-db-metadata! db)
+    ;; Add an entry for a `transactions.toucan.details.gender` field
+    (let [transactions-table-id (u/get-id (db/select-one-id Table :db_id (u/get-id db), :name "transactions"))
+          toucan-field-id       (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan"))
+          details-field-id      (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id))
+          gender-field-id       (u/get-id (db/insert! Field
+                                            :name     "gender"
+                                            :base_type "type/Text"
+                                            :table_id transactions-table-id
+                                            :parent_id details-field-id
+                                            :active true))]
 
+      ;; now sync again.
+      (sync-metadata/sync-db-metadata! db)
+      ;; field should become inactive
+      (db/select-one-field :active Field :id gender-field-id))))
 
-;; scan-database-and-update-data-model!
+;; make sure when a nested field gets marked inactive, so do its children
 (expect
-  [toucanery/toucanery-raw-tables-and-columns
-   toucanery/toucanery-tables-and-fields
-   toucanery/toucanery-tables-and-fields
-   (conj (vec (drop-last toucanery/toucanery-tables-and-fields))
-         (assoc (last toucanery/toucanery-tables-and-fields)
-           :active false
-           :fields []))]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :toucanery}]]
-    (let [driver (toucanery/->ToucaneryDriver)]
-      ;; do a quick introspection to add the RawTables to the db
-      (introspect/introspect-database-and-update-raw-tables! driver db)
+  false
+  (tt/with-temp* [Database [db {:engine :toucanery}]]
+    ;; do the initial sync
+    (sync-metadata/sync-db-metadata! db)
+    ;; Add entries for a `transactions.toucan.details.food-likes` field and a nested `blueberries` field
+    (let [transactions-table-id (u/get-id (db/select-one-id Table :db_id (u/get-id db), :name "transactions"))
+          toucan-field-id       (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "toucan"))
+          details-field-id      (u/get-id (db/select-one-id Field :table_id transactions-table-id, :name "details", :parent_id toucan-field-id))
+          food-likes-field-id   (u/get-id (db/insert! Field
+                                            :name     "food-likes"
+                                            :base_type "type/Dictionary"
+                                            :table_id transactions-table-id
+                                            :parent_id details-field-id
+                                            :active true))
+          blueberries-field-id (u/get-id (db/insert! Field
+                                           :name "blueberries"
+                                           :base_type "type/Boolean"
+                                           :table_id transactions-table-id
+                                           :parent_id food-likes-field-id
+                                           :active true))]
 
-      [ ;; first check that the raw tables stack up as expected, especially that fields were skipped because this is a :dynamic-schema db
-       (->> (hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns)
-            (mapv tu/boolean-ids-and-timestamps))
-       ;; now lets run a sync and check what we got
-       (do
-         (scan-database-and-update-data-model! driver db)
-         (get-tables database-id))
-       ;; run the sync a second time to see how we respond to repeat syncing (should be same since nothing changed)
-       (do
-         (scan-database-and-update-data-model! driver db)
-         (get-tables database-id))
-       ;; one more time, but lets disable a table this time and ensure that's handled properly
-       (do
-         (db/update-where! RawTable {:database_id database-id
-                                     :name        "transactions"}
-           :active false)
-         (scan-database-and-update-data-model! driver db)
-         (get-tables database-id))])))
+      ;; now sync again.
+      (sync-metadata/sync-db-metadata! db)
+      ;; field should become inactive
+      (db/select-one-field :active Field :id blueberries-field-id))))
diff --git a/test/metabase/sync_database/sync_test.clj b/test/metabase/sync_database/sync_test.clj
deleted file mode 100644
index f41579d6f4b2ae5c86646478aecf14e706201f36..0000000000000000000000000000000000000000
--- a/test/metabase/sync_database/sync_test.clj
+++ /dev/null
@@ -1,381 +0,0 @@
-(ns metabase.sync-database.sync-test
-  (:require [expectations :refer :all]
-            [metabase.models
-             [database :refer [Database]]
-             [field :refer [Field]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]
-             [table :refer [Table]]]
-            [metabase.sync-database
-             [introspect :as introspect]
-             [sync :refer :all]]
-            [metabase.test
-             [data :as data]
-             [util :as tu]]
-            [metabase.test.data.interface :as i]
-            [metabase.test.mock
-             [moviedb :as moviedb]
-             [schema-per-customer :as schema-per-customer]]
-            [toucan
-             [db :as db]
-             [hydrate :refer [hydrate]]]
-            [toucan.util.test :as tt]))
-
-(tu/resolve-private-vars metabase.sync-database.sync
-  save-fks! save-table-fields!)
-
-(defn- get-tables [database-id]
-  (->> (hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields)
-       (mapv tu/boolean-ids-and-timestamps)))
-
-
-;; save-fks!
-(expect
-  [[{:special_type nil, :name "fk1", :fk_target_field_id false}]
-   [{:special_type :type/FK, :name "fk1", :fk_target_field_id true}]
-   [{:special_type :type/FK, :name "fk1", :fk_target_field_id true}]
-   [{:special_type :type/FK, :name "fk1", :fk_target_field_id true}]]
-  (tt/with-temp* [Database  [{database-id :id}]
-                  RawTable  [{raw-table-id1 :id, :as table}  {:database_id database-id, :name "fk_source"}]
-                  RawColumn [{raw-fk1 :id}                   {:raw_table_id raw-table-id1, :name "fk1"}]
-                  Table     [{t1 :id}                        {:db_id database-id, :raw_table_id raw-table-id1, :name "fk_source"}]
-                  Field     [{fk1 :id}                       {:table_id t1, :raw_column_id raw-fk1, :name "fk1"}]
-                  RawTable  [{raw-table-id2 :id, :as table1} {:database_id database-id, :name "fk_target"}]
-                  RawColumn [{raw-target1 :id}               {:raw_table_id raw-table-id2, :name "target1"}]
-                  RawColumn [{raw-target2 :id}               {:raw_table_id raw-table-id2, :name "target2"}]
-                  Table     [{t2 :id}                        {:db_id database-id, :raw_table_id raw-table-id2, :name "fk_target"}]
-                  Field     [{target1 :id}                   {:table_id t2, :raw_column_id raw-target1, :name "target1"}]
-                  Field     [{target2 :id}                   {:table_id t2, :raw_column_id raw-target2, :name "target2"}]]
-    (let [get-fields (fn [table-id]
-                       (->> (db/select [Field :name :special_type :fk_target_field_id], :table_id table-id)
-                            (mapv tu/boolean-ids-and-timestamps)))]
-      [ ;; original list should not have any fks
-       (get-fields t1)
-       ;; now add a fk
-       (do
-         (save-fks! [{:source-column raw-fk1, :target-column raw-target1}])
-         (get-fields t1))
-       ;; if the source/target is wack nothing bad happens
-       (do
-         (save-fks! [{:source-column raw-fk1, :target-column 87893243}
-                     {:source-column 987234, :target-column raw-target1}])
-         (get-fields t1))
-       ;; replacing an existing fk
-       (do
-         (save-fks! [{:source-column raw-fk1, :target-column raw-target2}])
-         (get-fields t1))])))
-
-
-;; sync-metabase-metadata-table!
-(expect
-  [{:name "movies"
-    :description nil
-    :id true
-    :fields [{:name "filming"
-              :description nil}]}
-   {:name "movies"
-    :description "A cinematic adventure."
-    :id true
-    :fields [{:name "filming"
-              :description "If the movie is currently being filmed."}]}]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]]
-    ;; setup a couple things we'll use in the test
-    (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db)
-    (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "movies")
-          table        (db/insert! Table
-                         :db_id        database-id
-                         :raw_table_id raw-table-id
-                         :name         "movies"
-                         :active       true)
-          get-table    #(-> (db/select-one [Table :id :name :description], :id (:id table))
-                            (hydrate :fields)
-                            (update :fields (fn [fields]
-                                              (for [f fields
-                                                    :when (= "filming" (:name f))]
-                                                (select-keys f [:name :description]))))
-                            tu/boolean-ids-and-timestamps)]
-
-      (update-data-models-for-table! table)
-      ;; here we go
-      [(get-table)
-       (do
-         (sync-metabase-metadata-table! (moviedb/->MovieDbDriver) db {})
-         (get-table))])))
-
-
-(def ^:private ^:const field-defaults
-  {:id                 true
-   :table_id           true
-   :raw_column_id      true
-   :description        nil
-   :caveats            nil
-   :points_of_interest nil
-   :visibility_type    :normal
-   :special_type       nil
-   :parent_id          false
-   :fk_target_field_id false
-   :last_analyzed      false
-   :created_at         true
-   :updated_at         true})
-
-;; save-table-fields!
-;; this test also covers create-field-from-field-def! and update-field-from-field-def!
-(expect
-  [[]
-   ;; initial sync
-   [(merge field-defaults {:name         "First"
-                           :display_name "First"
-                           :base_type    :type/Integer
-                           :special_type :type/PK})
-    (merge field-defaults {:name         "Second"
-                           :display_name "Second"
-                           :base_type    :type/Text})
-    (merge field-defaults {:name         "Third"
-                           :display_name "Third"
-                           :base_type    :type/Boolean
-                           :special_type nil})]
-   ;; add column, modify first column
-   [(merge field-defaults {:name         "First"
-                           :display_name "First"
-                           :base_type    :type/Decimal
-                           :special_type :type/PK}) ; existing special types are NOT modified
-    (merge field-defaults {:name         "Second"
-                           :display_name "Second"
-                           :base_type    :type/Text})
-    (merge field-defaults {:name         "Third"
-                           :display_name "Third"
-                           :base_type    :type/Boolean
-                           :special_type nil})
-    (merge field-defaults {:name         "rating"
-                           :display_name "Rating"
-                           :base_type    :type/Integer
-                           :special_type :type/Category})]
-   ;; first column retired, 3rd column now a pk
-   [(merge field-defaults {:name            "First"
-                           :display_name    "First"
-                           :base_type       :type/Decimal
-                           :visibility_type :retired ; field retired when RawColumn disabled
-                           :special_type    :type/PK})
-    (merge field-defaults {:name         "Second"
-                           :display_name "Second"
-                           :base_type    :type/Text})
-    (merge field-defaults {:name         "Third"
-                           :display_name "Third"
-                           :base_type    :type/Boolean
-                           :special_type :type/PK}) ; special type can be set if it was nil before
-    (merge field-defaults {:name         "rating"
-                           :display_name "Rating"
-                           :base_type    :type/Integer
-                           :special_type :type/Category})]]
-  (tt/with-temp* [Database  [{database-id :id}]
-                  RawTable  [{raw-table-id :id, :as table} {:database_id database-id}]
-                  RawColumn [{raw-column-id1 :id}          {:raw_table_id raw-table-id, :name "First", :is_pk true, :details {:base-type "type/Integer"}}]
-                  RawColumn [{raw-column-id2 :id}          {:raw_table_id raw-table-id, :name "Second", :details {:base-type "type/Text"}}]
-                  RawColumn [{raw-column-id3 :id}          {:raw_table_id raw-table-id, :name "Third", :details {:base-type "type/Boolean"}}]
-                  Table     [{table-id :id, :as tbl}       {:db_id database-id, :raw_table_id raw-table-id}]]
-    (let [get-fields #(->> (db/select Field, :table_id table-id, {:order-by [:id]})
-                           (mapv tu/boolean-ids-and-timestamps)
-                           (mapv (fn [m]
-                                   (dissoc m :active :position :preview_display))))
-          initial-fields (get-fields)
-          first-sync     (do
-                           (save-table-fields! tbl)
-                           (get-fields))]
-      (tt/with-temp* [RawColumn [_ {:raw_table_id raw-table-id, :name "rating", :details {:base-type "type/Integer"}}]]
-        ;; start with no fields
-        [initial-fields
-         ;; first sync will add all the fields
-         first-sync
-         ;; now add another column and modify the first
-         (do
-           (db/update! RawColumn raw-column-id1, :is_pk false, :details {:base-type "type/Decimal"})
-           (save-table-fields! tbl)
-           (get-fields))
-         ;; now disable the first column
-         (do
-           (db/update! RawColumn raw-column-id1, :active false)
-           (db/update! RawColumn raw-column-id3, :is_pk true)
-           (save-table-fields! tbl)
-           (get-fields))]))))
-
-
-;; retire-tables!
-(expect
-  (let [disabled-movies-table (fn [table]
-                                (if-not (= "movies" (:name table))
-                                  table
-                                  (assoc table
-                                    :active false
-                                    :fields [])))]
-    [moviedb/moviedb-tables-and-fields
-     (mapv disabled-movies-table moviedb/moviedb-tables-and-fields)])
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]]
-    ;; setup a couple things we'll use in the test
-    (introspect/introspect-database-and-update-raw-tables! (moviedb/->MovieDbDriver) db)
-    (update-data-models-from-raw-tables! db)
-    (let [get-tables #(->> (hydrate (db/select Table, :db_id database-id, {:order-by [:id]}) :fields)
-                           (mapv tu/boolean-ids-and-timestamps))]
-      ;; here we go
-      [(get-tables)
-       (do
-         ;; disable the table
-         (db/update-where! RawTable {:database_id database-id
-                                     :name        "movies"}
-           :active false)
-         ;; run our retires function
-         (retire-tables! db)
-         ;; now we should see the table and its fields disabled
-         (get-tables))])))
-
-
-;; update-data-models-for-table!
-(expect
-  (let [disable-fks (fn [fields]
-                      (for [field fields]
-                        (if (isa? (:special_type field) :type/FK)
-                          (assoc field
-                            :special_type       nil
-                            :fk_target_field_id false)
-                          field)))]
-    [[(-> (last moviedb/moviedb-tables-and-fields)
-          (update :fields disable-fks))]
-     [(-> (last moviedb/moviedb-tables-and-fields)
-          (update :fields disable-fks))]
-     [(-> (last moviedb/moviedb-tables-and-fields)
-          (assoc :active false
-                 :fields []))]])
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]]
-    (let [driver (moviedb/->MovieDbDriver)]
-      ;; do a quick introspection to add the RawTables to the db
-      (introspect/introspect-database-and-update-raw-tables! driver db)
-
-      ;; stub out the Table we are going to sync for real below
-      (let [raw-table-id (db/select-one-id RawTable, :database_id database-id, :name "roles")
-            table        (db/insert! Table
-                           :db_id        database-id
-                           :raw_table_id raw-table-id
-                           :name         "roles"
-                           :active       true)]
-        [ ;; now lets run a sync and check what we got
-         (do
-           (update-data-models-for-table! table)
-           (get-tables database-id))
-         ;; run the sync a second time to see how we respond to repeat syncing (should be same since nothing changed)
-         (do
-           (update-data-models-for-table! table)
-           (get-tables database-id))
-         ;; one more time, but lets disable the table this time and ensure that's handled properly
-         (do
-           (db/update-where! RawTable {:database_id database-id
-                                       :name        "roles"}
-             :active false)
-           (update-data-models-for-table! table)
-           (get-tables database-id))]))))
-
-
-;; update-data-models-from-raw-tables!
-(expect
-  [moviedb/moviedb-raw-tables
-   moviedb/moviedb-tables-and-fields
-   moviedb/moviedb-tables-and-fields
-   (conj (vec (drop-last moviedb/moviedb-tables-and-fields))
-         (-> (last moviedb/moviedb-tables-and-fields)
-             (assoc :active false
-                    :fields [])))]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :moviedb}]]
-    (let [driver (moviedb/->MovieDbDriver)]
-      ;; do a quick introspection to add the RawTables to the db
-      (introspect/introspect-database-and-update-raw-tables! driver db)
-
-      [;; first check that the raw tables stack up as expected
-       (->> (hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns)
-            (mapv tu/boolean-ids-and-timestamps))
-       ;; now lets run a sync and check what we got
-       (do
-         (update-data-models-from-raw-tables! db)
-         (get-tables database-id))
-       ;; run the sync a second time to see how we respond to repeat syncing (should be same since nothing changed)
-       (do
-         (update-data-models-from-raw-tables! db)
-         (get-tables database-id))
-       ;; one more time, but lets disable a table this time and ensure that's handled properly
-       (do
-         (db/update-where! RawTable {:database_id database-id
-                                     :name        "roles"}
-           :active false)
-         (update-data-models-from-raw-tables! db)
-         (get-tables database-id))])))
-
-
-(defn- resolve-fk-targets
-  "Convert :fk_target_[column|field]_id into more testable information with table/schema names."
-  [m]
-  (let [resolve-raw-column (fn [column-id]
-                             (when-let [{col-name :name, table :raw_table_id} (db/select-one [RawColumn :raw_table_id :name], :id column-id)]
-                               (-> (db/select-one [RawTable :schema :name], :id table)
-                                   (assoc :col-name col-name))))
-        resolve-field      (fn [field-id]
-                             (when-let [{col-name :name, table :table_id} (db/select-one [Field :table_id :name], :id field-id)]
-                               (-> (db/select-one [Table :schema :name], :id table)
-                                   (assoc :col-name col-name))))
-        resolve-fk         (fn [m]
-                             (cond
-                               (:fk_target_column_id m)
-                               (assoc m :fk_target_column (resolve-raw-column (:fk_target_column_id m)))
-
-                               (:fk_target_field_id m)
-                               (assoc m :fk_target_field (resolve-field (:fk_target_field_id m)))
-
-                               :else
-                               m))]
-    (update m (if (:database_id m) :columns :fields) #(mapv resolve-fk %))))
-
-;; special test case which validates a fairly complex multi-schema setup with lots of FKs
-(expect
-  [schema-per-customer/schema-per-customer-raw-tables
-   schema-per-customer/schema-per-customer-tables-and-fields
-   schema-per-customer/schema-per-customer-tables-and-fields]
-  (tt/with-temp* [Database [{database-id :id, :as db} {:engine :schema-per-customer}]]
-    (let [driver     (schema-per-customer/->SchemaPerCustomerDriver)
-          db-tables  #(->> (hydrate (db/select Table, :db_id %, {:order-by [:id]}) :fields)
-                           (mapv resolve-fk-targets)
-                           (mapv tu/boolean-ids-and-timestamps))]
-      ;; do a quick introspection to add the RawTables to the db
-      (introspect/introspect-database-and-update-raw-tables! driver db)
-
-      [;; first check that the raw tables stack up as expected
-       (->> (hydrate (db/select RawTable, :database_id database-id, {:order-by [:id]}) :columns)
-            (mapv resolve-fk-targets)
-            (mapv tu/boolean-ids-and-timestamps))
-       ;; now lets run a sync and check what we got
-       (do
-         (update-data-models-from-raw-tables! db)
-         (db-tables database-id))
-       ;; run the sync a second time to see how we respond to repeat syncing (should be same since nothing changed)
-       (do
-         (update-data-models-from-raw-tables! db)
-         (db-tables database-id))])))
-
-
-;;; ------------------------------------------------------------ Make sure that "crufty" tables are marked as such ------------------------------------------------------------
-(i/def-database-definition ^:const ^:private db-with-some-cruft
-  ["acquired_toucans"
-   [{:field-name "species",              :base-type :type/Text}
-    {:field-name "cam_has_acquired_one", :base-type :type/Boolean}]
-   [["Toco"               false]
-    ["Chestnut-Mandibled" true]
-    ["Keel-billed"        false]
-    ["Channel-billed"     false]]]
-  ["south_migrationhistory"
-   [{:field-name "app_name",  :base-type :type/Text}
-    {:field-name "migration", :base-type :type/Text}]
-   [["main" "0001_initial"]
-    ["main" "0002_add_toucans"]]])
-
-;; south_migrationhistory, being a CRUFTY table, should still be synced, but marked as such
-(expect
-  #{{:name "SOUTH_MIGRATIONHISTORY", :visibility_type :cruft}
-    {:name "ACQUIRED_TOUCANS",       :visibility_type nil}}
-  (data/dataset metabase.sync-database.sync-test/db-with-some-cruft
-    (set (for [table (db/select [Table :name :visibility_type], :db_id (data/id))]
-           (into {} table)))))
diff --git a/test/metabase/sync_database_test.clj b/test/metabase/sync_database_test.clj
index 7a8c4628e520fe7f13675fcdfb1d6b25e77e2fa1..badf18be90d20769468ae6a5105ce4e3b0b14629 100644
--- a/test/metabase/sync_database_test.clj
+++ b/test/metabase/sync_database_test.clj
@@ -5,16 +5,14 @@
             [metabase
              [db :as mdb]
              [driver :as driver]
-             [sync-database :refer :all]
+             [sync :refer :all]
              [util :as u]]
             [metabase.driver.generic-sql :as sql]
             [metabase.models
              [database :refer [Database]]
              [field :refer [Field]]
-             [field-values :refer [FieldValues]]
-             [raw-table :refer [RawTable]]
+             [field-values :as field-values :refer [FieldValues]]
              [table :refer [Table]]]
-            metabase.sync-database.analyze
             [metabase.test
              [data :refer :all]
              [util :as tu]]
@@ -38,32 +36,40 @@
                        {:name      "name"
                         :base-type :type/Text}}}})
 
+
+;; TODO - I'm 90% sure we could just reüse the "MovieDB" instead of having this subset of it used here
 (defrecord SyncTestDriver []
   clojure.lang.Named
   (getName [_] "SyncTestDriver"))
 
+
+(defn- describe-database [& _]
+  {:tables (set (for [table (vals sync-test-tables)]
+                  (dissoc table :fields)))})
+
+(defn- describe-table [_ _ table]
+  (get sync-test-tables (:name table)))
+
+(defn- describe-table-fks [_ _ table]
+  (set (when (= "movie" (:name table))
+         #{{:fk-column-name   "studio"
+            :dest-table       {:name   "studio"
+                               :schema nil}
+            :dest-column-name "studio"}})))
+
 (extend SyncTestDriver
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table      (constantly nil)
-          :describe-database  (constantly {:tables (set (for [table (vals sync-test-tables)]
-                                                          (dissoc table :fields)))})
-          :describe-table     (fn [_ _ table]
-                                (get sync-test-tables (:name table)))
-          :describe-table-fks (fn [_ _ table]
-                                (if (= "movie" (:name table))
-                                  #{{:fk-column-name   "studio"
-                                     :dest-table       {:name "studio"
-                                                        :schema nil}
-                                     :dest-column-name "studio"}}
-                                  #{}))
-          :features           (constantly #{:foreign-keys})
-          :details-fields     (constantly [])}))
+         {:describe-database     describe-database
+          :describe-table        describe-table
+          :describe-table-fks    describe-table-fks
+          :features              (constantly #{:foreign-keys})
+          :details-fields        (constantly [])
+          :field-values-lazy-seq (constantly [])}))
 
-(driver/register-driver! :sync-test (SyncTestDriver.))
 
+(driver/register-driver! :sync-test (SyncTestDriver.))
 
-(def ^:private venues-table (delay (Table (id :venues))))
 
 (defn- table-details [table]
   (into {} (-> (dissoc table :db :pk_field :field_values)
@@ -71,10 +77,10 @@
                                 (into {} (dissoc field :table :db :children :qualified-name :qualified-name-components :values :target))))
                tu/boolean-ids-and-timestamps)))
 
-(def ^:private ^:const table-defaults
+(def ^:private table-defaults
   {:id                      true
    :db_id                   true
-   :raw_table_id            true
+   :raw_table_id            false
    :schema                  nil
    :description             nil
    :caveats                 nil
@@ -88,10 +94,10 @@
    :created_at              true
    :updated_at              true})
 
-(def ^:private ^:const field-defaults
+(def ^:private field-defaults
   {:id                 true
    :table_id           true
-   :raw_column_id      true
+   :raw_column_id      false
    :description        nil
    :caveats            nil
    :points_of_interest nil
@@ -103,8 +109,8 @@
    :fk_target_field_id false
    :created_at         true
    :updated_at         true
-   :last_analyzed      true})
-
+   :last_analyzed      true
+   :fingerprint        nil})
 
 ;; ## SYNC DATABASE
 (expect
@@ -171,19 +177,17 @@
                                  :name         "title"
                                  :display_name "Title"
                                  :base_type    :type/Text})]})
-  (tt/with-temp* [Database [db        {:engine :sync-test}]
-                  RawTable [raw-table {:database_id (u/get-id db), :name "movie", :schema "default"}]
-                  Table    [table     {:raw_table_id (u/get-id raw-table)
-                                       :name         "movie"
-                                       :schema       "default"
-                                       :db_id        (u/get-id db)}]]
+  (tt/with-temp* [Database [db    {:engine :sync-test}]
+                  Table    [table {:name   "movie"
+                                   :schema "default"
+                                   :db_id  (u/get-id db)}]]
     (sync-table! table)
     (table-details (Table (:id table)))))
 
 
 ;; test that we prevent running simultaneous syncs on the same database
 
-(defonce ^:private sync-count (atom 0))
+(defonce ^:private calls-to-describe-database (atom 0))
 
 (defrecord ConcurrentSyncTestDriver []
   clojure.lang.Named
@@ -192,9 +196,8 @@
 (extend ConcurrentSyncTestDriver
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table     (constantly nil)
-          :describe-database (fn [_ _]
-                               (swap! sync-count inc)
+         {:describe-database (fn [_ _]
+                               (swap! calls-to-describe-database inc)
                                (Thread/sleep 1000)
                                {:tables #{}})
           :describe-table    (constantly nil)
@@ -204,29 +207,34 @@
 
 ;; only one sync should be going on at a time
 (expect
-  1
-  (tt/with-temp* [Database [db {:engine :concurrent-sync-test}]]
-    (reset! sync-count 0)
-    ;; start a sync processes in the background. It should take 1000 ms to finish
-    (future (sync-database! db))
-    ;; wait 200 ms to make sure everything is going
-    (Thread/sleep 200)
-    ;; Start another in the background. Nothing should happen here because the first is already running
-    (future (sync-database! db))
-    ;; Start another in the foreground. Again, nothing should happen here because the original should still be running
-    (sync-database! db)
-    ;; Check the number of syncs that took place. Should be 1 (just the first)
-    @sync-count))
-
-
-;;; Test that we will remove field-values when they aren't appropriate
+ ;; describe-database gets called twice during a single sync process, once for syncing tables and a second time for syncing the _metabase_metadata table
+ 2
+ (tt/with-temp* [Database [db {:engine :concurrent-sync-test}]]
+   (reset! calls-to-describe-database 0)
+   ;; start a sync processes in the background. It should take 1000 ms to finish
+   (let [f1 (future (sync-database! db))
+         f2 (do
+              ;; wait 200 ms to make sure everything is going
+              (Thread/sleep 200)
+              ;; Start another in the background. Nothing should happen here because the first is already running
+              (future (sync-database! db)))]
+     ;; Start another in the foreground. Again, nothing should happen here because the original should still be running
+     (sync-database! db)
+     ;; make sure both of the futures have finished
+     (deref f1)
+     (deref f2)
+     ;; Check the number of calls to describe-database that took place. Should be 2 (both from the first sync)
+     @calls-to-describe-database)))
+
+
+;; Test that we will remove field-values when they aren't appropriate.
+;; Calling `sync-database!` below should cause them to get removed since the Field doesn't have an appropriate special type
 (expect
   [[1 2 3]
-   [1 2 3]]
-  (tt/with-temp* [Database [db    {:engine :sync-test}]
-                  RawTable [table {:database_id (u/get-id db), :name "movie", :schema "default"}]]
+   nil]
+  (tt/with-temp* [Database [db {:engine :sync-test}]]
     (sync-database! db)
-    (let [table-id (db/select-one-id Table, :raw_table_id (:id table))
+    (let [table-id (db/select-one-id Table, :schema "default", :name "movie")
           field-id (db/select-one-id Field, :table_id table-id, :name "title")]
       (tt/with-temp FieldValues [_ {:field_id field-id
                                     :values   "[1,2,3]"}]
@@ -251,14 +259,14 @@
      (do (db/update! Field (id :venues :id), :special_type nil)
          (get-special-type))
      ;; Calling sync-table! should set the special type again
-     (do (sync-table! @venues-table)
+     (do (sync-table! (Table (id :venues)))
          (get-special-type))
      ;; sync-table! should *not* change the special type of fields that are marked with a different type
      (do (db/update! Field (id :venues :id), :special_type :type/Latitude)
          (get-special-type))
      ;; Make sure that sync-table runs set-table-pks-if-needed!
      (do (db/update! Field (id :venues :id), :special_type nil)
-         (sync-table! @venues-table)
+         (sync-table! (Table (id :venues)))
          (get-special-type))]))
 
 ;; ## FK SYNCING
@@ -308,7 +316,7 @@
      (do (db/delete! FieldValues :id (get-field-values-id))
          (get-field-values))
      ;; 3. Now re-sync the table and make sure they're back
-     (do (sync-table! @venues-table)
+     (do (sync-table! (Table (id :venues)))
          (get-field-values))])
 
   ;; Test that syncing will cause FieldValues to be updated
@@ -322,11 +330,11 @@
      (do (db/update! FieldValues (get-field-values-id), :values [1 2 3])
          (get-field-values))
      ;; 3. Now re-sync the table and make sure the value is back
-     (do (sync-table! @venues-table)
+     (do (sync-table! (Table (id :venues)))
          (get-field-values))]))
 
-
-;;; -------------------- Make sure that if a Field's cardinality passes `metabase.sync-database.analyze/low-cardinality-threshold` (currently 300) (#3215) --------------------
+;; Make sure that if a Field's cardinality passes `low-cardinality-threshold` (currently 300)
+;; the corresponding FieldValues entry will be deleted (#3215)
 (defn- insert-range-sql [rang]
   (str "INSERT INTO blueberries_consumed (num) VALUES "
        (str/join ", " (for [n rang]
@@ -337,20 +345,36 @@
   (let [details {:db (str "mem:" (tu/random-name) ";DB_CLOSE_DELAY=10")}]
     (binding [mdb/*allow-potentailly-unsafe-connections* true]
       (tt/with-temp Database [db {:engine :h2, :details details}]
-        (let [driver (driver/engine->driver :h2)
-              spec   (sql/connection-details->spec driver details)
-              exec!  #(doseq [statement %]
-                        (jdbc/execute! spec [statement]))]
-          ;; create the `blueberries_consumed` table and insert a 100 values
-          (exec! ["CREATE TABLE blueberries_consumed (num INTEGER NOT NULL);"
-                  (insert-range-sql (range 100))])
-          (sync-database! db, :full-sync? true)
-          (let [table-id (db/select-one-id Table :db_id (u/get-id db))
-                field-id (db/select-one-id Field :table_id table-id)]
-            ;; field values should exist...
-            (assert (= (count (db/select-one-field :values FieldValues :field_id field-id))
-                       100))
-            ;; ok, now insert enough rows to push the field past the `low-cardinality-threshold` and sync again, there should be no more field values
-            (exec! [(insert-range-sql (range 100 (+ 100 @(resolve 'metabase.sync-database.analyze/low-cardinality-threshold))))])
-            (sync-database! db, :full-sync? true)
-            (db/exists? FieldValues :field_id field-id)))))))
+        (jdbc/with-db-connection [conn (sql/connection-details->spec (driver/engine->driver :h2) details)]
+          (let [exec! #(doseq [statement %]
+                         (jdbc/execute! conn [statement]))]
+            ;; create the `blueberries_consumed` table and insert 100 values
+            (exec! ["CREATE TABLE blueberries_consumed (num INTEGER NOT NULL);"
+                    (insert-range-sql (range 100))])
+            (sync-database! db {:full-sync? true})
+            (let [table-id (db/select-one-id Table :db_id (u/get-id db))
+                  field-id (db/select-one-id Field :table_id table-id)]
+              ;; field values should exist...
+              (assert (= (count (db/select-one-field :values FieldValues :field_id field-id))
+                         100))
+              ;; ok, now insert enough rows to push the field past the `low-cardinality-threshold` and sync again, there should be no more field values
+              (exec! [(insert-range-sql (range 100 (+ 100 field-values/low-cardinality-threshold)))])
+              (sync-database! db)
+              (db/exists? FieldValues :field_id field-id))))))))
+
+(defn- narrow-to-min-max [row]
+  (-> row
+      (get-in [:type :type/Number])
+      (select-keys [:min :max])
+      (update :min #(u/round-to-decimals 4 %))
+      (update :max #(u/round-to-decimals 4 %))))
+
+(expect
+  [{:min -165.374 :max -73.9533}
+   {:min 10.0646 :max 40.7794}]
+  (tt/with-temp* [Database [database {:details (:details (Database (id))), :engine :h2}]
+                  Table    [table    {:db_id (u/get-id database), :name "VENUES"}]]
+    (sync-table! table)
+    (map narrow-to-min-max
+         [(db/select-one-field :fingerprint Field, :id (id :venues :longitude))
+          (db/select-one-field :fingerprint Field, :id (id :venues :latitude))])))
diff --git a/test/metabase/test/data.clj b/test/metabase/test/data.clj
index 157e1475fd3506a6621a4575a789e8ec784d5b1b..6d52a97638b92007e00afea10546666e19cd2a7a 100644
--- a/test/metabase/test/data.clj
+++ b/test/metabase/test/data.clj
@@ -1,22 +1,24 @@
 (ns metabase.test.data
   "Code related to creating and deleting test databases + datasets."
-  (:require [clojure
+  (:require [cheshire.core :as json]
+            [clojure
              [string :as str]
              [walk :as walk]]
             [clojure.tools.logging :as log]
             [metabase
              [driver :as driver]
              [query-processor :as qp]
-             [sync-database :as sync-database]
+             [sync :as sync]
              [util :as u]]
             metabase.driver.h2
             [metabase.models
              [database :refer [Database]]
+             [dimension :refer [Dimension]]
              [field :as field :refer [Field]]
+             [field-values :refer [FieldValues]]
              [table :refer [Table]]]
-            [metabase.query-processor
-             [expand :as ql]
-             [interface :as qi]]
+            [metabase.query-processor.interface :as qi]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data
              [dataset-definitions :as defs]
              [datasets :refer [*driver*]]
@@ -129,7 +131,7 @@
   (i/format-name *driver* (name nm)))
 
 (defn- get-table-id-or-explode [db-id table-name]
-  {:pre [(integer? db-id) (u/string-or-keyword? table-name)]}
+  {:pre [(integer? db-id) ((some-fn keyword? string?) table-name)]}
   (let [table-name (format-name table-name)]
     (or (db/select-one-id Table, :db_id db-id, :name table-name)
         (db/select-one-id Table, :db_id db-id, :name (i/db-qualified-table-name (db/select-one-field :name Database :id db-id) table-name))
@@ -167,6 +169,11 @@
   []
   (contains? (driver/features *driver*) :foreign-keys))
 
+(defn binning-supported?
+  "Does the current engine support binning?"
+  []
+  (contains? (driver/features *driver*) :binning))
+
 (defn default-schema [] (i/default-schema *driver*))
 (defn id-field-type  [] (i/id-field-type *driver*))
 
@@ -205,14 +212,16 @@
   ;; Create the database
   (i/create-db! driver database-definition)
   ;; Add DB object to Metabase DB
-  (u/prog1 (db/insert! Database
+  (let [db (db/insert! Database
              :name    database-name
              :engine  (name engine)
-             :details (i/database->connection-details driver :db database-definition))
+             :details (i/database->connection-details driver :db database-definition))]
     ;; sync newly added DB
-    (sync-database/sync-database! <>)
+    (sync/sync-database! db)
     ;; add extra metadata for fields
-    (add-extra-metadata! database-definition <>)))
+    (add-extra-metadata! database-definition db)
+    ;; make sure we're returning an up-to-date copy of the DB
+    (Database (u/get-id db))))
 
 (defn- reload-test-extensions [engine]
   (println "Reloading test extensions for driver:" engine)
@@ -288,3 +297,50 @@
   [dataset & body]
   `(with-temp-db [_# (resolve-dbdef '~dataset)]
      ~@body))
+
+(defn- delete-model-instance!
+  "Allows deleting a row by the model instance toucan returns when
+  it's inserted"
+  [{:keys [id] :as instance}]
+  (db/delete! (-> instance name symbol) :id id))
+
+(defn call-with-data
+  "Takes a thunk `DATA-LOAD-FN` that returns a seq of toucan model
+  instances that will be deleted after `BODY-FN` finishes"
+  [data-load-fn body-fn]
+  (let [result-instances (data-load-fn)]
+    (try
+      (body-fn)
+      (finally
+        (doseq [instance result-instances]
+          (delete-model-instance! instance))))))
+
+(defmacro with-data [data-load-fn & body]
+  `(call-with-data ~data-load-fn (fn [] ~@body)))
+
+(def venue-categories
+  (map vector (defs/field-values defs/test-data-map "categories" "name")))
+
+(defn create-venue-category-remapping
+  "Returns a thunk that adds an internal remapping for category_id in
+  the venues table aliased as `REMAPPING-NAME`. Can be used in a
+  `with-data` invocation."
+  [remapping-name]
+  (fn []
+    [(db/insert! Dimension {:field_id (id :venues :category_id)
+                            :name remapping-name
+                            :type :internal})
+     (db/insert! FieldValues {:field_id (id :venues :category_id)
+                              :values (json/generate-string (range 0 (count venue-categories)))
+                              :human_readable_values (json/generate-string (map first venue-categories))})]))
+
+(defn create-venue-category-fk-remapping
+  "Returns a thunk that adds a FK remapping for category_id in the
+  venues table aliased as `REMAPPING-NAME`. Can be used in a
+  `with-data` invocation."
+  [remapping-name]
+  (fn []
+    [(db/insert! Dimension {:field_id (id :venues :category_id)
+                            :name remapping-name
+                            :type :external
+                            :human_readable_field_id (id :categories :name)})]))
diff --git a/test/metabase/test/data/dataset_definitions.clj b/test/metabase/test/data/dataset_definitions.clj
index f4cf1903279bdc48150dd779bf545a7da50c5978..255b1bb208eaf5316153f37a677522adee27fa92 100644
--- a/test/metabase/test/data/dataset_definitions.clj
+++ b/test/metabase/test/data/dataset_definitions.clj
@@ -25,12 +25,26 @@
 
 (def-database-definition-edn geographical-tips)
 
-;; A tiny dataset where half the NON-NULL values are valid URLs
-(def-database-definition-edn half-valid-urls)
-
 ;; A very tiny dataset with a list of places and a booleans
 (def-database-definition-edn places-cam-likes)
 
 ;; A small dataset with users and a set of messages between them. Each message has *2* foreign keys to user --
 ;; sender and reciever -- allowing us to test situations where multiple joins for a *single* table should occur.
 (def-database-definition-edn avian-singles)
+
+(def test-data-map
+  "Converts data from `test-data` to a map of maps like the following:
+
+   {<table-name> [{<field-name> <field-value>, ...} ...]}."
+  (reduce (fn [acc {:keys [table-name field-definitions rows]}]
+            (let [field-names (mapv :field-name field-definitions)]
+              (assoc acc table-name
+                     (for [row rows]
+                       (zipmap field-names row)))))
+          {} (:table-definitions test-data)))
+
+(defn field-values
+  "Returns the field values for the given `TABLE` and `COLUMN` found
+  in the data-map `M`."
+  [m table column]
+  (mapv #(get % column) (get m table)))
diff --git a/test/metabase/test/data/dataset_definitions/half-valid-urls.edn b/test/metabase/test/data/dataset_definitions/half-valid-urls.edn
deleted file mode 100644
index 349176309155017cb7d79d7e5f2996ceb9dc1b58..0000000000000000000000000000000000000000
--- a/test/metabase/test/data/dataset_definitions/half-valid-urls.edn
+++ /dev/null
@@ -1,11 +0,0 @@
-[["urls" [{:field-name "url"
-           :base-type  :type/Text}]
-  [["http://www.camsaul.com"]
-   ["http://camsaul.com"]
-   ["https://en.wikipedia.org/wiki/Toucan"]
-   ["ABC"]
-   ["DEF"]
-   [nil]
-   ["https://en.wikipedia.org/wiki/Bird"]
-   ["EFG"]
-   [""]]]]
diff --git a/test/metabase/test/mock/moviedb.clj b/test/metabase/test/mock/moviedb.clj
index cf011e4d95f3fc3f0c402d5bebfa9bdc9f6bdf37..b7e151462a91ccea8a266e800bdd9b92f5411d78 100644
--- a/test/metabase/test/mock/moviedb.clj
+++ b/test/metabase/test/mock/moviedb.clj
@@ -1,192 +1,112 @@
 (ns metabase.test.mock.moviedb
   "A simple relational schema based mocked for testing. 4 tables w/ some FKs."
-  (:require [metabase.driver :as driver]))
+  (:require [metabase.driver :as driver]
+            [metabase.test.mock.util :refer [table-defaults field-defaults]]))
 
 
-(def ^:private ^:const moviedb-tables
-  {"movies"  {:name   "movies"
-              :schema nil
-              :fields #{{:name      "id"
-                         :base-type :type/Integer}
-                        {:name      "title"
-                         :base-type :type/Text}
-                        {:name      "filming"
-                         :base-type :type/Boolean}}}
-   "actors"  {:name   "actors"
-              :schema nil
-              :fields #{{:name      "id"
-                         :base-type :type/Integer}
-                        {:name      "name"
-                         :base-type :type/Text}}}
-   "roles"   {:name   "roles"
-              :schema nil
-              :fields #{{:name      "id"
-                         :base-type :type/Integer}
-                        {:name      "movie_id"
-                         :base-type :type/Integer}
-                        {:name      "actor_id"
-                         :base-type :type/Integer}
-                        {:name      "character"
-                         :base-type :type/Text}
-                        {:name      "salary"
-                         :base-type :type/Decimal}}
-              :fks    #{{:fk-column-name   "movie_id"
-                         :dest-table       {:name "movies"
-                                            :schema nil}
-                         :dest-column-name "id"}
-                        {:fk-column-name   "actor_id"
-                         :dest-table       {:name "actors"
-                                            :schema nil}
-                         :dest-column-name "id"}}}
-   "reviews" {:name   "reviews"
-              :schema nil
-              :fields #{{:name      "id"
-                         :base-type :type/Integer}
-                        {:name      "movie_id"
-                         :base-type :type/Integer}
-                        {:name      "stars"
-                         :base-type :type/Integer}}
-              :fks    #{{:fk-column-name   "movie_id"
-                         :dest-table       {:name   "movies"
-                                            :schema nil}
-                         :dest-column-name "id"}}}})
+(def ^:private moviedb-tables
+  {"movies"
+   {:name   "movies"
+    :schema nil
+    :fields #{{:name      "id"
+               :base-type :type/Integer}
+              {:name      "title"
+               :base-type :type/Text}
+              {:name      "filming"
+               :base-type :type/Boolean}}}
+
+   "actors"
+   {:name   "actors"
+    :schema nil
+    :fields #{{:name      "id"
+               :base-type :type/Integer}
+              {:name      "name"
+               :base-type :type/Text}}}
+
+   "roles"
+   {:name   "roles"
+    :schema nil
+    :fields #{{:name      "id"
+               :base-type :type/Integer}
+              {:name      "movie_id"
+               :base-type :type/Integer}
+              {:name      "actor_id"
+               :base-type :type/Integer}
+              {:name      "character"
+               :base-type :type/Text}
+              {:name      "salary"
+               :base-type :type/Decimal}}
+    :fks    #{{:fk-column-name   "movie_id"
+               :dest-table       {:name   "movies"
+                                  :schema nil}
+               :dest-column-name "id"}
+              {:fk-column-name   "actor_id"
+               :dest-table       {:name   "actors"
+                                  :schema nil}
+               :dest-column-name "id"}}}
+
+   "reviews"
+   {:name   "reviews"
+    :schema nil
+    :fields #{{:name      "id"
+               :base-type :type/Integer}
+              {:name      "movie_id"
+               :base-type :type/Integer}
+              {:name      "stars"
+               :base-type :type/Integer}}
+    :fks    #{{:fk-column-name   "movie_id"
+               :dest-table       {:name   "movies"
+                                  :schema nil}
+               :dest-column-name "id"}}}
+
+   "_metabase_metadata"
+   {:name   "_metabase_metadata"
+    :schema nil
+    :fields #{{:name      "keypath"
+               :base-type :type/Text}
+              {:name      "value"
+               :base-type :type/Text}}}})
+
 
 (defrecord MovieDbDriver []
   clojure.lang.Named
   (getName [_] "MovieDbDriver"))
 
-(extend MovieDbDriver
-  driver/IDriver
-  (merge driver/IDriverDefaultsMixin
-         {:analyze-table      (constantly nil)
-          :describe-database  (fn [_ {:keys [exclude-tables]}]
-                                (let [tables (for [table (vals moviedb-tables)
-                                                   :when (not (contains? exclude-tables (:name table)))]
-                                               (select-keys table [:schema :name]))]
-                                  {:tables (set tables)}))
-          :describe-table     (fn [_ _ table]
-                                (-> (get moviedb-tables (:name table))
-                                    (dissoc :fks)))
-          :describe-table-fks (fn [_ _ table]
-                                (-> (get moviedb-tables (:name table))
-                                    :fks
-                                    set))
-          :features           (constantly #{:foreign-keys})
-          :details-fields     (constantly [])
-          :table-rows-seq     (constantly [{:keypath "movies.filming.description", :value "If the movie is currently being filmed."}
-                                           {:keypath "movies.description", :value "A cinematic adventure."}])}))
-
-(driver/register-driver! :moviedb (MovieDbDriver.))
 
-(def ^:private ^:const raw-table-defaults
-  {:schema      nil
-   :database_id true
-   :updated_at  true
-   :details     {}
-   :active      true
-   :id          true
-   :created_at  true})
+(defn- describe-database [_ {:keys [exclude-tables]}]
+  (let [tables (for [table (vals moviedb-tables)
+                     :when (not (contains? exclude-tables (:name table)))]
+                 (select-keys table [:schema :name]))]
+    {:tables (set tables)}))
 
-(def ^:private ^:const raw-field-defaults
-  {:raw_table_id        true
-   :fk_target_column_id false
-   :updated_at          true
-   :active              true
-   :id                  true
-   :is_pk               false
-   :created_at          true
-   :column_type         nil})
+(defn- describe-table [_ _ table]
+  (-> (get moviedb-tables (:name table))
+      (dissoc :fks)))
 
+(defn- describe-table-fks [_ _ table]
+  (-> (get moviedb-tables (:name table))
+      :fks
+      set))
 
-(def ^:const moviedb-raw-tables
-  [(merge raw-table-defaults
-          {:columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text"}})]
-           :name    "actors"})
-   (merge raw-table-defaults
-          {:columns [(merge raw-field-defaults
-                            {:name    "filming"
-                             :details {:base-type "type/Boolean"}})
-                     (merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "title"
-                             :details {:base-type "type/Text"}})]
-           :name    "movies"})
-   (merge raw-table-defaults
-          {:columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name                "movie_id"
-                             :details             {:base-type "type/Integer"}
-                             :fk_target_column_id true})
-                     (merge raw-field-defaults
-                            {:name    "stars"
-                             :details {:base-type "type/Integer"}})]
-           :name    "reviews"})
-   (merge raw-table-defaults
-          {:columns [(merge raw-field-defaults
-                            {:name                "actor_id"
-                             :details             {:base-type "type/Integer"}
-                             :fk_target_column_id true})
-                     (merge raw-field-defaults
-                            {:name    "character"
-                             :details {:base-type "type/Text"}})
-                     (merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name                "movie_id"
-                             :details             {:base-type "type/Integer"}
-                             :fk_target_column_id true})
-                     (merge raw-field-defaults
-                            {:name    "salary"
-                             :details {:base-type "type/Decimal"}})]
-           :name    "roles"})])
+(defn- table-rows-seq [_ _ table]
+  (when (= (:name table) "_metabase_metadata")
+    [{:keypath "movies.filming.description", :value "If the movie is currently being filmed."}
+     {:keypath "movies.description", :value "A cinematic adventure."}]))
 
 
-(def ^:private ^:const table-defaults
-  {:description             nil
-   :entity_type             nil
-   :caveats                 nil
-   :points_of_interest      nil
-   :show_in_getting_started false
-   :schema                  nil
-   :raw_table_id            true
-   :rows                    nil
-   :updated_at              true
-   :entity_name             nil
-   :active                  true
-   :id                      true
-   :db_id                   true
-   :visibility_type         nil
-   :created_at              true})
+(extend MovieDbDriver
+  driver/IDriver
+  (merge driver/IDriverDefaultsMixin
+         {:describe-database  describe-database
+          :describe-table     describe-table
+          :describe-table-fks describe-table-fks
+          :features           (constantly #{:foreign-keys})
+          :details-fields     (constantly [])
+          :table-rows-seq     table-rows-seq}))
 
-(def ^:privaet ^:const field-defaults
-  {:description        nil
-   :table_id           true
-   :caveats            nil
-   :points_of_interest nil
-   :special_type       nil
-   :fk_target_field_id false
-   :updated_at         true
-   :active             true
-   :parent_id          false
-   :id                 true
-   :raw_column_id      true
-   :last_analyzed      false
-   :position           0
-   :visibility_type    :normal
-   :preview_display    true
-   :created_at         true})
+(driver/register-driver! :moviedb (MovieDbDriver.))
 
-(def ^:const moviedb-tables-and-fields
+(def moviedb-tables-and-fields
   [(merge table-defaults
           {:name         "actors"
            :fields       [(merge field-defaults
diff --git a/test/metabase/test/mock/schema_per_customer.clj b/test/metabase/test/mock/schema_per_customer.clj
index 28e3beedaae7f4f380a474ac8969c1b827553c43..ff07bb39fb11aaac6896d2ef42af7535d05fe81b 100644
--- a/test/metabase/test/mock/schema_per_customer.clj
+++ b/test/metabase/test/mock/schema_per_customer.clj
@@ -1,11 +1,12 @@
 (ns metabase.test.mock.schema-per-customer
   "A relational database that replicates a set of tables multiple times such that schema1.* and schema2.* have the
    same set of tables.  This is common in apps that provide an 'instance' per customer."
-  (:require [metabase.driver :as driver]))
+  (:require [metabase.driver :as driver]
+            [metabase.test.mock.util :refer [table-defaults field-defaults]]))
 
 
 ;; NOTE: we throw in a "common" schema which shares an FK across all other schemas just to get tricky
-(def ^:private ^:const schema-per-customer-tables
+(def ^:private schema-per-customer-tables
   {nil      {"city"   {:name   "city"
                        :fields #{{:name         "id"
                                   :base-type    :type/Integer
@@ -47,8 +48,7 @@
 (extend SchemaPerCustomerDriver
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table       (constantly nil)
-          :describe-database   (fn [_ _]
+         {:describe-database   (fn [_ _]
                                  {:tables (conj (->> (for [schema ["s1" "s2" "s3"]]
                                                        (for [table (keys (get schema-per-customer-tables nil))]
                                                          {:schema schema, :name table}))
@@ -79,212 +79,7 @@
 
 (driver/register-driver! :schema-per-customer (SchemaPerCustomerDriver.))
 
-(def ^:private ^:const raw-table-defaults
-  {:schema      nil
-   :database_id true
-   :columns     []
-   :updated_at  true
-   :details     {}
-   :active      true
-   :id          true
-   :created_at  true})
-
-(def ^:private ^:const raw-field-defaults
-  {:column_type         nil
-   :raw_table_id        true
-   :fk_target_column_id false
-   :updated_at          true
-   :details             {}
-   :active              true
-   :id                  true
-   :is_pk               false
-   :created_at          true})
-
-(def ^:const schema-per-customer-raw-tables
-  [(merge raw-table-defaults
-          {:schema  "s3"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "city"})
-   (merge raw-table-defaults
-          {:schema  "s2"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name                "reviewer_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "common", :name "user", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "text"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})
-                     (merge raw-field-defaults
-                            {:name                "venue_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s2", :name "venue", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})]
-           :name    "review"})
-   (merge raw-table-defaults
-          {:schema  "s3"
-           :columns [(merge raw-field-defaults
-                            {:name                "city_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s3", :name "city", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "venue"})
-   (merge raw-table-defaults
-          {:schema  "s2"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "city"})
-   (merge raw-table-defaults
-          {:schema  "s1"
-           :columns [(merge raw-field-defaults
-                            {:name                "city_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s1", :name "city", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "venue"})
-   (merge raw-table-defaults
-          {:schema  "common"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text"}})]
-           :name    "user"})
-   (merge raw-table-defaults
-          {:schema  "s3"
-           :columns [(merge raw-field-defaults
-                            {:name                "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk               true})
-                     (merge raw-field-defaults
-                            {:name                "reviewer_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "common", :name "user", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "text"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})
-                     (merge raw-field-defaults
-                            {:name                "venue_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s3", :name "venue", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})]
-           :name    "review"})
-   (merge raw-table-defaults
-          {:schema  "s2"
-           :columns [(merge raw-field-defaults
-                            {:name                "city_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s2", :name "city", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "venue"})
-   (merge raw-table-defaults
-          {:schema  "s1"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name                "reviewer_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "common", :name "user", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})
-                     (merge raw-field-defaults
-                            {:name    "text"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})
-                     (merge raw-field-defaults
-                            {:name                "venue_id"
-                             :fk_target_column_id true
-                             :fk_target_column    {:schema "s1", :name "venue", :col-name "id"}
-                             :details             {:base-type "type/Integer"}})]
-           :name    "review"})
-   (merge raw-table-defaults
-          {:schema  "s1"
-           :columns [(merge raw-field-defaults
-                            {:name    "id"
-                             :details {:base-type "type/Integer"}
-                             :is_pk   true})
-                     (merge raw-field-defaults
-                            {:name    "name"
-                             :details {:base-type "type/Text", :special-type "type/Name"}})]
-           :name    "city"})])
-
-
-(def ^:private ^:const table-defaults
-  {:description             nil
-   :entity_type             nil
-   :caveats                 nil
-   :points_of_interest      nil
-   :show_in_getting_started false
-   :schema                  nil
-   :raw_table_id            true
-   :fields                  []
-   :rows                    nil
-   :updated_at              true
-   :entity_name             nil
-   :active                  true
-   :id                      true
-   :db_id                   true
-   :visibility_type         nil
-   :created_at              true})
-
-
-(def ^:private ^:const field-defaults
-  {:description        nil
-   :table_id           true
-   :caveats            nil
-   :points_of_interest nil
-   :fk_target_field_id false
-   :updated_at         true
-   :active             true
-   :parent_id          false
-   :id                 true
-   :raw_column_id      true
-   :last_analyzed      false
-   :position           0
-   :visibility_type    :normal
-   :preview_display    true
-   :created_at         true})
-
-(def ^:const schema-per-customer-tables-and-fields
+(def schema-per-customer-tables-and-fields
   [(merge table-defaults
           {:schema       "common"
            :name         "user"
diff --git a/test/metabase/test/mock/toucanery.clj b/test/metabase/test/mock/toucanery.clj
index 8dd616e0fd192abc1d953c93ddec6b0ead5874e8..4749d3d32a75b5f732ecbc7394203bfb6d7b58cd 100644
--- a/test/metabase/test/mock/toucanery.clj
+++ b/test/metabase/test/mock/toucanery.clj
@@ -2,7 +2,8 @@
   "A document style database mocked for testing.
    This is a `:dynamic-schema` db with `:nested-fields`.
    Most notably meant to serve as a representation of a Mongo database."
-  (:require [metabase.driver :as driver]))
+  (:require [metabase.driver :as driver]
+            [metabase.test.mock.util :refer [table-defaults field-defaults]]))
 
 
 (def ^:private ^:const toucanery-tables
@@ -38,6 +39,22 @@
                           {:name      "name"
                            :base-type :type/Text}}}})
 
+
+(defn- describe-database [_ {:keys [exclude-tables]}]
+  (let [tables (for [table (vals toucanery-tables)
+                     :when (not (contains? exclude-tables (:name table)))]
+                 (select-keys table [:schema :name]))]
+    {:tables (set tables)}))
+
+(defn- describe-table [_ _ table]
+  (get toucanery-tables (:name table)))
+
+(defn- table-rows-seq [_ _ table]
+  (when (= (:name table) "_metabase_metadata")
+    [{:keypath "movies.filming.description", :value "If the movie is currently being filmed."}
+     {:keypath "movies.description", :value "A cinematic adventure."}]))
+
+
 (defrecord ToucaneryDriver []
   clojure.lang.Named
   (getName [_] "ToucaneryDriver"))
@@ -45,73 +62,16 @@
 (extend ToucaneryDriver
   driver/IDriver
   (merge driver/IDriverDefaultsMixin
-         {:analyze-table     (constantly nil)
-          :describe-database (fn [_ {:keys [exclude-tables]}]
-                               (let [tables (for [table (vals toucanery-tables)
-                                                  :when (not (contains? exclude-tables (:name table)))]
-                                              (select-keys table [:schema :name]))]
-                                 {:tables (set tables)}))
-          :describe-table    (fn [_ _ table]
-                               (get toucanery-tables (:name table)))
-          :features          (constantly #{:dynamic-schema :nested-fields})
-          :details-fields    (constantly [])
-          :table-rows-seq    (constantly [{:keypath "movies.filming.description", :value "If the movie is currently being filmed."}
-                                          {:keypath "movies.description", :value "A cinematic adventure."}])}))
+         {:describe-database     describe-database
+          :describe-table        describe-table
+          :features              (constantly #{:dynamic-schema :nested-fields})
+          :details-fields        (constantly [])
+          :field-values-lazy-seq (constantly nil)
+          :table-rows-seq        table-rows-seq}))
 
 (driver/register-driver! :toucanery (ToucaneryDriver.))
 
-(def ^:private ^:const raw-table-defaults
-  {:schema      nil
-   :database_id true
-   :columns     []
-   :updated_at  true
-   :details     {}
-   :active      true
-   :id          true
-   :created_at  true})
-
-(def ^:const toucanery-raw-tables-and-columns
-  [(merge raw-table-defaults {:name "employees"})
-   (merge raw-table-defaults {:name "transactions"})])
-
-
-(def ^:private ^:const table-defaults
-  {:description             nil
-   :entity_type             nil
-   :caveats                 nil
-   :points_of_interest      nil
-   :show_in_getting_started false
-   :schema                  nil
-   :raw_table_id            true
-   :fields                  []
-   :rows                    nil
-   :updated_at              true
-   :entity_name             nil
-   :active                  true
-   :id                      true
-   :db_id                   true
-   :visibility_type         nil
-   :created_at              true})
-
-(def ^:private ^:const field-defaults
-  {:description        nil
-   :table_id           true
-   :caveats            nil
-   :points_of_interest nil
-   :fk_target_field_id false
-   :updated_at         true
-   :active             true
-   :parent_id          false
-   :special_type       nil
-   :id                 true
-   :raw_column_id      false
-   :last_analyzed      false
-   :position           0
-   :visibility_type    :normal
-   :preview_display    true
-   :created_at         true})
-
-(def ^:const toucanery-tables-and-fields
+(def toucanery-tables-and-fields
   [(merge table-defaults
           {:name         "employees"
            :fields       [(merge field-defaults
diff --git a/test/metabase/test/mock/util.clj b/test/metabase/test/mock/util.clj
new file mode 100644
index 0000000000000000000000000000000000000000..778c8c99971f5037ea29067eed6fe5f1abcba1d7
--- /dev/null
+++ b/test/metabase/test/mock/util.clj
@@ -0,0 +1,37 @@
+(ns metabase.test.mock.util)
+
+(def table-defaults
+  {:description             nil
+   :entity_type             nil
+   :caveats                 nil
+   :points_of_interest      nil
+   :show_in_getting_started false
+   :schema                  nil
+   :raw_table_id            false
+   :fields                  []
+   :rows                    nil
+   :updated_at              true
+   :entity_name             nil
+   :active                  true
+   :id                      true
+   :db_id                   true
+   :visibility_type         nil
+   :created_at              true})
+
+(def field-defaults
+  {:description        nil
+   :table_id           true
+   :caveats            nil
+   :points_of_interest nil
+   :fk_target_field_id false
+   :updated_at         true
+   :active             true
+   :parent_id          false
+   :special_type       nil
+   :id                 true
+   :raw_column_id      false
+   :last_analyzed      true
+   :position           0
+   :visibility_type    :normal
+   :preview_display    true
+   :created_at         true})
diff --git a/test/metabase/test/util.clj b/test/metabase/test/util.clj
index 24edbaeeebf1acd76b5f3f056b4652858356431f..a6a23f89f37d45637da37d764f28154e58193be0 100644
--- a/test/metabase/test/util.clj
+++ b/test/metabase/test/util.clj
@@ -15,8 +15,6 @@
              [permissions-group :refer [PermissionsGroup]]
              [pulse :refer [Pulse]]
              [pulse-channel :refer [PulseChannel]]
-             [raw-column :refer [RawColumn]]
-             [raw-table :refer [RawTable]]
              [revision :refer [Revision]]
              [segment :refer [Segment]]
              [setting :as setting]
@@ -86,21 +84,24 @@
   (str (random-name) "@metabase.com"))
 
 (defn boolean-ids-and-timestamps
-  "Useful for unit test comparisons. Converts map keys with 'id' or '_at' to booleans."
-  [m]
-  (let [f (fn [v]
-            (cond
-              (map? v) (boolean-ids-and-timestamps v)
-              (coll? v) (mapv boolean-ids-and-timestamps v)
-              :else v))]
-    (into {} (for [[k v] m]
-               (if (or (= :id k)
-                       (.endsWith (name k) "_id")
-                       (= :created_at k)
-                       (= :updated_at k)
-                       (= :last_analyzed k))
-                 [k (not (nil? v))]
-                 [k (f v)])))))
+  "Useful for unit test comparisons. Replaces the values of keys in `DATA`
+  that satisfy `PRED` with booleans (true when the value is non-nil)."
+  ([data]
+   (boolean-ids-and-timestamps
+    (every-pred (some-fn keyword? string?)
+                (some-fn #{:id :created_at :updated_at :last_analyzed :created-at :updated-at :field-value-id :field-id}
+                         #(.endsWith (name %) "_id")))
+    data))
+  ([pred data]
+   (walk/prewalk (fn [maybe-map]
+                   (if (map? maybe-map)
+                     (reduce-kv (fn [acc k v]
+                                  (if (pred k)
+                                    (assoc acc k (not (nil? v)))
+                                    (assoc acc k v)))
+                                {} maybe-map)
+                     maybe-map))
+                 data)))
 
 
 (defn- user-id [username]
@@ -170,16 +171,6 @@
                                     :schedule_type :daily
                                     :schedule_hour 15})})
 
-(u/strict-extend (class RawColumn)
-  test/WithTempDefaults
-  {:with-temp-defaults (fn [_] {:active true
-                                :name   (random-name)})})
-
-(u/strict-extend (class RawTable)
-  test/WithTempDefaults
-  {:with-temp-defaults (fn [_] {:active true
-                                :name   (random-name)})})
-
 (u/strict-extend (class Revision)
   test/WithTempDefaults
   {:with-temp-defaults (fn [_] {:user_id      (rasta-id)
@@ -323,3 +314,32 @@
                      (vec form)
                      form))
                  x))
+
+(defn- update-in-if-present
+  "If the path `KS` is found in `M`, call update-in with the original
+  arguments to this function; otherwise return `M` unchanged."
+  [m ks f & args]
+  (if (= ::not-found (get-in m ks ::not-found))
+    m
+    (apply update-in m ks f args)))
+
+(defn- round-fingerprint-fields [fprint-type-map fields]
+  (reduce (fn [fprint field]
+            (update-in-if-present fprint [field] (fn [num]
+                                                   (if (integer? num)
+                                                     num
+                                                     (u/round-to-decimals 3 num)))))
+          fprint-type-map fields))
+
+(defn round-fingerprint
+  "Rounds the numerical fields of a fingerprint to 3 decimal places"
+  [field]
+  (-> field
+      (update-in-if-present [:fingerprint :type :type/Number] round-fingerprint-fields [:min :max :avg])
+      (update-in-if-present [:fingerprint :type :type/Text] round-fingerprint-fields [:percent-json :percent-url :percent-email :average-length])))
+
+(defn  round-fingerprint-cols [query-results]
+  (let [maybe-data-cols (if (contains? query-results :data)
+                          [:data :cols]
+                          [:cols])]
+    (update-in query-results maybe-data-cols #(map round-fingerprint %))))
diff --git a/test/metabase/test_setup.clj b/test/metabase/test_setup.clj
index a5b2f8914c1f97530beed2eefc7790d68facc1f5..ce79b3578066f9daa68b022a828867a8a2ac617c 100644
--- a/test/metabase/test_setup.clj
+++ b/test/metabase/test_setup.clj
@@ -99,3 +99,17 @@
   []
   (log/info "Shutting down Metabase unit test runner")
   (core/stop-jetty!))
+
+(defn call-with-test-scaffolding
+  "Runs `test-startup` and ensures `test-teardown` is always
+  called. This function is useful for running a test (or test
+  namespace) at the repl with the appropriate environment setup for
+  the test to pass."
+  [f]
+  (try
+    (test-startup)
+    (f)
+    (catch Exception e
+      (throw e))
+    (finally
+      (test-teardown))))
diff --git a/test/metabase/timeseries_query_processor_test.clj b/test/metabase/timeseries_query_processor_test.clj
index ed647a33f823072dd909f969334aac0ed6e56630..0d1e282be33cbdcae6f487e6ab3952290345bff8 100644
--- a/test/metabase/timeseries_query_processor_test.clj
+++ b/test/metabase/timeseries_query_processor_test.clj
@@ -4,7 +4,7 @@
   (:require [metabase
              [query-processor-test :refer [first-row format-rows-by rows]]
              [util :as u]]
-            [metabase.query-processor.expand :as ql]
+            [metabase.query-processor.middleware.expand :as ql]
             [metabase.test.data :as data]
             [metabase.test.data
              [dataset-definitions :as defs]
diff --git a/test/metabase/util_test.clj b/test/metabase/util_test.clj
index 8232181092db3bae876e196c3de9d437f5994a29..5fe7f719f5ea526b38aa6f04741e8afd9cd031a7 100644
--- a/test/metabase/util_test.clj
+++ b/test/metabase/util_test.clj
@@ -242,3 +242,7 @@
   (select-keys-when {:a 100, :b nil, :d 200, :e nil}
     :present #{:a :b :c}
     :non-nil #{:d :e :f}))
+
+(expect
+  [-2 -1 0 1 2 3 0 3]
+  (map order-of-magnitude [0.01 0.5 4 12 444 1023 0 -1444]))
diff --git a/test_resources/log4j.properties b/test_resources/log4j.properties
index 161945c47bd0cc0b9aece51e130d6ff59e51c407..a085ebb0927e78d16ea47dfe4d800017a298cd5e 100644
--- a/test_resources/log4j.properties
+++ b/test_resources/log4j.properties
@@ -21,3 +21,6 @@ log4j.logger.metabase=ERROR
 log4j.logger.metabase.test-setup=INFO
 log4j.logger.metabase.test.data.datasets=INFO
 log4j.logger.metabase.util.encryption=INFO
+# NOCOMMIT
+log4j.logger.metabase.sync=DEBUG
+log4j.logger.metabase.test.data=DEBUG
diff --git a/yarn.lock b/yarn.lock
index 1127442bfe07f31a67e3b8e948620b45da7dd592..5322c93e8bb31b7a48942367a171629496f794a3 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4829,6 +4829,10 @@ lcid@^1.0.0:
   dependencies:
     invert-kv "^1.0.0"
 
+leaflet.heat@^0.2.0:
+  version "0.2.0"
+  resolved "https://registry.yarnpkg.com/leaflet.heat/-/leaflet.heat-0.2.0.tgz#109d8cf586f0adee41f05aff031e27a77fecc229"
+
 leaflet-draw@^0.4.9:
   version "0.4.9"
   resolved "https://registry.yarnpkg.com/leaflet-draw/-/leaflet-draw-0.4.9.tgz#44105088310f47e4856d5ede37d47ecfad0cf2d5"